file_name (large_string, lengths 4 to 140) | prefix (large_string, lengths 0 to 12.1k) | suffix (large_string, lengths 0 to 12k) | middle (large_string, lengths 0 to 7.51k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
bindings.js | Price
};
};
// Check if all referenced services in connections and placements are really deployed.
Deployment.prototype.vet = function() {
var labelMap = {};
this.services.forEach(function(service) {
labelMap[service.name] = true;
});
this.services.forEach(function(service) {
service.connections.forEach(function(conn) {
var to = conn.to.name;
if (!labelMap[to]) {
throw service.name + " has a connection to undeployed service: " + to;
}
});
var hasFloatingIp = false;
service.placements.forEach(function(plcm) {
if (plcm.floatingIp) {
hasFloatingIp = true;
}
var otherLabel = plcm.otherLabel;
if (otherLabel !== undefined && !labelMap[otherLabel]) {
throw service.name + " has a placement in terms of an " +
"undeployed service: " + otherLabel;
}
});
if (hasFloatingIp && service.incomingPublic.length
&& service.containers.length > 1) {
throw service.name + " has a floating IP and multiple containers. " +
"This is not yet supported."
}
});
};
// deploy adds an object, or list of objects, to the deployment.
// Deployable objects must implement the deploy(deployment) interface.
Deployment.prototype.deploy = function(toDeployList) {
if (toDeployList.constructor !== Array) {
toDeployList = [toDeployList];
}
var that = this;
toDeployList.forEach(function(toDeploy) {
if (!toDeploy.deploy) {
throw "only objects that implement \"deploy(deployment)\" can be deployed";
}
toDeploy.deploy(that);
});
};
Deployment.prototype.assert = function(rule, desired) {
this.invariants.push(new Assertion(rule, desired));
};
function Service(name, containers) {
this.name = uniqueLabelName(name);
this.containers = containers;
this.annotations = [];
this.placements = [];
this.connections = [];
this.outgoingPublic = [];
this.incomingPublic = [];
}
// Get the Quilt hostname that represents the entire service.
Service.prototype.hostname = function() {
return this.name + ".q";
};
// Get a list of Quilt hostnames that address the containers within the service.
Service.prototype.children = function() {
var i;
var res = [];
for (i = 1; i < this.containers.length + 1; i++) {
res.push(i + "." + this.name + ".q");
}
return res;
};
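// For example (illustrative only): a service named "web" holding three
// containers would yield ["1.web.q", "2.web.q", "3.web.q"].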
Service.prototype.annotate = function(annotation) {
this.annotations.push(annotation);
};
Service.prototype.canReach = function(target) {
if (target === publicInternet) {
return reachable(this.name, publicInternetLabel);
}
return reachable(this.name, target.name);
};
Service.prototype.canReachACL = function(target) {
return reachableACL(this.name, target.name);
};
Service.prototype.between = function(src, dst) {
return between(src.name, this.name, dst.name);
};
Service.prototype.neighborOf = function(target) {
return neighbor(this.name, target.name);
};
Service.prototype.deploy = function(deployment) {
deployment.services.push(this);
};
Service.prototype.connect = function(range, to) {
range = boxRange(range);
if (to === publicInternet) {
return this.connectToPublic(range);
}
this.connections.push(new Connection(range, to));
};
// publicInternet is an object that looks like another service that can be
// connected to or from. However, it is actually just syntactic sugar to hide
// the connectToPublic and connectFromPublic functions.
var publicInternet = {
connect: function(range, to) {
to.connectFromPublic(range);
},
canReach: function(to) {
return reachable(publicInternetLabel, to.name);
}
};
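// Illustrative sketch (hypothetical service, not from this file): both calls
// below are equivalent, because connect() recognizes the publicInternet object:
//   var web = new Service("web", [new Container("nginx")]);
//   web.connect(80, publicInternet);   // delegates to web.connectToPublic(80)
//   publicInternet.connect(80, web);   // delegates to web.connectFromPublic(80)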
// Allow outbound traffic from the service to public internet.
Service.prototype.connectToPublic = function(range) {
range = boxRange(range);
if (range.min != range.max) {
throw "public internet cannot connect on port ranges";
}
this.outgoingPublic.push(range);
};
// Allow inbound traffic from public internet to the service.
Service.prototype.connectFromPublic = function(range) {
range = boxRange(range);
if (range.min != range.max) {
throw "public internet cannot connect on port ranges";
}
this.incomingPublic.push(range);
};
Service.prototype.place = function(rule) {
this.placements.push(rule);
};
Service.prototype.getQuiltConnections = function() {
var connections = [];
var that = this;
this.connections.forEach(function(conn) {
connections.push({
from: that.name,
to: conn.to.name,
minPort: conn.minPort,
maxPort: conn.maxPort
});
});
this.outgoingPublic.forEach(function(rng) {
connections.push({
from: that.name,
to: publicInternetLabel,
minPort: rng.min,
maxPort: rng.max
});
});
this.incomingPublic.forEach(function(rng) {
connections.push({
from: publicInternetLabel,
to: that.name,
minPort: rng.min,
maxPort: rng.max
});
});
return connections;
};
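// Illustrative output (hypothetical "web" service that allowed inbound port 80
// via connectFromPublic):
//   [{ from: publicInternetLabel, to: "web", minPort: 80, maxPort: 80 }]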
Service.prototype.getQuiltPlacements = function() {
var placements = [];
var that = this;
this.placements.forEach(function(placement) {
placements.push({
targetLabel: that.name,
exclusive: placement.exclusive,
otherLabel: placement.otherLabel || "",
provider: placement.provider || "",
size: placement.size || "",
region: placement.region || "",
floatingIp: placement.floatingIp || ""
});
});
return placements;
};
var labelNameCount = {};
function uniqueLabelName(name) {
if (!(name in labelNameCount)) {
labelNameCount[name] = 0;
}
var count = ++labelNameCount[name];
if (count == 1) {
return name;
}
return name + labelNameCount[name];
}
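// For example (illustrative): successive calls to uniqueLabelName("web")
// return "web", "web2", "web3", ... so duplicate service names stay distinct.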
// Box raw integers into range.
function boxRange(x) {
if (x === undefined) {
return new Range(0, 0);
}
if (typeof x === "number") {
x = new Range(x, x);
}
return x;
}
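// Illustrative behavior: boxRange(undefined) -> Range(0, 0),
// boxRange(80) -> Range(80, 80), and an existing Range passes through as-is.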
function Machine(optionalArgs) {
this._refID = _.uniqueId();
this.provider = optionalArgs.provider || "";
this.role = optionalArgs.role || "";
this.region = optionalArgs.region || "";
this.size = optionalArgs.size || "";
this.floatingIp = optionalArgs.floatingIp || "";
this.diskSize = optionalArgs.diskSize || 0;
this.sshKeys = optionalArgs.sshKeys || [];
this.cpu = boxRange(optionalArgs.cpu);
this.ram = boxRange(optionalArgs.ram);
}
Machine.prototype.deploy = function(deployment) {
deployment.machines.push(this);
};
// Create a new machine with the same attributes.
Machine.prototype.clone = function() {
// _.clone only creates a shallow copy, so we must clone sshKeys ourselves.
var keyClone = _.clone(this.sshKeys);
var cloned = _.clone(this);
cloned.sshKeys = keyClone;
return new Machine(cloned);
};
Machine.prototype.withRole = function(role) {
var copy = this.clone();
copy.role = role;
return copy;
};
Machine.prototype.asWorker = function() {
return this.withRole("Worker");
};
Machine.prototype.asMaster = function() {
return this.withRole("Master");
};
// Create n new machines with the same attributes.
Machine.prototype.replicate = function(n) {
var i;
var res = [];
for (i = 0 ; i < n ; i++) {
res.push(this.clone());
}
return res;
};
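// Hypothetical usage sketch (provider/size values are assumptions, not from
// this file):
//   var base = new Machine({ provider: "Amazon", size: "m4.large" });
//   deployment.deploy(base.asMaster());
//   deployment.deploy(base.asWorker().replicate(3));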
function Container(image, command) {
// refID is used to distinguish deployments with multiple references to the
// same container, and deployments with multiple containers with the exact
// same attributes.
this._refID = _.uniqueId();
this.image = image;
this.command = command || [];
this.env = {};
}
// Create a new Container with the same attributes.
Container.prototype.clone = function() {
var cloned = new Container(this.image, _.clone(this.command));
cloned.env = _.clone(this.env);
return cloned;
};
// Create n new Containers with the same attributes.
Container.prototype.replicate = function(n) {
var i;
var res = [];
for (i = 0 ; i < n ; i++) {
res.push(this.clone());
}
return res;
};
Container.prototype.setEnv = function(key, val) {
this.env[key] = val;
};
Container.prototype.withEnv = function(env) {
var cloned = this.clone();
cloned.env = env;
return cloned;
};
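// Illustrative: c.withEnv({ PORT: "8080" }) returns a modified clone and
// leaves the original container c untouched.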
var enough = { form: "enough" };
var between = invariantType("between");
var neighbor = invariantType("reachDirect");
var reachableACL = invariantType("reachACL");
var reachable = invariantType("reach");
function Assertion(invariant, desired) {
this.form = invariant.form;
this.nodes = invariant.nodes;
this.target = desired;
}
function invariantType(form) | {
return function() {
// Convert the arguments object into a real array. We can't simply use
// Array.from because it isn't defined in Otto.
var nodes = [];
var i;
for (i = 0 ; i < arguments.length ; i++) {
nodes.push(arguments[i]);
}
return {
form: form,
nodes: nodes
};
};
} | identifier_body |
|
bindings.js | (function(plcm) {
if (plcm.floatingIp) {
hasFloatingIp = true;
}
var otherLabel = plcm.otherLabel;
if (otherLabel !== undefined && !labelMap[otherLabel]) {
throw service.name + " has a placement in terms of an " +
"undeployed service: " + otherLabel;
}
});
if (hasFloatingIp && service.incomingPublic.length
&& service.containers.length > 1) {
throw service.name + " has a floating IP and multiple containers. " +
"This is not yet supported."
}
});
};
// deploy adds an object, or list of objects, to the deployment.
// Deployable objects must implement the deploy(deployment) interface.
Deployment.prototype.deploy = function(toDeployList) {
if (toDeployList.constructor !== Array) {
toDeployList = [toDeployList];
}
var that = this;
toDeployList.forEach(function(toDeploy) {
if (!toDeploy.deploy) {
throw "only objects that implement \"deploy(deployment)\" can be deployed";
}
toDeploy.deploy(that);
});
};
Deployment.prototype.assert = function(rule, desired) {
this.invariants.push(new Assertion(rule, desired));
};
function Service(name, containers) {
this.name = uniqueLabelName(name);
this.containers = containers;
this.annotations = [];
this.placements = [];
this.connections = [];
this.outgoingPublic = [];
this.incomingPublic = [];
}
// Get the Quilt hostname that represents the entire service.
Service.prototype.hostname = function() {
return this.name + ".q";
};
// Get a list of Quilt hostnames that address the containers within the service.
Service.prototype.children = function() {
var i;
var res = [];
for (i = 1; i < this.containers.length + 1; i++) {
res.push(i + "." + this.name + ".q");
}
return res;
};
Service.prototype.annotate = function(annotation) {
this.annotations.push(annotation);
};
Service.prototype.canReach = function(target) {
if (target === publicInternet) {
return reachable(this.name, publicInternetLabel);
}
return reachable(this.name, target.name);
};
Service.prototype.canReachACL = function(target) {
return reachableACL(this.name, target.name);
};
Service.prototype.between = function(src, dst) {
return between(src.name, this.name, dst.name);
};
Service.prototype.neighborOf = function(target) {
return neighbor(this.name, target.name);
};
Service.prototype.deploy = function(deployment) {
deployment.services.push(this);
};
Service.prototype.connect = function(range, to) {
range = boxRange(range);
if (to === publicInternet) {
return this.connectToPublic(range);
}
this.connections.push(new Connection(range, to));
};
// publicInternet is an object that looks like another service that can be
// connected to or from. However, it is actually just syntactic sugar to hide
// the connectToPublic and connectFromPublic functions.
var publicInternet = {
connect: function(range, to) {
to.connectFromPublic(range);
},
canReach: function(to) {
return reachable(publicInternetLabel, to.name);
}
};
// Allow outbound traffic from the service to public internet.
Service.prototype.connectToPublic = function(range) {
range = boxRange(range);
if (range.min != range.max) {
throw "public internet cannot connect on port ranges";
}
this.outgoingPublic.push(range);
};
// Allow inbound traffic from public internet to the service.
Service.prototype.connectFromPublic = function(range) {
range = boxRange(range);
if (range.min != range.max) {
throw "public internet cannot connect on port ranges";
}
this.incomingPublic.push(range);
};
Service.prototype.place = function(rule) {
this.placements.push(rule);
};
Service.prototype.getQuiltConnections = function() {
var connections = [];
var that = this;
this.connections.forEach(function(conn) {
connections.push({
from: that.name,
to: conn.to.name,
minPort: conn.minPort,
maxPort: conn.maxPort
});
});
this.outgoingPublic.forEach(function(rng) {
connections.push({
from: that.name,
to: publicInternetLabel,
minPort: rng.min,
maxPort: rng.max
});
});
this.incomingPublic.forEach(function(rng) {
connections.push({
from: publicInternetLabel,
to: that.name,
minPort: rng.min,
maxPort: rng.max
});
});
return connections;
};
Service.prototype.getQuiltPlacements = function() {
var placements = [];
var that = this;
this.placements.forEach(function(placement) {
placements.push({
targetLabel: that.name,
exclusive: placement.exclusive,
otherLabel: placement.otherLabel || "",
provider: placement.provider || "",
size: placement.size || "",
region: placement.region || "",
floatingIp: placement.floatingIp || ""
});
});
return placements;
};
var labelNameCount = {};
function uniqueLabelName(name) {
if (!(name in labelNameCount)) {
labelNameCount[name] = 0;
}
var count = ++labelNameCount[name];
if (count == 1) {
return name;
}
return name + labelNameCount[name];
}
// Box raw integers into range.
function boxRange(x) {
if (x === undefined) {
return new Range(0, 0);
}
if (typeof x === "number") {
x = new Range(x, x);
}
return x;
}
function Machine(optionalArgs) {
this._refID = _.uniqueId();
this.provider = optionalArgs.provider || "";
this.role = optionalArgs.role || "";
this.region = optionalArgs.region || "";
this.size = optionalArgs.size || "";
this.floatingIp = optionalArgs.floatingIp || "";
this.diskSize = optionalArgs.diskSize || 0;
this.sshKeys = optionalArgs.sshKeys || [];
this.cpu = boxRange(optionalArgs.cpu);
this.ram = boxRange(optionalArgs.ram);
}
Machine.prototype.deploy = function(deployment) {
deployment.machines.push(this);
};
// Create a new machine with the same attributes.
Machine.prototype.clone = function() {
// _.clone only creates a shallow copy, so we must clone sshKeys ourselves.
var keyClone = _.clone(this.sshKeys);
var cloned = _.clone(this);
cloned.sshKeys = keyClone;
return new Machine(cloned);
};
Machine.prototype.withRole = function(role) {
var copy = this.clone();
copy.role = role;
return copy;
};
Machine.prototype.asWorker = function() {
return this.withRole("Worker");
};
Machine.prototype.asMaster = function() {
return this.withRole("Master");
};
// Create n new machines with the same attributes.
Machine.prototype.replicate = function(n) {
var i;
var res = [];
for (i = 0 ; i < n ; i++) {
res.push(this.clone());
}
return res;
};
function Container(image, command) {
// refID is used to distinguish deployments with multiple references to the
// same container, and deployments with multiple containers with the exact
// same attributes.
this._refID = _.uniqueId();
this.image = image;
this.command = command || [];
this.env = {};
}
// Create a new Container with the same attributes.
Container.prototype.clone = function() {
var cloned = new Container(this.image, _.clone(this.command));
cloned.env = _.clone(this.env);
return cloned;
};
// Create n new Containers with the same attributes.
Container.prototype.replicate = function(n) {
var i;
var res = [];
for (i = 0 ; i < n ; i++) {
res.push(this.clone());
}
return res;
};
Container.prototype.setEnv = function(key, val) {
this.env[key] = val;
};
Container.prototype.withEnv = function(env) {
var cloned = this.clone();
cloned.env = env;
return cloned;
};
var enough = { form: "enough" };
var between = invariantType("between");
var neighbor = invariantType("reachDirect");
var reachableACL = invariantType("reachACL");
var reachable = invariantType("reach");
function Assertion(invariant, desired) {
this.form = invariant.form;
this.nodes = invariant.nodes;
this.target = desired;
}
function invariantType(form) {
return function() {
// Convert the arguments object into a real array. We can't simply use
// Array.from because it isn't defined in Otto.
var nodes = [];
var i;
for (i = 0 ; i < arguments.length ; i++) {
nodes.push(arguments[i]);
}
return {
form: form,
nodes: nodes
};
};
}
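// Illustrative result: reachable("a", "b") evaluates to
// { form: "reach", nodes: ["a", "b"] }, which Deployment.prototype.assert
// then wraps in an Assertion together with the desired truth value.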
function LabelRule(exclusive, otherService) {
this.exclusive = exclusive;
this.otherLabel = otherService.name;
}
function MachineRule(exclusive, optionalArgs) {
this.exclusive = exclusive;
if (optionalArgs.provider) {
this.provider = optionalArgs.provider;
}
if (optionalArgs.size) {
this.size = optionalArgs.size;
}
if (optionalArgs.region) {
this.region = optionalArgs.region;
}
if (optionalArgs.floatingIp) | {
this.floatingIp = optionalArgs.floatingIp;
} | conditional_block |
|
bindings.js | ) {
if (plcm.floatingIp) {
hasFloatingIp = true;
}
var otherLabel = plcm.otherLabel;
if (otherLabel !== undefined && !labelMap[otherLabel]) {
throw service.name + " has a placement in terms of an " +
"undeployed service: " + otherLabel;
}
});
if (hasFloatingIp && service.incomingPublic.length
&& service.containers.length > 1) {
throw service.name + " has a floating IP and multiple containers. " +
"This is not yet supported."
}
});
};
// deploy adds an object, or list of objects, to the deployment.
// Deployable objects must implement the deploy(deployment) interface.
Deployment.prototype.deploy = function(toDeployList) {
if (toDeployList.constructor !== Array) {
toDeployList = [toDeployList];
}
var that = this;
toDeployList.forEach(function(toDeploy) {
if (!toDeploy.deploy) {
throw "only objects that implement \"deploy(deployment)\" can be deployed";
}
toDeploy.deploy(that);
});
};
Deployment.prototype.assert = function(rule, desired) {
this.invariants.push(new Assertion(rule, desired));
};
function Service(name, containers) {
this.name = uniqueLabelName(name);
this.containers = containers;
this.annotations = [];
this.placements = [];
this.connections = [];
this.outgoingPublic = [];
this.incomingPublic = [];
}
// Get the Quilt hostname that represents the entire service.
Service.prototype.hostname = function() {
return this.name + ".q";
};
// Get a list of Quilt hostnames that address the containers within the service.
Service.prototype.children = function() {
var i;
var res = [];
for (i = 1; i < this.containers.length + 1; i++) {
res.push(i + "." + this.name + ".q");
}
return res;
};
Service.prototype.annotate = function(annotation) {
this.annotations.push(annotation);
};
Service.prototype.canReach = function(target) {
if (target === publicInternet) {
return reachable(this.name, publicInternetLabel);
}
return reachable(this.name, target.name);
};
Service.prototype.canReachACL = function(target) {
return reachableACL(this.name, target.name);
};
Service.prototype.between = function(src, dst) {
return between(src.name, this.name, dst.name);
};
Service.prototype.neighborOf = function(target) {
return neighbor(this.name, target.name);
};
Service.prototype.deploy = function(deployment) {
deployment.services.push(this);
};
Service.prototype.connect = function(range, to) {
range = boxRange(range);
if (to === publicInternet) {
return this.connectToPublic(range);
}
this.connections.push(new Connection(range, to));
};
// publicInternet is an object that looks like another service that can be
// connected to or from. However, it is actually just syntactic sugar to hide
// the connectToPublic and connectFromPublic functions.
var publicInternet = {
connect: function(range, to) {
to.connectFromPublic(range);
},
canReach: function(to) {
return reachable(publicInternetLabel, to.name);
}
};
// Allow outbound traffic from the service to public internet.
Service.prototype.connectToPublic = function(range) {
range = boxRange(range);
if (range.min != range.max) {
throw "public internet cannot connect on port ranges";
}
this.outgoingPublic.push(range);
};
// Allow inbound traffic from public internet to the service.
Service.prototype.connectFromPublic = function(range) {
range = boxRange(range);
if (range.min != range.max) {
throw "public internet cannot connect on port ranges";
}
this.incomingPublic.push(range);
};
Service.prototype.place = function(rule) {
this.placements.push(rule);
};
Service.prototype.getQuiltConnections = function() {
var connections = [];
var that = this;
this.connections.forEach(function(conn) {
connections.push({
from: that.name,
to: conn.to.name,
minPort: conn.minPort,
maxPort: conn.maxPort
});
});
this.outgoingPublic.forEach(function(rng) {
connections.push({
from: that.name,
to: publicInternetLabel,
minPort: rng.min,
maxPort: rng.max
});
});
this.incomingPublic.forEach(function(rng) {
connections.push({
from: publicInternetLabel,
to: that.name,
minPort: rng.min,
maxPort: rng.max
});
});
return connections;
};
Service.prototype.getQuiltPlacements = function() {
var placements = [];
var that = this;
this.placements.forEach(function(placement) {
placements.push({
targetLabel: that.name,
exclusive: placement.exclusive,
otherLabel: placement.otherLabel || "",
provider: placement.provider || "",
size: placement.size || "",
region: placement.region || "",
floatingIp: placement.floatingIp || ""
});
});
return placements;
};
var labelNameCount = {};
function uniqueLabelName(name) {
if (!(name in labelNameCount)) {
labelNameCount[name] = 0;
}
var count = ++labelNameCount[name];
if (count == 1) {
return name;
}
return name + labelNameCount[name];
}
// Box raw integers into range.
function boxRange(x) {
if (x === undefined) {
return new Range(0, 0);
}
if (typeof x === "number") {
x = new Range(x, x);
}
return x;
}
function Machine(optionalArgs) {
this._refID = _.uniqueId();
this.provider = optionalArgs.provider || "";
this.role = optionalArgs.role || "";
this.region = optionalArgs.region || "";
this.size = optionalArgs.size || "";
this.floatingIp = optionalArgs.floatingIp || "";
this.diskSize = optionalArgs.diskSize || 0;
this.sshKeys = optionalArgs.sshKeys || [];
this.cpu = boxRange(optionalArgs.cpu);
this.ram = boxRange(optionalArgs.ram);
}
Machine.prototype.deploy = function(deployment) {
deployment.machines.push(this);
};
// Create a new machine with the same attributes.
Machine.prototype.clone = function() {
// _.clone only creates a shallow copy, so we must clone sshKeys ourselves.
var keyClone = _.clone(this.sshKeys);
var cloned = _.clone(this);
cloned.sshKeys = keyClone;
return new Machine(cloned);
};
Machine.prototype.withRole = function(role) {
var copy = this.clone();
copy.role = role;
return copy;
};
Machine.prototype.asWorker = function() {
return this.withRole("Worker");
};
Machine.prototype.asMaster = function() {
return this.withRole("Master");
};
// Create n new machines with the same attributes.
Machine.prototype.replicate = function(n) {
var i;
var res = [];
for (i = 0 ; i < n ; i++) {
res.push(this.clone());
}
return res;
};
function Container(image, command) {
// refID is used to distinguish deployments with multiple references to the
// same container, and deployments with multiple containers with the exact
// same attributes.
this._refID = _.uniqueId();
this.image = image;
this.command = command || [];
this.env = {};
}
// Create a new Container with the same attributes.
Container.prototype.clone = function() {
var cloned = new Container(this.image, _.clone(this.command));
cloned.env = _.clone(this.env);
return cloned;
};
// Create n new Containers with the same attributes.
Container.prototype.replicate = function(n) {
var i;
var res = [];
for (i = 0 ; i < n ; i++) {
res.push(this.clone());
}
return res;
};
Container.prototype.setEnv = function(key, val) {
this.env[key] = val;
};
Container.prototype.withEnv = function(env) {
var cloned = this.clone();
cloned.env = env;
return cloned;
};
var enough = { form: "enough" };
var between = invariantType("between");
var neighbor = invariantType("reachDirect");
var reachableACL = invariantType("reachACL");
var reachable = invariantType("reach");
function Assertion(invariant, desired) {
this.form = invariant.form;
this.nodes = invariant.nodes;
this.target = desired;
}
function invariantType(form) {
return function() {
// Convert the arguments object into a real array. We can't simply use
// Array.from because it isn't defined in Otto.
var nodes = [];
var i;
for (i = 0 ; i < arguments.length ; i++) {
nodes.push(arguments[i]);
}
return {
form: form,
nodes: nodes
};
};
}
function LabelRule(exclusive, otherService) {
this.exclusive = exclusive;
this.otherLabel = otherService.name;
}
function MachineRule(exclusive, optionalArgs) {
this.exclusive = exclusive;
if (optionalArgs.provider) {
this.provider = optionalArgs.provider;
}
if (optionalArgs.size) {
this.size = optionalArgs.size;
}
if (optionalArgs.region) {
this.region = optionalArgs.region;
}
if (optionalArgs.floatingIp) {
this.floatingIp = optionalArgs.floatingIp;
}
}
function | Connection | identifier_name |
|
test-ae-rbm.py | import os
from rbm import RBM
from au import AutoEncoder
import tensorflow as tf
import input_data
from utilsnn import show_image, min_max_scale
import matplotlib.pyplot as plt
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_string('data_dir', '/tmp/data/', 'Directory for storing data')
flags.DEFINE_integer('epochs', 50, 'The number of training epochs')
flags.DEFINE_integer('batchsize', 30, 'The batch size')
flags.DEFINE_boolean('restore_rbm', False, 'Whether to restore the RBM weights or not.')
# ensure output dir exists
if not os.path.isdir('out'):
os.mkdir('out')
mnist = input_data.read_data_sets(FLAGS.data_dir, one_hot=True)
trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels
trX, teX = min_max_scale(trX, teX)
# RBMs
rbmobject1 = RBM(784, 900, ['rbmw1', 'rbvb1', 'rbmhb1'], 0.3)
rbmobject2 = RBM(900, 500, ['rbmw2', 'rbvb2', 'rbmhb2'], 0.3)
rbmobject3 = RBM(500, 250, ['rbmw3', 'rbvb3', 'rbmhb3'], 0.3)
rbmobject4 = RBM(250, 2, ['rbmw4', 'rbvb4', 'rbmhb4'], 0.3)
if FLAGS.restore_rbm:
rbmobject1.restore_weights('./out/rbmw1.chp')
rbmobject2.restore_weights('./out/rbmw2.chp')
rbmobject3.restore_weights('./out/rbmw3.chp')
rbmobject4.restore_weights('./out/rbmw4.chp')
# Autoencoder
autoencoder = AutoEncoder(784, [900, 500, 250, 2], [['rbmw1', 'rbmhb1'],
['rbmw2', 'rbmhb2'],
['rbmw3', 'rbmhb3'],
['rbmw4', 'rbmhb4']], tied_weights=False)
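# Note: the autoencoder's layer sizes (784 -> 900 -> 500 -> 250 -> 2) mirror the
# four RBMs above so each RBM's trained weights can seed the matching layer.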
iterations = len(trX) // FLAGS.batchsize  # integer division so range() accepts it
# Train First RBM
print('first rbm')
for i in range(FLAGS.epochs):
for j in range(iterations):
batch_xs, batch_ys = mnist.train.next_batch(FLAGS.batchsize)
rbmobject1.partial_fit(batch_xs)
print(rbmobject1.compute_cost(trX))
show_image("out/1rbm.jpg", rbmobject1.n_w, (28, 28), (30, 30))
rbmobject1.save_weights('./out/rbmw1.chp')
# Train Second RBM
print('second rbm')
for i in range(FLAGS.epochs):
for j in range(iterations):
batch_xs, batch_ys = mnist.train.next_batch(FLAGS.batchsize)
# Transform features with first rbm for second rbm
batch_xs = rbmobject1.transform(batch_xs)
rbmobject2.partial_fit(batch_xs)
print(rbmobject2.compute_cost(rbmobject1.transform(trX)))
show_image("out/2rbm.jpg", rbmobject2.n_w, (30, 30), (25, 20))
rbmobject2.save_weights('./out/rbmw2.chp')
# Train Third RBM
print('third rbm')
for i in range(FLAGS.epochs):
for j in range(iterations):
# Transform features
|
print(rbmobject3.compute_cost(rbmobject2.transform(rbmobject1.transform(trX))))
show_image("out/3rbm.jpg", rbmobject3.n_w, (25, 20), (25, 10))
rbmobject3.save_weights('./out/rbmw3.chp')
# Train Fourth RBM
print('fourth rbm')
for i in range(FLAGS.epochs):
for j in range(iterations):
batch_xs, batch_ys = mnist.train.next_batch(FLAGS.batchsize)
# Transform features
batch_xs = rbmobject1.transform(batch_xs)
batch_xs = rbmobject2.transform(batch_xs)
batch_xs = rbmobject3.transform(batch_xs)
rbmobject4.partial_fit(batch_xs)
print(rbmobject4.compute_cost(rbmobject3.transform(rbmobject2.transform(rbmobject1.transform(trX)))))
rbmobject4.save_weights('./out/rbmw4.chp')
# Load RBM weights to Autoencoder
autoencoder.load_rbm_weights('./out/rbmw1.chp', ['rbmw1', 'rbmhb1'], 0)
autoencoder.load_rbm_weights('./out/rbmw2.chp', ['rbmw2', 'rbmhb2'], 1)
autoencoder.load_rbm_weights('./out/rbmw3.chp', ['rbmw3', 'rbmhb3'], 2)
autoencoder.load_rbm_weights('./out/rbmw4.chp', ['rbmw4', 'rbmhb4'], 3)
# Train Autoencoder
print('autoencoder')
for i in range(FLAGS.epochs):
cost = 0.0
for j in range(iterations):
batch_xs, batch_ys = mnist.train.next_batch(FLAGS.batchsize)
cost += autoencoder.partial_fit(batch_xs)
print(cost)
autoencoder.save_weights('./out/au.chp')
autoencoder.load_weights('./out/au.chp')
fig, ax = plt.subplots()
print(autoencoder.transform(teX)[:, 0])
print(autoencoder.transform(teX)[:, 1])
plt.scatter(autoencoder.transform(teX)[:, 0], autoencoder.transform(teX)[:, 1], alpha=0.5)
# Save before show(): some backends discard the figure once its window closes.
plt.savefig('out/myfig')
plt.show()
raw_input("Press Enter to continue...")
| batch_xs, batch_ys = mnist.train.next_batch(FLAGS.batchsize)
batch_xs = rbmobject1.transform(batch_xs)
batch_xs = rbmobject2.transform(batch_xs)
rbmobject3.partial_fit(batch_xs) | conditional_block |
test-ae-rbm.py | import os
from rbm import RBM
from au import AutoEncoder
import tensorflow as tf
import input_data
from utilsnn import show_image, min_max_scale
import matplotlib.pyplot as plt
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_string('data_dir', '/tmp/data/', 'Directory for storing data')
flags.DEFINE_integer('epochs', 50, 'The number of training epochs')
flags.DEFINE_integer('batchsize', 30, 'The batch size')
flags.DEFINE_boolean('restore_rbm', False, 'Whether to restore the RBM weights or not.')
# ensure output dir exists
if not os.path.isdir('out'):
os.mkdir('out')
mnist = input_data.read_data_sets(FLAGS.data_dir, one_hot=True)
trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels
trX, teX = min_max_scale(trX, teX)
# RBMs
rbmobject1 = RBM(784, 900, ['rbmw1', 'rbvb1', 'rbmhb1'], 0.3)
rbmobject2 = RBM(900, 500, ['rbmw2', 'rbvb2', 'rbmhb2'], 0.3)
rbmobject3 = RBM(500, 250, ['rbmw3', 'rbvb3', 'rbmhb3'], 0.3)
rbmobject4 = RBM(250, 2, ['rbmw4', 'rbvb4', 'rbmhb4'], 0.3)
if FLAGS.restore_rbm:
rbmobject1.restore_weights('./out/rbmw1.chp')
rbmobject2.restore_weights('./out/rbmw2.chp')
rbmobject3.restore_weights('./out/rbmw3.chp')
rbmobject4.restore_weights('./out/rbmw4.chp')
# Autoencoder
autoencoder = AutoEncoder(784, [900, 500, 250, 2], [['rbmw1', 'rbmhb1'],
['rbmw2', 'rbmhb2'],
['rbmw3', 'rbmhb3'],
['rbmw4', 'rbmhb4']], tied_weights=False)
iterations = len(trX) // FLAGS.batchsize  # integer division so range() accepts it
# Train First RBM
print('first rbm')
for i in range(FLAGS.epochs):
for j in range(iterations):
batch_xs, batch_ys = mnist.train.next_batch(FLAGS.batchsize) |
# Train Second RBM
print('second rbm')
for i in range(FLAGS.epochs):
for j in range(iterations):
batch_xs, batch_ys = mnist.train.next_batch(FLAGS.batchsize)
# Transform features with first rbm for second rbm
batch_xs = rbmobject1.transform(batch_xs)
rbmobject2.partial_fit(batch_xs)
print(rbmobject2.compute_cost(rbmobject1.transform(trX)))
show_image("out/2rbm.jpg", rbmobject2.n_w, (30, 30), (25, 20))
rbmobject2.save_weights('./out/rbmw2.chp')
# Train Third RBM
print('third rbm')
for i in range(FLAGS.epochs):
for j in range(iterations):
# Transform features
batch_xs, batch_ys = mnist.train.next_batch(FLAGS.batchsize)
batch_xs = rbmobject1.transform(batch_xs)
batch_xs = rbmobject2.transform(batch_xs)
rbmobject3.partial_fit(batch_xs)
print(rbmobject3.compute_cost(rbmobject2.transform(rbmobject1.transform(trX))))
show_image("out/3rbm.jpg", rbmobject3.n_w, (25, 20), (25, 10))
rbmobject3.save_weights('./out/rbmw3.chp')
# Train Fourth RBM
print('fourth rbm')
for i in range(FLAGS.epochs):
for j in range(iterations):
batch_xs, batch_ys = mnist.train.next_batch(FLAGS.batchsize)
# Transform features
batch_xs = rbmobject1.transform(batch_xs)
batch_xs = rbmobject2.transform(batch_xs)
batch_xs = rbmobject3.transform(batch_xs)
rbmobject4.partial_fit(batch_xs)
print(rbmobject4.compute_cost(rbmobject3.transform(rbmobject2.transform(rbmobject1.transform(trX)))))
rbmobject4.save_weights('./out/rbmw4.chp')
# Load RBM weights to Autoencoder
autoencoder.load_rbm_weights('./out/rbmw1.chp', ['rbmw1', 'rbmhb1'], 0)
autoencoder.load_rbm_weights('./out/rbmw2.chp', ['rbmw2', 'rbmhb2'], 1)
autoencoder.load_rbm_weights('./out/rbmw3.chp', ['rbmw3', 'rbmhb3'], 2)
autoencoder.load_rbm_weights('./out/rbmw4.chp', ['rbmw4', 'rbmhb4'], 3)
# Train Autoencoder
print('autoencoder')
for i in range(FLAGS.epochs):
cost = 0.0
for j in range(iterations):
batch_xs, batch_ys = mnist.train.next_batch(FLAGS.batchsize)
cost += autoencoder.partial_fit(batch_xs)
print(cost)
autoencoder.save_weights('./out/au.chp')
autoencoder.load_weights('./out/au.chp')
fig, ax = plt.subplots()
print(autoencoder.transform(teX)[:, 0])
print(autoencoder.transform(teX)[:, 1])
plt.scatter(autoencoder.transform(teX)[:, 0], autoencoder.transform(teX)[:, 1], alpha=0.5)
plt.savefig('out/myfig')
plt.show()
raw_input("Press Enter to continue...") | rbmobject1.partial_fit(batch_xs)
print(rbmobject1.compute_cost(trX))
show_image("out/1rbm.jpg", rbmobject1.n_w, (28, 28), (30, 30))
rbmobject1.save_weights('./out/rbmw1.chp') | random_line_split |
run_and_gather_logs.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test runner for TensorFlow tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shlex
import sys
import tensorflow as tf
# pylint: disable=g-import-not-at-top
# pylint: disable=g-bad-import-order
# pylint: disable=unused-import
# Note: cpuinfo and psutil are not installed for you in the TensorFlow
# OSS tree. They are installable via pip.
try:
import cpuinfo
import psutil
except ImportError as e:
tf.logging.error("\n\n\nERROR: Unable to import necessary library: {}. "
"Issuing a soft exit.\n\n\n".format(e))
sys.exit(0)
# pylint: enable=g-bad-import-order
# pylint: enable=unused-import
from google.protobuf import text_format
from tensorflow.core.util import test_log_pb2
from tensorflow.tools.test import run_and_gather_logs_lib
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("test_name", "", """Test target to run.""")
tf.app.flags.DEFINE_string(
"test_args", "", """Test arguments, space separated.""")
tf.app.flags.DEFINE_string(
"test_log_output", "", """Filename to write logs.""")
tf.app.flags.DEFINE_bool(
"test_log_output_use_tmpdir", False,
"""Store the log output into tmpdir?.""")
tf.app.flags.DEFINE_string(
"compilation_mode", "", """Mode used during this build (e.g. opt, dbg).""")
tf.app.flags.DEFINE_string(
"cc_flags", "", """CC flags used during this build.""")
def gather_build_configuration():
build_config = test_log_pb2.BuildConfiguration()
build_config.mode = FLAGS.compilation_mode
# Include all flags except includes
cc_flags = [
flag for flag in shlex.split(FLAGS.cc_flags)
if not flag.startswith("-i")]
build_config.cc_flags.extend(cc_flags)
return build_config
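# For example (illustrative): shlex.split turns --cc_flags="-O2 -iquote/src -g"
# into ["-O2", "-iquote/src", "-g"], and the "-i..." include entry is filtered
# out, leaving build_config.cc_flags == ["-O2", "-g"].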
def | (unused_args):
test_name = FLAGS.test_name
test_args = FLAGS.test_args
test_results, _ = run_and_gather_logs_lib.run_and_gather_logs(
test_name, test_args)
# Additional bits we receive from bazel
test_results.build_configuration.CopyFrom(gather_build_configuration())
serialized_test_results = text_format.MessageToString(test_results)
if not FLAGS.test_log_output:
print(serialized_test_results)
return
if FLAGS.test_log_output_use_tmpdir:
tmpdir = tf.test.get_temp_dir()
output_path = os.path.join(tmpdir, FLAGS.test_log_output)
else:
output_path = os.path.abspath(FLAGS.test_log_output)
tf.gfile.GFile(output_path, "w").write(serialized_test_results)
tf.logging.info("Test results written to: %s" % output_path)
if __name__ == "__main__":
tf.app.run()
| main | identifier_name |
run_and_gather_logs.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test runner for TensorFlow tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shlex
import sys
import tensorflow as tf
# pylint: disable=g-import-not-at-top
# pylint: disable=g-bad-import-order
# pylint: disable=unused-import
# Note: cpuinfo and psutil are not installed for you in the TensorFlow
# OSS tree. They are installable via pip.
try:
import cpuinfo
import psutil
except ImportError as e:
tf.logging.error("\n\n\nERROR: Unable to import necessary library: {}. "
"Issuing a soft exit.\n\n\n".format(e))
sys.exit(0)
# pylint: enable=g-bad-import-order
# pylint: enable=unused-import
from google.protobuf import text_format
from tensorflow.core.util import test_log_pb2
from tensorflow.tools.test import run_and_gather_logs_lib
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("test_name", "", """Test target to run.""")
tf.app.flags.DEFINE_string(
"test_args", "", """Test arguments, space separated.""")
tf.app.flags.DEFINE_string(
"test_log_output", "", """Filename to write logs.""")
tf.app.flags.DEFINE_bool(
"test_log_output_use_tmpdir", False,
"""Store the log output into tmpdir?.""")
tf.app.flags.DEFINE_string(
"compilation_mode", "", """Mode used during this build (e.g. opt, dbg).""")
tf.app.flags.DEFINE_string(
"cc_flags", "", """CC flags used during this build.""")
def gather_build_configuration():
build_config = test_log_pb2.BuildConfiguration()
build_config.mode = FLAGS.compilation_mode
# Include all flags except includes
cc_flags = [
flag for flag in shlex.split(FLAGS.cc_flags)
if not flag.startswith("-i")]
build_config.cc_flags.extend(cc_flags)
return build_config
def main(unused_args):
test_name = FLAGS.test_name
test_args = FLAGS.test_args
test_results, _ = run_and_gather_logs_lib.run_and_gather_logs(
test_name, test_args)
# Additional bits we receive from bazel
test_results.build_configuration.CopyFrom(gather_build_configuration())
serialized_test_results = text_format.MessageToString(test_results)
if not FLAGS.test_log_output:
print(serialized_test_results)
return
if FLAGS.test_log_output_use_tmpdir:
tmpdir = tf.test.get_temp_dir() | output_path = os.path.abspath(FLAGS.test_log_output)
tf.gfile.GFile(output_path, "w").write(serialized_test_results)
tf.logging.info("Test results written to: %s" % output_path)
if __name__ == "__main__":
tf.app.run() | output_path = os.path.join(tmpdir, FLAGS.test_log_output)
else: | random_line_split |
run_and_gather_logs.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test runner for TensorFlow tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shlex
import sys
import tensorflow as tf
# pylint: disable=g-import-not-at-top
# pylint: disable=g-bad-import-order
# pylint: disable=unused-import
# Note: cpuinfo and psutil are not installed for you in the TensorFlow
# OSS tree. They are installable via pip.
try:
import cpuinfo
import psutil
except ImportError as e:
tf.logging.error("\n\n\nERROR: Unable to import necessary library: {}. "
"Issuing a soft exit.\n\n\n".format(e))
sys.exit(0)
# pylint: enable=g-bad-import-order
# pylint: enable=unused-import
from google.protobuf import text_format
from tensorflow.core.util import test_log_pb2
from tensorflow.tools.test import run_and_gather_logs_lib
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("test_name", "", """Test target to run.""")
tf.app.flags.DEFINE_string(
"test_args", "", """Test arguments, space separated.""")
tf.app.flags.DEFINE_string(
"test_log_output", "", """Filename to write logs.""")
tf.app.flags.DEFINE_bool(
"test_log_output_use_tmpdir", False,
"""Store the log output into tmpdir?.""")
tf.app.flags.DEFINE_string(
"compilation_mode", "", """Mode used during this build (e.g. opt, dbg).""")
tf.app.flags.DEFINE_string(
"cc_flags", "", """CC flags used during this build.""")
def gather_build_configuration():
build_config = test_log_pb2.BuildConfiguration()
build_config.mode = FLAGS.compilation_mode
# Include all flags except includes
cc_flags = [
flag for flag in shlex.split(FLAGS.cc_flags)
if not flag.startswith("-i")]
build_config.cc_flags.extend(cc_flags)
return build_config
def main(unused_args):
test_name = FLAGS.test_name
test_args = FLAGS.test_args
test_results, _ = run_and_gather_logs_lib.run_and_gather_logs(
test_name, test_args)
# Additional bits we receive from bazel
test_results.build_configuration.CopyFrom(gather_build_configuration())
serialized_test_results = text_format.MessageToString(test_results)
if not FLAGS.test_log_output:
|
if FLAGS.test_log_output_use_tmpdir:
tmpdir = tf.test.get_temp_dir()
output_path = os.path.join(tmpdir, FLAGS.test_log_output)
else:
output_path = os.path.abspath(FLAGS.test_log_output)
tf.gfile.GFile(output_path, "w").write(serialized_test_results)
tf.logging.info("Test results written to: %s" % output_path)
if __name__ == "__main__":
tf.app.run()
| print(serialized_test_results)
return | conditional_block |
run_and_gather_logs.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test runner for TensorFlow tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shlex
import sys
import tensorflow as tf
# pylint: disable=g-import-not-at-top
# pylint: disable=g-bad-import-order
# pylint: disable=unused-import
# Note: cpuinfo and psutil are not installed for you in the TensorFlow
# OSS tree. They are installable via pip.
try:
import cpuinfo
import psutil
except ImportError as e:
tf.logging.error("\n\n\nERROR: Unable to import necessary library: {}. "
"Issuing a soft exit.\n\n\n".format(e))
sys.exit(0)
# pylint: enable=g-bad-import-order
# pylint: enable=unused-import
from google.protobuf import text_format
from tensorflow.core.util import test_log_pb2
from tensorflow.tools.test import run_and_gather_logs_lib
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("test_name", "", """Test target to run.""")
tf.app.flags.DEFINE_string(
"test_args", "", """Test arguments, space separated.""")
tf.app.flags.DEFINE_string(
"test_log_output", "", """Filename to write logs.""")
tf.app.flags.DEFINE_bool(
"test_log_output_use_tmpdir", False,
"""Store the log output into tmpdir?.""")
tf.app.flags.DEFINE_string(
"compilation_mode", "", """Mode used during this build (e.g. opt, dbg).""")
tf.app.flags.DEFINE_string(
"cc_flags", "", """CC flags used during this build.""")
def gather_build_configuration():
build_config = test_log_pb2.BuildConfiguration()
build_config.mode = FLAGS.compilation_mode
# Include all flags except includes
cc_flags = [
flag for flag in shlex.split(FLAGS.cc_flags)
if not flag.startswith("-i")]
build_config.cc_flags.extend(cc_flags)
return build_config
def main(unused_args):
| tf.logging.info("Test results written to: %s" % output_path)
if __name__ == "__main__":
tf.app.run()
| test_name = FLAGS.test_name
test_args = FLAGS.test_args
test_results, _ = run_and_gather_logs_lib.run_and_gather_logs(
test_name, test_args)
# Additional bits we receive from bazel
test_results.build_configuration.CopyFrom(gather_build_configuration())
serialized_test_results = text_format.MessageToString(test_results)
if not FLAGS.test_log_output:
print(serialized_test_results)
return
if FLAGS.test_log_output_use_tmpdir:
tmpdir = tf.test.get_temp_dir()
output_path = os.path.join(tmpdir, FLAGS.test_log_output)
else:
output_path = os.path.abspath(FLAGS.test_log_output)
tf.gfile.GFile(output_path, "w").write(serialized_test_results) | identifier_body |
models.py | import django.db.models as models
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.db.models.signals import post_save
class | (models.Model):
"""
parameters we can get from gigya:
birthMonth,isLoggedIn,city,UID,zip,birthYear,state,provider,email,
UIDSig,photoURL,timestamp,loginProviderUID,signature,isSiteUID,proxiedEmail
,thumbnailURL,nickname,firstName,loginProvider,gender,lastName,profileURL
birthDay,country,isSiteUser
    One unique user can have several UIDs.
"""
user = models.ForeignKey(User, unique=True, null=True)
uid = models.CharField(max_length=255)
login_provider = models.CharField(max_length=150)
timestamp = models.DateTimeField(null=True,blank=True)
isLoggedIn = models.BooleanField(default=False)
birthday = models.DateField(null=True,blank=True)
city = models.CharField(max_length=150, null=True,blank=True)
state = models.CharField(max_length=150, null=True,blank=True)
zip = models.CharField(max_length=30, null=True,blank=True)
country = models.CharField(max_length=30, null=True,blank=True)
photourl = models.CharField(max_length=255, null=True,blank=True)
first_name = models.CharField(max_length=80, null=True,blank=True)
last_name = models.CharField(max_length=80, null=True,blank=True)
gender = models.CharField(max_length=2, null=True,blank=True)
    profileUrl = models.CharField(max_length=255, null=True, blank=True)
def create_profile(sender, instance=None, **kwargs):
if instance is None:
return
profile, created = Profile.objects.get_or_create(user=instance)
post_save.connect(create_profile, sender=User) | Profile | identifier_name |
models.py | import django.db.models as models
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.db.models.signals import post_save
class Profile(models.Model):
"""
parameters we can get from gigya:
birthMonth,isLoggedIn,city,UID,zip,birthYear,state,provider,email,
UIDSig,photoURL,timestamp,loginProviderUID,signature,isSiteUID,proxiedEmail
,thumbnailURL,nickname,firstName,loginProvider,gender,lastName,profileURL
birthDay,country,isSiteUser
    One unique user can have several UIDs.
"""
user = models.ForeignKey(User, unique=True, null=True)
uid = models.CharField(max_length=255)
login_provider = models.CharField(max_length=150)
timestamp = models.DateTimeField(null=True,blank=True)
isLoggedIn = models.BooleanField(default=False)
birthday = models.DateField(null=True,blank=True)
city = models.CharField(max_length=150, null=True,blank=True)
state = models.CharField(max_length=150, null=True,blank=True)
zip = models.CharField(max_length=30, null=True,blank=True)
country = models.CharField(max_length=30, null=True,blank=True)
photourl = models.CharField(max_length=255, null=True,blank=True)
first_name = models.CharField(max_length=80, null=True,blank=True)
last_name = models.CharField(max_length=80, null=True,blank=True)
gender = models.CharField(max_length=2, null=True,blank=True)
    profileUrl = models.CharField(max_length=255, null=True, blank=True)
def create_profile(sender, instance=None, **kwargs):
if instance is None:
|
profile, created = Profile.objects.get_or_create(user=instance)
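# Illustrative effect (hypothetical usage): once the signal below is connected,
# saving a new User automatically creates a matching Profile row, e.g.
#   user = User.objects.create(username="alice")  # fires post_save
#   Profile.objects.get(user=user)                # row added by create_profile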
post_save.connect(create_profile, sender=User) | return | conditional_block |
models.py | import django.db.models as models | from django.utils.translation import ugettext_lazy as _
from django.db.models.signals import post_save
class Profile(models.Model):
"""
parameters we can get from gigya:
birthMonth,isLoggedIn,city,UID,zip,birthYear,state,provider,email,
UIDSig,photoURL,timestamp,loginProviderUID,signature,isSiteUID,proxiedEmail
,thumbnailURL,nickname,firstName,loginProvider,gender,lastName,profileURL
birthDay,country,isSiteUser
    One unique user can have several UIDs.
"""
user = models.ForeignKey(User, unique=True, null=True)
uid = models.CharField(max_length=255)
login_provider = models.CharField(max_length=150)
timestamp = models.DateTimeField(null=True,blank=True)
isLoggedIn = models.BooleanField(default=False)
birthday = models.DateField(null=True,blank=True)
city = models.CharField(max_length=150, null=True,blank=True)
state = models.CharField(max_length=150, null=True,blank=True)
zip = models.CharField(max_length=30, null=True,blank=True)
country = models.CharField(max_length=30, null=True,blank=True)
photourl = models.CharField(max_length=255, null=True,blank=True)
first_name = models.CharField(max_length=80, null=True,blank=True)
last_name = models.CharField(max_length=80, null=True,blank=True)
gender = models.CharField(max_length=2, null=True,blank=True)
    profileUrl = models.CharField(max_length=255, null=True, blank=True)
def create_profile(sender, instance=None, **kwargs):
if instance is None:
return
profile, created = Profile.objects.get_or_create(user=instance)
post_save.connect(create_profile, sender=User) | from django.contrib.auth.models import User | random_line_split |
models.py | import django.db.models as models
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.db.models.signals import post_save
class Profile(models.Model):
"""
parameters we can get from gigya:
birthMonth,isLoggedIn,city,UID,zip,birthYear,state,provider,email,
UIDSig,photoURL,timestamp,loginProviderUID,signature,isSiteUID,proxiedEmail
,thumbnailURL,nickname,firstName,loginProvider,gender,lastName,profileURL
birthDay,country,isSiteUser
    One unique user can have several UIDs.
"""
user = models.ForeignKey(User, unique=True, null=True)
uid = models.CharField(max_length=255)
login_provider = models.CharField(max_length=150)
timestamp = models.DateTimeField(null=True,blank=True)
isLoggedIn = models.BooleanField(default=False)
birthday = models.DateField(null=True,blank=True)
city = models.CharField(max_length=150, null=True,blank=True)
state = models.CharField(max_length=150, null=True,blank=True)
zip = models.CharField(max_length=30, null=True,blank=True)
country = models.CharField(max_length=30, null=True,blank=True)
photourl = models.CharField(max_length=255, null=True,blank=True)
first_name = models.CharField(max_length=80, null=True,blank=True)
last_name = models.CharField(max_length=80, null=True,blank=True)
gender = models.CharField(max_length=2, null=True,blank=True)
    profileUrl = models.CharField(max_length=255, null=True, blank=True)
def create_profile(sender, instance=None, **kwargs):
|
post_save.connect(create_profile, sender=User) | if instance is None:
return
profile, created = Profile.objects.get_or_create(user=instance) | identifier_body |
size_of.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use selectors;
use servo_arc::Arc;
use style;
use style::applicable_declarations::ApplicableDeclarationBlock;
use style::data::{ElementData, ElementStyles};
use style::gecko::selector_parser::{self, SelectorImpl};
use style::properties::ComputedValues;
use style::rule_tree::{RuleNode, StrongRuleNode};
use style::values::computed;
use style::values::specified;
size_of_test!(size_of_selector, selectors::parser::Selector<SelectorImpl>, 8);
size_of_test!(size_of_pseudo_element, selector_parser::PseudoElement, 24);
size_of_test!(size_of_component, selectors::parser::Component<SelectorImpl>, 32);
size_of_test!(size_of_pseudo_class, selector_parser::NonTSPseudoClass, 24);
// The size of this is critical to performance on the bloom-basic microbenchmark.
// When iterating over a large Rule array, we want to be able to fast-reject
// selectors (with the inline hashes) with as few cache misses as possible.
size_of_test!(test_size_of_rule, style::stylist::Rule, 32);
// Large pages generate tens of thousands of ComputedValues.
size_of_test!(test_size_of_cv, ComputedValues, 248);
size_of_test!(test_size_of_option_arc_cv, Option<Arc<ComputedValues>>, 8);
size_of_test!(test_size_of_option_rule_node, Option<StrongRuleNode>, 8);
size_of_test!(test_size_of_element_styles, ElementStyles, 16);
size_of_test!(test_size_of_element_data, ElementData, 24);
size_of_test!(test_size_of_property_declaration, style::properties::PropertyDeclaration, 32);
size_of_test!(test_size_of_application_declaration_block, ApplicableDeclarationBlock, 24);
// FIXME(bholley): This can shrink with a little bit of work.
// See https://github.com/servo/servo/issues/17280
size_of_test!(test_size_of_rule_node, RuleNode, 80);
// This is huge, but we allocate it on the stack and then never move it,
// we only pass `&mut SourcePropertyDeclaration` references around.
size_of_test!(test_size_of_parsed_declaration, style::properties::SourcePropertyDeclaration, 576);
size_of_test!(test_size_of_computed_image, computed::image::Image, 40);
size_of_test!(test_size_of_specified_image, specified::image::Image, 40);
// FIXME(bz): These can shrink if we move the None_ value inside the
// enum instead of paying an extra word for the Either discriminant.
size_of_test!(test_size_of_computed_image_layer, computed::image::ImageLayer, | if cfg!(rustc_has_pr45225) { 40 } else { 48 });
size_of_test!(test_size_of_specified_image_layer, specified::image::ImageLayer,
if cfg!(rustc_has_pr45225) { 40 } else { 48 }); | random_line_split |
|
form.element.view.hidden.js | /*
* Copyright (c) 2011-2013 Lp digital system
*
* This file is part of BackBee.
*
* BackBee is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* BackBee is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with BackBee. If not, see <http://www.gnu.org/licenses/>.
*/
define(['Core', 'Core/Renderer', 'BackBone'], function (Core, Renderer, Backbone) {
'use strict';
var HiddenView = Backbone.View.extend({
initialize: function (template, formTag, element) {
this.el = formTag;
this.template = template;
this.element = element;
this.bindEvents();
},
bindEvents: function () {
var self = this;
Core.Mediator.subscribe('before:form:submit', function (form) {
if (form.attr('id') === self.el) {
var element = form.find('.element_' + self.element.getKey()),
input = element.find('input[name="' + self.element.getKey() + '"]'),
span = element.find('span.updated'),
oldValue = self.element.value;
if (input.val() !== oldValue) {
span.text('updated');
} else |
}
});
},
/**
* Render the template into the DOM with the Renderer
* @returns {String} html
*/
render: function () {
return Renderer.render(this.template, {element: this.element});
}
});
return HiddenView;
}); | {
span.text('');
} | conditional_block |
form.element.view.hidden.js | /*
* Copyright (c) 2011-2013 Lp digital system
*
* This file is part of BackBee.
*
* BackBee is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* | *
* You should have received a copy of the GNU General Public License
* along with BackBee. If not, see <http://www.gnu.org/licenses/>.
*/
define(['Core', 'Core/Renderer', 'BackBone'], function (Core, Renderer, Backbone) {
'use strict';
var HiddenView = Backbone.View.extend({
initialize: function (template, formTag, element) {
this.el = formTag;
this.template = template;
this.element = element;
this.bindEvents();
},
bindEvents: function () {
var self = this;
Core.Mediator.subscribe('before:form:submit', function (form) {
if (form.attr('id') === self.el) {
var element = form.find('.element_' + self.element.getKey()),
input = element.find('input[name="' + self.element.getKey() + '"]'),
span = element.find('span.updated'),
oldValue = self.element.value;
if (input.val() !== oldValue) {
span.text('updated');
} else {
span.text('');
}
}
});
},
/**
* Render the template into the DOM with the Renderer
* @returns {String} html
*/
render: function () {
return Renderer.render(this.template, {element: this.element});
}
});
return HiddenView;
}); | * BackBee is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details. | random_line_split |
Score.py | # coding: utf-8
import pygame
import os
from color import *
from pygame.locals import *
class Score(pygame.sprite.Sprite):
def __init__(self, score, player, width, height):
super(Score, self).__init__()
self.score = int(score)
self.color = None
self.player = player
self.bossHeight = height
self.bossWidth = width
self.size = 70
self.update()
def update(self):
self.score = int(self.score)
self.whatColor()
self.score = str(self.score)
scoreFont = pygame.font.Font('./fonts/Dearest.ttf', self.size)
# We need to convert to int for the comparisons in 'self.whatColor',
# and 'scoreFont.render' only takes 'str' as argument
self.surface = scoreFont.render(self.score, True, self.color)
self.rect = self.surface.get_rect()
if self.player == 1:
self.rect.center = (55, self.bossHeight - 50)
elif self.player == -1:
self.rect.center = (self.bossWidth - 55, self.bossHeight - 50)
def whatColor(self):
self.size = 80
if self.score < 6:
self.color = white
elif self.score < 8:
self.color = aqua
elif self.score < 10:
self.color = blueGreen
else:
self.color = lime
self.size = 100
def updateScore(self, score):
self.score = score
def __repr__(self): | return "<Score de ", str(self.player), "= ", str(self.score) | random_line_split |
|  | return "<Score of player %s: %s>" % (self.player, self.score) | random_line_split
Score.py | # coding: utf-8
import pygame
import os
from color import *
from pygame.locals import *
class Score(pygame.sprite.Sprite):
def __init__(self, score, player, width, height):
super(Score, self).__init__()
self.score = int(score)
self.color = None
self.player = player
self.bossHeight = height
self.bossWidth = width
self.size = 70
self.update()
def update(self):
self.score = int(self.score)
self.whatColor()
self.score = str(self.score)
scoreFont = pygame.font.Font('./fonts/Dearest.ttf', self.size)
# We need to convert to int for the comparisons in 'self.whatColor',
# and 'scoreFont.render' only takes 'str' as argument
self.surface = scoreFont.render(self.score, True, self.color)
self.rect = self.surface.get_rect()
if self.player == 1:
self.rect.center = (55, self.bossHeight - 50)
elif self.player == -1:
self.rect.center = (self.bossWidth - 55, self.bossHeight - 50)
def whatColor(self):
self.size = 80
if self.score < 6:
self.color = white
elif self.score < 8:
self.color = aqua
elif self.score < 10:
|
else:
self.color = lime
self.size = 100
def updateScore(self, score):
self.score = score
def __repr__(self):
return "<Score of player %s: %s>" % (self.player, self.score)
| self.color = blueGreen | conditional_block
Score.py | # coding: utf-8
import pygame
import os
from color import *
from pygame.locals import *
class Score(pygame.sprite.Sprite):
def __init__(self, score, player, width, height):
super(Score, self).__init__()
self.score = int(score)
self.color = None
self.player = player
self.bossHeight = height
self.bossWidth = width
self.size = 70
self.update()
def update(self):
self.score = int(self.score)
self.whatColor()
self.score = str(self.score)
scoreFont = pygame.font.Font('./fonts/Dearest.ttf', self.size)
# We need to convert to int for the comparisons in 'self.whatColor',
# and 'scoreFont.render' only takes 'str' as argument
self.surface = scoreFont.render(self.score, True, self.color)
self.rect = self.surface.get_rect()
if self.player == 1:
self.rect.center = (55, self.bossHeight - 50)
elif self.player == -1:
self.rect.center = (self.bossWidth - 55, self.bossHeight - 50)
def whatColor(self):
self.size = 80
if self.score < 6:
self.color = white
elif self.score < 8:
self.color = aqua
elif self.score < 10:
self.color = blueGreen
else:
self.color = lime
self.size = 100
def updateScore(self, score):
self.score = score
def __repr__(self):
|
| return "<Score of player %s: %s>" % (self.player, self.score) | identifier_body
Score.py | # coding: utf-8
import pygame
import os
from color import *
from pygame.locals import *
class Score(pygame.sprite.Sprite):
def | (self, score, player, width, height):
super(Score, self).__init__()
self.score = int(score)
self.color = None
self.player = player
self.bossHeight = height
self.bossWidth = width
self.size = 70
self.update()
def update(self):
self.score = int(self.score)
self.whatColor()
self.score = str(self.score)
scoreFont = pygame.font.Font('./fonts/Dearest.ttf', self.size)
# We need to convert to int for the comparisons in 'self.whatColor',
# and 'scoreFont.render' only takes 'str' as argument
self.surface = scoreFont.render(self.score, True, self.color)
self.rect = self.surface.get_rect()
if self.player == 1:
self.rect.center = (55, self.bossHeight - 50)
elif self.player == -1:
self.rect.center = (self.bossWidth - 55, self.bossHeight - 50)
def whatColor(self):
self.size = 80
if self.score < 6:
self.color = white
elif self.score < 8:
self.color = aqua
elif self.score < 10:
self.color = blueGreen
else:
self.color = lime
self.size = 100
def updateScore(self, score):
self.score = score
def __repr__(self):
return "<Score of player %s: %s>" % (self.player, self.score)
| __init__ | identifier_name
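Note on the Score rows above: pygame.sprite.Sprite only registers a sprite correctly when its own __init__ runs, which is why the initializer call was corrected to super(Score, self).__init__(). A minimal sketch of the conventional pattern follows; the class name and surface size are illustrative, not part of the original file:

```python
import pygame

class HudLabel(pygame.sprite.Sprite):
    def __init__(self, *groups):
        # Let Sprite set up its group bookkeeping first.
        super(HudLabel, self).__init__(*groups)
        self.image = pygame.Surface((80, 20))
        self.rect = self.image.get_rect()
```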
OrganizationListItem.tsx | import * as React from "react";
import { ListGroupItem } from "react-bootstrap";
import { Organization } from "../State/Organization";
interface IOrganizationListItemProps
{
organization: Organization;
}
export class OrganizationListItem extends React.Component<IOrganizationListItemProps, {}>
{
public render()
{
let email;
if (this.props.organization.email)
| >{this.props.organization.email}</a></p>;
}
let phone;
if (this.props.organization.phone)
{
phone = <p>Phone: <a href={`tel:${this.props.organization.phone}`}>{this.props.organization.phone}</a></p>;
}
let address;
if (this.props.organization.address)
{
address = <p>Address: {this.props.organization.address}</p>;
}
return (
<ListGroupItem>
<h3>{this.props.organization.name}</h3>
<p>{this.props.organization.description}</p>
{email}
{phone}
{address}
</ListGroupItem>
);
}
}
| {
email = <p>Email: <a href={`mailto:${this.props.organization.email}`} | conditional_block |
OrganizationListItem.tsx | import * as React from "react";
import { ListGroupItem } from "react-bootstrap";
import { Organization } from "../State/Organization";
interface IOrganizationListItemProps
{
organization: Organization;
}
export class OrganizationListItem extends React.Component<IOrganizationListItemProps, {}>
{
public render()
{
let email;
if (this.props.organization.email)
{
email = <p>Email: <a href={`mailto:${this.props.organization.email}`}>{this.props.organization.email}</a></p>;
}
let phone;
if (this.props.organization.phone)
{
phone = <p>Phone: <a href={`tel:${this.props.organization.phone}`}>{this.props.organization.phone}</a></p>;
}
let address;
| (this.props.organization.address)
{
address = <p>Address: {this.props.organization.address}</p>;
}
return (
<ListGroupItem>
<h3>{this.props.organization.name}</h3>
<p>{this.props.organization.description}</p>
{email}
{phone}
{address}
</ListGroupItem>
);
}
}
| if | identifier_name |
OrganizationListItem.tsx | import * as React from "react"; | import { ListGroupItem } from "react-bootstrap";
import { Organization } from "../State/Organization";
interface IOrganizationListItemProps
{
organization: Organization;
}
export class OrganizationListItem extends React.Component<IOrganizationListItemProps, {}>
{
public render()
{
let email;
if (this.props.organization.email)
{
email = <p>Email: <a href={`mailto:${this.props.organization.email}`}>{this.props.organization.email}</a></p>;
}
let phone;
if (this.props.organization.phone)
{
phone = <p>Phone: <a href={`tel:${this.props.organization.phone}`}>{this.props.organization.phone}</a></p>;
}
let address;
if (this.props.organization.address)
{
address = <p>Address: {this.props.organization.address}</p>;
}
return (
<ListGroupItem>
<h3>{this.props.organization.name}</h3>
<p>{this.props.organization.description}</p>
{email}
{phone}
{address}
</ListGroupItem>
);
}
} | random_line_split |
|
OrganizationListItem.tsx | import * as React from "react";
import { ListGroupItem } from "react-bootstrap";
import { Organization } from "../State/Organization";
interface IOrganizationListItemProps
{
organization: Organization;
}
export class OrganizationListItem extends React.Component<IOrganizationListItemProps, {}>
{
public render()
{
let email;
if (this.props.organization.email)
{
email = <p>Email: <a href={`mailto:${this.props.organization.email}`}>{this.props.organization.email}</a></p>;
}
let phone;
if (this.props.organization.phone)
{
phone = <p>Phone: <a href={`tel:${this.props.organization.phone}`}>{this.props.organization.phone}</a></p>;
}
let address;
if (this.props.organization.address)
|
return (
<ListGroupItem>
<h3>{this.props.organization.name}</h3>
<p>{this.props.organization.description}</p>
{email}
{phone}
{address}
</ListGroupItem>
);
}
}
| {
address = <p>Address: {this.props.organization.address}</p>;
} | identifier_body |
main.rs | use rand::Rng;
use std::cmp::Ordering;
use std::io;
fn main() {
println!("Guess the number!");
let secret_number = rand::thread_rng().gen_range(1..101);
println!("The secret number is: {}", secret_number);
loop {
println!("Please input your guess.");
let mut guess = String::new();
// ANCHOR: here
// --snip--
io::stdin()
.read_line(&mut guess)
.expect("Failed to read line"); | let guess: u32 = match guess.trim().parse() {
Ok(num) => num,
Err(_) => continue,
};
// ANCHOR_END: ch19
println!("You guessed: {}", guess);
// --snip--
// ANCHOR_END: here
match guess.cmp(&secret_number) {
Ordering::Less => println!("Too small!"),
Ordering::Greater => println!("Too big!"),
Ordering::Equal => {
println!("You win!");
break;
}
}
}
} |
// ANCHOR: ch19 | random_line_split |
main.rs | use rand::Rng;
use std::cmp::Ordering;
use std::io;
fn main() | let guess: u32 = match guess.trim().parse() {
Ok(num) => num,
Err(_) => continue,
};
// ANCHOR_END: ch19
println!("You guessed: {}", guess);
// --snip--
// ANCHOR_END: here
match guess.cmp(&secret_number) {
Ordering::Less => println!("Too small!"),
Ordering::Greater => println!("Too big!"),
Ordering::Equal => {
println!("You win!");
break;
}
}
}
}
| {
println!("Guess the number!");
let secret_number = rand::thread_rng().gen_range(1..101);
println!("The secret number is: {}", secret_number);
loop {
println!("Please input your guess.");
let mut guess = String::new();
// ANCHOR: here
// --snip--
io::stdin()
.read_line(&mut guess)
.expect("Failed to read line");
// ANCHOR: ch19 | identifier_body |
main.rs | use rand::Rng;
use std::cmp::Ordering;
use std::io;
fn | () {
println!("Guess the number!");
let secret_number = rand::thread_rng().gen_range(1..101);
println!("The secret number is: {}", secret_number);
loop {
println!("Please input your guess.");
let mut guess = String::new();
// ANCHOR: here
// --snip--
io::stdin()
.read_line(&mut guess)
.expect("Failed to read line");
// ANCHOR: ch19
let guess: u32 = match guess.trim().parse() {
Ok(num) => num,
Err(_) => continue,
};
// ANCHOR_END: ch19
println!("You guessed: {}", guess);
// --snip--
// ANCHOR_END: here
match guess.cmp(&secret_number) {
Ordering::Less => println!("Too small!"),
Ordering::Greater => println!("Too big!"),
Ordering::Equal => {
println!("You win!");
break;
}
}
}
}
| main | identifier_name |
single_thread_download.py | #!/usr/bin/python3
# -*- coding: UTF-8 -*-
# single_thread_download.py
import os
import urllib.request
import urllib.error
import shutil
# Single-threaded
def single_thread_download(url, file_name=None, overwrite=False):
# If no file name is given, derive it from the URL
if file_name is None:
file_name = ur | l.rpartition('/')[-1]
# Potential bug: when overwrite is False, an incomplete existing file (e.g. a partial download) is kept as-is
if os.path.exists(file_name) and (not overwrite):
return
try:
with urllib.request.urlopen(url) as response, open(file_name, 'wb') as out_stream:
shutil.copyfileobj(response, out_stream)
except urllib.error.URLError as e:
print(e.errno, '\n', e.reason, '\n')
# single_thread_download("http://screencasts.b0.upaiyun.com/podcasts/nil_podcast_1.m4a")
| identifier_body |
|
single_thread_download.py | #!/usr/bin/python3
# -*- coding: UTF-8 -*-
# single_thread_download.py
import os
import urllib.request
import urllib.error
import shutil
# Single-threaded
def single | file_name=None, overwrite=False):
# If no file name is given, derive it from the URL
if file_name is None:
file_name = url.rpartition('/')[-1]
# Potential bug: when overwrite is False, an incomplete existing file (e.g. a partial download) is kept as-is
if os.path.exists(file_name) and (not overwrite):
return
try:
with urllib.request.urlopen(url) as response, open(file_name, 'wb') as out_stream:
shutil.copyfileobj(response, out_stream)
except urllib.error.URLError as e:
print(e.errno, '\n', e.reason, '\n')
# single_thread_download("http://screencasts.b0.upaiyun.com/podcasts/nil_podcast_1.m4a")
| _thread_download(url, | identifier_name |
single_thread_download.py | #!/usr/bin/python3
# -*- coding: UTF-8 -*-
# single_thread_download.py
import os
import urllib.request
import urllib.error
import shutil
# Single-threaded
def single_thread_download(url, file_name=None, overwrite=False):
# If no file name is given, derive it from the URL
if file_name is None:
file_name = url.rpartition('/')[-1]
# Potential bug: when overwrite is False, an incomplete existing file (e.g. a partial download) is kept as-is
if os.path.exists(file_name) and (not overwrite):
return
try:
with urllib.request.urlopen(url) as response, open(file_name, 'wb') as out_stream:
shuti | fileobj(response, out_stream)
except urllib.error.URLError as e:
print(e.errno, '\n', e.reason, '\n')
# single_thread_download("http://screencasts.b0.upaiyun.com/podcasts/nil_podcast_1.m4a")
| l.copy | conditional_block |
single_thread_download.py | #!/usr/bin/python3
# -*- coding: UTF-8 -*-
# single_thread_download.py
import os
import urllib.request
import urllib.error | # 单线程
def single_thread_download(url, file_name=None, overwrite=False):
# If no file name is given, derive it from the URL
if file_name is None:
file_name = url.rpartition('/')[-1]
# Potential bug: when overwrite is False, an incomplete existing file (e.g. a partial download) is kept as-is
if os.path.exists(file_name) and (not overwrite):
return
try:
with urllib.request.urlopen(url) as response, open(file_name, 'wb') as out_stream:
shutil.copyfileobj(response, out_stream)
except urllib.error.URLError as e:
print(e.errno, '\n', e.reason, '\n')
# single_thread_download("http://screencasts.b0.upaiyun.com/podcasts/nil_podcast_1.m4a") | import shutil
| random_line_split |
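Regarding the "potential bug" flagged in the comments above: one hedged way to avoid trusting a possibly truncated file is to compare its size against the server-reported Content-Length before skipping the download. This is a sketch only; the helper name and the HEAD-request approach are assumptions, not part of the original module:

```python
import os
import urllib.request

def is_download_complete(url, file_name):
    # Treat a file as complete only when its size matches the
    # server-reported Content-Length; otherwise re-downloading is safer.
    if not os.path.exists(file_name):
        return False
    head = urllib.request.Request(url, method='HEAD')
    with urllib.request.urlopen(head) as response:
        length = response.headers.get('Content-Length')
    return length is not None and os.path.getsize(file_name) == int(length)
```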
environment.ts | // Angular 2
// rc2 workaround
import {enableDebugTools, disableDebugTools} from "@angular/platform-browser";
import {enableProdMode, ApplicationRef} from "@angular/core";
// Environment Providers
let PROVIDERS = [
// common env directives
];
// Angular debug tools in the dev console
// https://github.com/angular/angular/blob/86405345b781a9dc2438c0fbe3e9409245647019/TOOLS_JS.md
let _decorateModuleRef = function identity(value) {
return value;
};
if ('production' === ENV) | else {
_decorateModuleRef = (modRef: any) => {
var appRef = modRef.injector.get(ApplicationRef);
var cmpRef = appRef.components[0];
let _ng = (<any>window).ng;
enableDebugTools(cmpRef);
(<any>window).ng.probe = _ng.probe;
(<any>window).ng.coreTokens = _ng.coreTokens;
return modRef
};
// Development
PROVIDERS = [
...PROVIDERS,
// custom providers in development
];
}
export const decorateModuleRef = _decorateModuleRef;
export const ENV_PROVIDERS = [
...PROVIDERS
];
| {
// Production
//disableDebugTools();
enableProdMode();
PROVIDERS = [
...PROVIDERS,
// custom providers in production
];
} | conditional_block |
environment.ts | // Angular 2
// rc2 workaround
import {enableDebugTools, disableDebugTools} from "@angular/platform-browser";
import {enableProdMode, ApplicationRef} from "@angular/core";
// Environment Providers
let PROVIDERS = [
// common env directives
];
// Angular debug tools in the dev console
// https://github.com/angular/angular/blob/86405345b781a9dc2438c0fbe3e9409245647019/TOOLS_JS.md
let _decorateModuleRef = function identity(value) {
return value;
};
if ('production' === ENV) {
// Production
//disableDebugTools();
enableProdMode();
PROVIDERS = [
...PROVIDERS,
// custom providers in production
];
} else {
_decorateModuleRef = (modRef: any) => {
var appRef = modRef.injector.get(ApplicationRef);
var cmpRef = appRef.components[0];
let _ng = (<any>window).ng;
enableDebugTools(cmpRef);
(<any>window).ng.probe = _ng.probe;
(<any>window).ng.coreTokens = _ng.coreTokens;
return modRef
};
// Development
PROVIDERS = [
...PROVIDERS, | }
export const decorateModuleRef = _decorateModuleRef;
export const ENV_PROVIDERS = [
...PROVIDERS
]; | // custom providers in development
];
| random_line_split |
conv2d.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Schedules for conv2d. """
import tvm
def schedule_conv2d_nhwc(outs):
| """Schedule for Conv2d NHWC operator."""
s = tvm.te.create_schedule([x.op for x in outs])
return s | identifier_body |
|
conv2d.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Schedules for conv2d. """
import tvm
def | (outs):
"""Schedule for Conv2d NHWC operator."""
s = tvm.te.create_schedule([x.op for x in outs])
return s
| schedule_conv2d_nhwc | identifier_name |
conv2d.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at | # http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Schedules for conv2d. """
import tvm
def schedule_conv2d_nhwc(outs):
"""Schedule for Conv2d NHWC operator."""
s = tvm.te.create_schedule([x.op for x in outs])
return s | # | random_line_split |
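For context on the schedule stub above: a factory of this shape is normally handed the output tensors of a tensor-expression compute. A minimal illustration follows; the placeholder compute and shapes are assumptions for demonstration, not part of the original file:

```python
import tvm
from tvm import te

# A trivial NHWC-shaped elementwise compute, just to have something to schedule.
data = te.placeholder((1, 56, 56, 64), name="data")
scaled = te.compute(data.shape, lambda n, h, w, c: data[n, h, w, c] * 2.0, name="scaled")

s = schedule_conv2d_nhwc([scaled])  # the stub returns a default schedule
print(tvm.lower(s, [data, scaled], simple_mode=True))
```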
query.py | from .utils import DslBase, BoolMixin, _make_dsl_class
from .function import SF, ScoreFunction
__all__ = [
'Q', 'Bool', 'Boosting', 'Common', 'ConstantScore', 'DisMax', 'Filtered',
'FunctionScore', 'Fuzzy', 'FuzzyLikeThis', 'FuzzyLikeThisField',
'GeoShape', 'HasChild', 'HasParent', 'Ids', 'Indices', 'Match', 'MatchAll',
'MatchPhrase', 'MatchPhrasePrefix', 'MoreLikeThis', 'MoreLikeThisField',
'MultiMatch', 'Nested', 'Prefix', 'Query', 'QueryString', 'Range',
'Regexp', 'SF', 'ScoreFunction', 'SimpleQueryString', 'SpanFirst',
'SpanMulti', 'SpanNear', 'SpanNot', 'SpanOr', 'SpanTerm', 'Template',
'Term', 'Terms', 'TopChildren', 'Wildcard'
]
def Q(name_or_query='match_all', **params):
# {"match": {"title": "python"}}
if isinstance(name_or_query, dict):
if params:
raise ValueError('Q() cannot accept parameters when passing in a dict.')
if len(name_or_query) != 1:
raise ValueError('Q() can only accept dict with a single query ({"match": {...}}). '
'Instead it got (%r)' % name_or_query)
name, params = name_or_query.copy().popitem()
return Query.get_dsl_class(name)(**params)
# MatchAll()
if isinstance(name_or_query, Query):
if params:
raise ValueError('Q() cannot accept parameters when passing in a Query object.')
return name_or_query
# s.query = Q('filtered', query=s.query)
if hasattr(name_or_query, '_proxied'):
return name_or_query._proxied
# "match", title="python"
return Query.get_dsl_class(name_or_query)(**params)
class Query(DslBase):
_type_name = 'query'
_type_shortcut = staticmethod(Q)
name = None
class MatchAll(Query):
name = 'match_all'
def __add__(self, other):
return other._clone()
__and__ = __rand__ = __radd__ = __add__
def __or__(self, other):
return self
__ror__ = __or__
EMPTY_QUERY = MatchAll()
class Bool(BoolMixin, Query):
| q.minimum_should_match = min_should_match
q.should = qx.should
# not all are required, add a should list to the must with proper min_should_match
else:
q.must.append(Bool(should=qx.should, minimum_should_match=min_should_match))
else:
q.must.append(other)
return q
__rand__ = __and__
# register this as Bool for Query
Query._bool = Bool
class FunctionScore(Query):
name = 'function_score'
_param_defs = {
'query': {'type': 'query'},
'filter': {'type': 'filter'},
'functions': {'type': 'score_function', 'multi': True},
}
def __init__(self, **kwargs):
if 'functions' in kwargs:
pass
else:
fns = kwargs['functions'] = []
for name in ScoreFunction._classes:
if name in kwargs:
fns.append({name: kwargs.pop(name)})
super(FunctionScore, self).__init__(**kwargs)
QUERIES = (
# compound queries
('boosting', {'positive': {'type': 'query'}, 'negative': {'type': 'query'}}),
('constant_score', {'query': {'type': 'query'}, 'filter': {'type': 'filter'}}),
('dis_max', {'queries': {'type': 'query', 'multi': True}}),
('filtered', {'query': {'type': 'query'}, 'filter': {'type': 'filter'}}),
('indices', {'query': {'type': 'query'}, 'no_match_query': {'type': 'query'}}),
# relationship queries
('nested', {'query': {'type': 'query'}}),
('has_child', {'query': {'type': 'query'}}),
('has_parent', {'query': {'type': 'query'}}),
('top_children', {'query': {'type': 'query'}}),
# compound span queries
('span_first', {'match': {'type': 'query'}}),
('span_multi', {'match': {'type': 'query'}}),
('span_near', {'clauses': {'type': 'query', 'multi': True}}),
('span_not', {'exclude': {'type': 'query'}, 'include': {'type': 'query'}}),
('span_or', {'clauses': {'type': 'query', 'multi': True}}),
# core queries
('common', None),
('fuzzy', None),
('fuzzy_like_this', None),
('fuzzy_like_this_field', None),
('geo_shape', None),
('ids', None),
('match', None),
('match_phrase', None),
('match_phrase_prefix', None),
('more_like_this', None),
('more_like_this_field', None),
('multi_match', None),
('prefix', None),
('query_string', None),
('range', None),
('regexp', None),
('simple_query_string', None),
('span_term', None),
('template', None),
('term', None),
('terms', None),
('wildcard', None),
)
# generate the query classes dynamically
for qname, params_def in QUERIES:
qclass = _make_dsl_class(Query, qname, params_def)
globals()[qclass.__name__] = qclass
| name = 'bool'
_param_defs = {
'must': {'type': 'query', 'multi': True},
'should': {'type': 'query', 'multi': True},
'must_not': {'type': 'query', 'multi': True},
}
def __and__(self, other):
q = self._clone()
if isinstance(other, self.__class__):
q.must += other.must
q.must_not += other.must_not
q.should = []
for qx in (self, other):
min_should_match = getattr(qx, 'minimum_should_match', 0 if any((qx.must, qx.must_not)) else 1)
# all subqueries are required
if len(qx.should) <= min_should_match:
q.must.extend(qx.should)
# not all of them are required, use it and remember min_should_match
elif not q.should: | identifier_body |
query.py | from .utils import DslBase, BoolMixin, _make_dsl_class
from .function import SF, ScoreFunction
__all__ = [
'Q', 'Bool', 'Boosting', 'Common', 'ConstantScore', 'DisMax', 'Filtered',
'FunctionScore', 'Fuzzy', 'FuzzyLikeThis', 'FuzzyLikeThisField',
'GeoShape', 'HasChild', 'HasParent', 'Ids', 'Indices', 'Match', 'MatchAll',
'MatchPhrase', 'MatchPhrasePrefix', 'MoreLikeThis', 'MoreLikeThisField',
'MultiMatch', 'Nested', 'Prefix', 'Query', 'QueryString', 'Range',
'Regexp', 'SF', 'ScoreFunction', 'SimpleQueryString', 'SpanFirst',
'SpanMulti', 'SpanNear', 'SpanNot', 'SpanOr', 'SpanTerm', 'Template',
'Term', 'Terms', 'TopChildren', 'Wildcard'
]
def Q(name_or_query='match_all', **params):
# {"match": {"title": "python"}}
if isinstance(name_or_query, dict):
if params:
raise ValueError('Q() cannot accept parameters when passing in a dict.')
if len(name_or_query) != 1:
raise ValueError('Q() can only accept dict with a single query ({"match": {...}}). '
'Instead it got (%r)' % name_or_query)
name, params = name_or_query.copy().popitem()
return Query.get_dsl_class(name)(**params)
# MatchAll()
if isinstance(name_or_query, Query):
if params:
raise ValueError('Q() cannot accept parameters when passing in a Query object.')
return name_or_query
# s.query = Q('filtered', query=s.query)
if hasattr(name_or_query, '_proxied'):
return name_or_query._proxied
# "match", title="python"
return Query.get_dsl_class(name_or_query)(**params)
class Query(DslBase):
_type_name = 'query'
_type_shortcut = staticmethod(Q)
name = None
class MatchAll(Query):
name = 'match_all'
def __add__(self, other):
return other._clone()
__and__ = __rand__ = __radd__ = __add__
def __or__(self, other):
return self
__ror__ = __or__
EMPTY_QUERY = MatchAll()
class Bool(BoolMixin, Query):
name = 'bool'
_param_defs = {
'must': {'type': 'query', 'multi': True},
'should': {'type': 'query', 'multi': True},
'must_not': {'type': 'query', 'multi': True},
}
def __and__(self, other):
q = self._clone()
if isinstance(other, self.__class__):
q.must += other.must
q.must_not += other.must_not
q.should = []
for qx in (self, other):
min_should_match = getattr(qx, 'minimum_should_match', 0 if any((qx.must, qx.must_not)) else 1)
# all subqueries are required
if len(qx.should) <= min_should_match:
|
# not all of them are required, use it and remember min_should_match
elif not q.should:
q.minimum_should_match = min_should_match
q.should = qx.should
# not all are required, add a should list to the must with proper min_should_match
else:
q.must.append(Bool(should=qx.should, minimum_should_match=min_should_match))
else:
q.must.append(other)
return q
__rand__ = __and__
# register this as Bool for Query
Query._bool = Bool
class FunctionScore(Query):
name = 'function_score'
_param_defs = {
'query': {'type': 'query'},
'filter': {'type': 'filter'},
'functions': {'type': 'score_function', 'multi': True},
}
def __init__(self, **kwargs):
if 'functions' in kwargs:
pass
else:
fns = kwargs['functions'] = []
for name in ScoreFunction._classes:
if name in kwargs:
fns.append({name: kwargs.pop(name)})
super(FunctionScore, self).__init__(**kwargs)
QUERIES = (
# compound queries
('boosting', {'positive': {'type': 'query'}, 'negative': {'type': 'query'}}),
('constant_score', {'query': {'type': 'query'}, 'filter': {'type': 'filter'}}),
('dis_max', {'queries': {'type': 'query', 'multi': True}}),
('filtered', {'query': {'type': 'query'}, 'filter': {'type': 'filter'}}),
('indices', {'query': {'type': 'query'}, 'no_match_query': {'type': 'query'}}),
# relationship queries
('nested', {'query': {'type': 'query'}}),
('has_child', {'query': {'type': 'query'}}),
('has_parent', {'query': {'type': 'query'}}),
('top_children', {'query': {'type': 'query'}}),
# compound span queries
('span_first', {'match': {'type': 'query'}}),
('span_multi', {'match': {'type': 'query'}}),
('span_near', {'clauses': {'type': 'query', 'multi': True}}),
('span_not', {'exclude': {'type': 'query'}, 'include': {'type': 'query'}}),
('span_or', {'clauses': {'type': 'query', 'multi': True}}),
# core queries
('common', None),
('fuzzy', None),
('fuzzy_like_this', None),
('fuzzy_like_this_field', None),
('geo_shape', None),
('ids', None),
('match', None),
('match_phrase', None),
('match_phrase_prefix', None),
('more_like_this', None),
('more_like_this_field', None),
('multi_match', None),
('prefix', None),
('query_string', None),
('range', None),
('regexp', None),
('simple_query_string', None),
('span_term', None),
('template', None),
('term', None),
('terms', None),
('wildcard', None),
)
# generate the query classes dynamically
for qname, params_def in QUERIES:
qclass = _make_dsl_class(Query, qname, params_def)
globals()[qclass.__name__] = qclass
| q.must.extend(qx.should) | conditional_block |
query.py | from .utils import DslBase, BoolMixin, _make_dsl_class
from .function import SF, ScoreFunction
__all__ = [
'Q', 'Bool', 'Boosting', 'Common', 'ConstantScore', 'DisMax', 'Filtered',
'FunctionScore', 'Fuzzy', 'FuzzyLikeThis', 'FuzzyLikeThisField',
'GeoShape', 'HasChild', 'HasParent', 'Ids', 'Indices', 'Match', 'MatchAll',
'MatchPhrase', 'MatchPhrasePrefix', 'MoreLikeThis', 'MoreLikeThisField',
'MultiMatch', 'Nested', 'Prefix', 'Query', 'QueryString', 'Range',
'Regexp', 'SF', 'ScoreFunction', 'SimpleQueryString', 'SpanFirst',
'SpanMulti', 'SpanNear', 'SpanNot', 'SpanOr', 'SpanTerm', 'Template',
'Term', 'Terms', 'TopChildren', 'Wildcard'
]
def Q(name_or_query='match_all', **params):
# {"match": {"title": "python"}}
if isinstance(name_or_query, dict):
if params:
raise ValueError('Q() cannot accept parameters when passing in a dict.')
if len(name_or_query) != 1:
raise ValueError('Q() can only accept dict with a single query ({"match": {...}}). '
'Instead it got (%r)' % name_or_query)
name, params = name_or_query.copy().popitem()
return Query.get_dsl_class(name)(**params)
# MatchAll()
if isinstance(name_or_query, Query):
if params:
raise ValueError('Q() cannot accept parameters when passing in a Query object.')
return name_or_query
# s.query = Q('filtered', query=s.query)
if hasattr(name_or_query, '_proxied'):
return name_or_query._proxied
# "match", title="python"
return Query.get_dsl_class(name_or_query)(**params)
class Query(DslBase):
_type_name = 'query'
_type_shortcut = staticmethod(Q)
name = None
class MatchAll(Query):
name = 'match_all' | return self
__ror__ = __or__
EMPTY_QUERY = MatchAll()
class Bool(BoolMixin, Query):
name = 'bool'
_param_defs = {
'must': {'type': 'query', 'multi': True},
'should': {'type': 'query', 'multi': True},
'must_not': {'type': 'query', 'multi': True},
}
def __and__(self, other):
q = self._clone()
if isinstance(other, self.__class__):
q.must += other.must
q.must_not += other.must_not
q.should = []
for qx in (self, other):
min_should_match = getattr(qx, 'minimum_should_match', 0 if any((qx.must, qx.must_not)) else 1)
# all subqueries are required
if len(qx.should) <= min_should_match:
q.must.extend(qx.should)
# not all of them are required, use it and remember min_should_match
elif not q.should:
q.minimum_should_match = min_should_match
q.should = qx.should
# not all are required, add a should list to the must with proper min_should_match
else:
q.must.append(Bool(should=qx.should, minimum_should_match=min_should_match))
else:
q.must.append(other)
return q
__rand__ = __and__
# register this as Bool for Query
Query._bool = Bool
class FunctionScore(Query):
name = 'function_score'
_param_defs = {
'query': {'type': 'query'},
'filter': {'type': 'filter'},
'functions': {'type': 'score_function', 'multi': True},
}
def __init__(self, **kwargs):
if 'functions' in kwargs:
pass
else:
fns = kwargs['functions'] = []
for name in ScoreFunction._classes:
if name in kwargs:
fns.append({name: kwargs.pop(name)})
super(FunctionScore, self).__init__(**kwargs)
QUERIES = (
# compound queries
('boosting', {'positive': {'type': 'query'}, 'negative': {'type': 'query'}}),
('constant_score', {'query': {'type': 'query'}, 'filter': {'type': 'filter'}}),
('dis_max', {'queries': {'type': 'query', 'multi': True}}),
('filtered', {'query': {'type': 'query'}, 'filter': {'type': 'filter'}}),
('indices', {'query': {'type': 'query'}, 'no_match_query': {'type': 'query'}}),
# relationship queries
('nested', {'query': {'type': 'query'}}),
('has_child', {'query': {'type': 'query'}}),
('has_parent', {'query': {'type': 'query'}}),
('top_children', {'query': {'type': 'query'}}),
# compount span queries
('span_first', {'match': {'type': 'query'}}),
('span_multi', {'match': {'type': 'query'}}),
('span_near', {'clauses': {'type': 'query', 'multi': True}}),
('span_not', {'exclude': {'type': 'query'}, 'include': {'type': 'query'}}),
('span_or', {'clauses': {'type': 'query', 'multi': True}}),
# core queries
('common', None),
('fuzzy', None),
('fuzzy_like_this', None),
('fuzzy_like_this_field', None),
('geo_shape', None),
('ids', None),
('match', None),
('match_phrase', None),
('match_phrase_prefix', None),
('more_like_this', None),
('more_like_this_field', None),
('multi_match', None),
('prefix', None),
('query_string', None),
('range', None),
('regexp', None),
('simple_query_string', None),
('span_term', None),
('template', None),
('term', None),
('terms', None),
('wildcard', None),
)
# generate the query classes dynamicaly
for qname, params_def in QUERIES:
qclass = _make_dsl_class(Query, qname, params_def)
globals()[qclass.__name__] = qclass | def __add__(self, other):
return other._clone()
__and__ = __rand__ = __radd__ = __add__
def __or__(self, other): | random_line_split |
query.py | from .utils import DslBase, BoolMixin, _make_dsl_class
from .function import SF, ScoreFunction
__all__ = [
'Q', 'Bool', 'Boosting', 'Common', 'ConstantScore', 'DisMax', 'Filtered',
'FunctionScore', 'Fuzzy', 'FuzzyLikeThis', 'FuzzyLikeThisField',
'GeoShape', 'HasChild', 'HasParent', 'Ids', 'Indices', 'Match', 'MatchAll',
'MatchPhrase', 'MatchPhrasePrefix', 'MoreLikeThis', 'MoreLikeThisField',
'MultiMatch', 'Nested', 'Prefix', 'Query', 'QueryString', 'Range',
'Regexp', 'SF', 'ScoreFunction', 'SimpleQueryString', 'SpanFirst',
'SpanMulti', 'SpanNear', 'SpanNot', 'SpanOr', 'SpanTerm', 'Template',
'Term', 'Terms', 'TopChildren', 'Wildcard'
]
def Q(name_or_query='match_all', **params):
# {"match": {"title": "python"}}
if isinstance(name_or_query, dict):
if params:
raise ValueError('Q() cannot accept parameters when passing in a dict.')
if len(name_or_query) != 1:
raise ValueError('Q() can only accept dict with a single query ({"match": {...}}). '
'Instead it got (%r)' % name_or_query)
name, params = name_or_query.copy().popitem()
return Query.get_dsl_class(name)(**params)
# MatchAll()
if isinstance(name_or_query, Query):
if params:
raise ValueError('Q() cannot accept parameters when passing in a Query object.')
return name_or_query
# s.query = Q('filtered', query=s.query)
if hasattr(name_or_query, '_proxied'):
return name_or_query._proxied
# "match", title="python"
return Query.get_dsl_class(name_or_query)(**params)
class Query(DslBase):
_type_name = 'query'
_type_shortcut = staticmethod(Q)
name = None
class MatchAll(Query):
name = 'match_all'
def | (self, other):
return other._clone()
__and__ = __rand__ = __radd__ = __add__
def __or__(self, other):
return self
__ror__ = __or__
EMPTY_QUERY = MatchAll()
class Bool(BoolMixin, Query):
name = 'bool'
_param_defs = {
'must': {'type': 'query', 'multi': True},
'should': {'type': 'query', 'multi': True},
'must_not': {'type': 'query', 'multi': True},
}
def __and__(self, other):
q = self._clone()
if isinstance(other, self.__class__):
q.must += other.must
q.must_not += other.must_not
q.should = []
for qx in (self, other):
min_should_match = getattr(qx, 'minimum_should_match', 0 if any((qx.must, qx.must_not)) else 1)
# all subqueries are required
if len(qx.should) <= min_should_match:
q.must.extend(qx.should)
# not all of them are required, use it and remember min_should_match
elif not q.should:
q.minimum_should_match = min_should_match
q.should = qx.should
# not all are required, add a should list to the must with proper min_should_match
else:
q.must.append(Bool(should=qx.should, minimum_should_match=min_should_match))
else:
q.must.append(other)
return q
__rand__ = __and__
# register this as Bool for Query
Query._bool = Bool
class FunctionScore(Query):
name = 'function_score'
_param_defs = {
'query': {'type': 'query'},
'filter': {'type': 'filter'},
'functions': {'type': 'score_function', 'multi': True},
}
def __init__(self, **kwargs):
if 'functions' in kwargs:
pass
else:
fns = kwargs['functions'] = []
for name in ScoreFunction._classes:
if name in kwargs:
fns.append({name: kwargs.pop(name)})
super(FunctionScore, self).__init__(**kwargs)
QUERIES = (
# compound queries
('boosting', {'positive': {'type': 'query'}, 'negative': {'type': 'query'}}),
('constant_score', {'query': {'type': 'query'}, 'filter': {'type': 'filter'}}),
('dis_max', {'queries': {'type': 'query', 'multi': True}}),
('filtered', {'query': {'type': 'query'}, 'filter': {'type': 'filter'}}),
('indices', {'query': {'type': 'query'}, 'no_match_query': {'type': 'query'}}),
# relationship queries
('nested', {'query': {'type': 'query'}}),
('has_child', {'query': {'type': 'query'}}),
('has_parent', {'query': {'type': 'query'}}),
('top_children', {'query': {'type': 'query'}}),
# compound span queries
('span_first', {'match': {'type': 'query'}}),
('span_multi', {'match': {'type': 'query'}}),
('span_near', {'clauses': {'type': 'query', 'multi': True}}),
('span_not', {'exclude': {'type': 'query'}, 'include': {'type': 'query'}}),
('span_or', {'clauses': {'type': 'query', 'multi': True}}),
# core queries
('common', None),
('fuzzy', None),
('fuzzy_like_this', None),
('fuzzy_like_this_field', None),
('geo_shape', None),
('ids', None),
('match', None),
('match_phrase', None),
('match_phrase_prefix', None),
('more_like_this', None),
('more_like_this_field', None),
('multi_match', None),
('prefix', None),
('query_string', None),
('range', None),
('regexp', None),
('simple_query_string', None),
('span_term', None),
('template', None),
('term', None),
('terms', None),
('wildcard', None),
)
# generate the query classes dynamically
for qname, params_def in QUERIES:
qclass = _make_dsl_class(Query, qname, params_def)
globals()[qclass.__name__] = qclass
| __add__ | identifier_name |
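As a usage note for the Q shortcut defined above: queries can be built from keyword arguments or from a single-key dict, and combined with the overloaded operators (Bool.__and__ merges clauses). Field names below are arbitrary, and to_dict() is assumed to come from DslBase, as in elasticsearch-dsl:

```python
# Two equivalent ways to build the same match query:
q1 = Q("match", title="python")
q2 = Q({"match": {"title": "python"}})

# '&' merges queries into a bool query via the __and__ overloads above:
combined = q1 & Q("range", published={"gte": "2015-01-01"})
print(combined.to_dict())  # to_dict() assumed from DslBase
```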
request-tests.ts | --- --- --- --- --- --- --- ---
req = request(uri);
req = request(uri, options);
req = request(uri, options, callback);
req = request(uri, callback);
req = request(options);
req = request(options, callback);
req = request.get(uri);
req = request.get(uri, options);
req = request.get(uri, options, callback);
req = request.get(uri, callback);
req = request.get(options);
req = request.get(options, callback);
req = request.post(uri);
req = request.post(uri, options);
req = request.post(uri, options, callback);
req = request.post(uri, callback);
req = request.post(options);
req = request.post(options, callback);
req = request.put(uri);
req = request.put(uri, options);
req = request.put(uri, options, callback);
req = request.put(uri, callback);
req = request.put(options);
req = request.put(options, callback);
req = request.head(uri);
req = request.head(uri, options);
req = request.head(uri, options, callback);
req = request.head(uri, callback);
req = request.head(options);
req = request.head(options, callback);
req = request.patch(uri);
req = request.patch(uri, options);
req = request.patch(uri, options, callback);
req = request.patch(uri, callback);
req = request.patch(options);
req = request.patch(options, callback);
req = request.del(uri);
req = request.del(uri, options);
req = request.del(uri, options, callback);
req = request.del(uri, callback);
req = request.del(options);
req = request.del(options, callback);
req = request.delete(uri);
req = request.delete(uri, options);
req = request.delete(uri, options, callback);
req = request.delete(uri, callback);
req = request.delete(options);
req = request.delete(options, callback);
req = request.forever(value, value);
jar = request.jar();
const r = request.defaults(options);
r(str);
r.get(str);
r.post(str);
r(options);
r.get(options);
r.post(options);
request
.get('http://example.com/example.png')
.on('response', (response) => {
res = response;
num = response.statusCode;
str = response.statusMessage;
req = response.request;
value = response.body;
strOrUndef = response.caseless.get('foo');
strOrFalse = response.caseless.has('content-type');
if (response.timings) {
num = response.timings.socket;
num = response.timings.lookup;
num = response.timings.connect;
num = response.timings.response;
num = response.timings.end;
}
if (response.timingPhases) {
num = response.timingPhases.wait;
num = response.timingPhases.dns;
num = response.timingPhases.tcp;
num = response.timingPhases.firstByte;
num = response.timingPhases.download;
num = response.timingPhases.total;
}
})
.pipe(request.put('http://another.com/another.png'));
// The following examples from https://github.com/request/request
request('http://www.google.com', (error, response, body) => {
if (!error && response.statusCode === 200) {
console.log(body); // Show the HTML for the Google homepage.
}
});
request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'));
fs.createReadStream('file.json').pipe(request.put('http://mysite.com/obj.json'));
request.get('http://google.com/img.png').pipe(request.put('http://mysite.com/img.png'));
request
.get('http://google.com/img.png')
.on('response', (response) => {
console.log(response.statusCode); // 200
console.log(response.headers['content-type']); // 'image/png'
})
.pipe(request.put('http://mysite.com/img.png'));
request
.get('http://mysite.com/doodle.png')
.on('error', (err: any) => {
console.log(err);
})
.pipe(fs.createWriteStream('doodle.png'));
http.createServer((req, resp) => {
if (req.url === '/doodle.png') {
switch (req.method) {
case 'PUT':
req.pipe(request.put('http://mysite.com/doodle.png'));
break;
case 'GET':
case 'HEAD':
request.get('http://mysite.com/doodle.png').pipe(resp);
}
}
});
http.createServer((req, resp) => {
if (req.url === '/doodle.png') {
const x = request('http://mysite.com/doodle.png');
req.pipe(x);
x.pipe(resp);
}
});
http.createServer((req, resp) => {
req.pipe(request('http://mysite.com/doodle.png')).pipe(resp);
});
http.createServer((req, resp) => {
if (req.url === '/doodle.png') {
r.get('http://google.com/doodle.png').pipe(resp);
}
});
request.post('http://service.com/upload', {form: {key: 'value'}});
// or
request.post('http://service.com/upload').form({key: 'value'});
// or
request.post({url: 'http://service.com/upload', form: {key: 'value'}}, (err, httpResponse, body) => { /* ... */ });
const data = {
// Pass a simple key-value pair
my_field: 'my_value',
// Pass data via Buffers
my_buffer: new Buffer([1, 2, 3]),
// Pass data via Streams
my_file: fs.createReadStream(__dirname + '/unicycle.jpg'),
// Pass multiple values /w an Array
attachments: [
fs.createReadStream(__dirname + '/attachment1.jpg'),
fs.createReadStream(__dirname + '/attachment2.jpg')
],
// Pass optional meta-data with an 'options' object with style: {value: DATA, options: OPTIONS}
// Use case: for some types of streams, you'll need to provide "file"-related information manually.
// See the `form-data` README for more information about options: https://github.com/felixge/node-form-data
custom_file: {
value: fs.createReadStream('/dev/urandom'),
options: {
filename: 'topsecret.jpg',
contentType: 'image/jpg'
}
}
};
request.post({url: 'http://service.com/upload', formData: data}, function optionalCallback(err, httpResponse, body) {
if (err) {
console.error('upload failed:', err);
return;
}
console.log('Upload successful! Server responded with:', body);
});
const requestMultipart = request.post('http://service.com/upload', function optionalCallback(err, httpResponse, body) {});
form = requestMultipart.form();
form.append('my_field', 'my_value');
form.append('my_buffer', new Buffer([1, 2, 3]));
form.append('custom_file', fs.createReadStream(__dirname + '/unicycle.jpg'), {filename: 'unicycle.jpg'});
request({
method: 'PUT',
preambleCRLF: true,
postambleCRLF: true,
uri: 'http://service.com/upload',
multipart: {
chunked: false,
data: [
{
'content-type': 'application/json',
body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, content_type: 'text/plain' }}})
},
{ body: 'I am an attachment' }
]
}
},
(error, response, body) => {
if (error) {
console.error('upload failed:', error);
return;
}
console.log('Upload successful! Server responded with:', body);
});
request({
method: 'PUT',
preambleCRLF: true,
postambleCRLF: true,
uri: 'http://service.com/upload',
multipart: [
{
headers: { 'content-type': 'application/json' },
body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, content_type: 'text/plain' }}})
},
{ body: 'I am an attachment' },
{ body: fs.createReadStream('image.png') }
]
},
(error, response, body) => {
if (error) {
console.error('upload failed:', error);
return;
}
console.log('Upload successful! Server responded with:', body);
});
request.get('http://some.server.com/').auth('username', 'password', false);
// or
request.get('http://some.server.com/', {
auth: {
user: 'username',
pass: 'password',
sendImmediately: false
}
});
// or
request.get('http://some.server.com/').auth('foo', 'bar', true, 'bearerToken');
// or
request.get('http://some.server.com/', {
auth: {
bearer: 'bearerToken'
}
});
// or
request.get('http://some.server.com/', {
auth: {
bearer: () => 'bearerToken'
}
});
const username = 'username';
const password = 'password';
let url = `http://'${username}:${password}'@some.server.com`;
request({url}, (error, response, body) => {
// Do more stuff with 'body' here
});
options = {
url: 'https://api.github.com/repos/request/request',
headers: {
'User-Agent': 'request'
}
};
function | callback | identifier_name |
|
request-tests.ts | request.get(uri, options, callback);
req = request.get(uri, callback);
req = request.get(options);
req = request.get(options, callback);
req = request.post(uri);
req = request.post(uri, options);
req = request.post(uri, options, callback);
req = request.post(uri, callback);
req = request.post(options);
req = request.post(options, callback);
req = request.put(uri);
req = request.put(uri, options);
req = request.put(uri, options, callback);
req = request.put(uri, callback);
req = request.put(options);
req = request.put(options, callback);
req = request.head(uri);
req = request.head(uri, options);
req = request.head(uri, options, callback);
req = request.head(uri, callback);
req = request.head(options);
req = request.head(options, callback);
req = request.patch(uri);
req = request.patch(uri, options);
req = request.patch(uri, options, callback);
req = request.patch(uri, callback);
req = request.patch(options);
req = request.patch(options, callback);
req = request.del(uri);
req = request.del(uri, options);
req = request.del(uri, options, callback);
req = request.del(uri, callback);
req = request.del(options);
req = request.del(options, callback);
req = request.delete(uri);
req = request.delete(uri, options);
req = request.delete(uri, options, callback);
req = request.delete(uri, callback);
req = request.delete(options);
req = request.delete(options, callback);
req = request.forever(value, value);
jar = request.jar();
const r = request.defaults(options);
r(str);
r.get(str);
r.post(str);
r(options);
r.get(options);
r.post(options);
request
.get('http://example.com/example.png')
.on('response', (response) => {
res = response;
num = response.statusCode;
str = response.statusMessage;
req = response.request;
value = response.body;
strOrUndef = response.caseless.get('foo');
strOrFalse = response.caseless.has('content-type');
if (response.timings) {
num = response.timings.socket;
num = response.timings.lookup;
num = response.timings.connect;
num = response.timings.response;
num = response.timings.end;
}
if (response.timingPhases) {
num = response.timingPhases.wait;
num = response.timingPhases.dns;
num = response.timingPhases.tcp;
num = response.timingPhases.firstByte;
num = response.timingPhases.download;
num = response.timingPhases.total;
}
})
.pipe(request.put('http://another.com/another.png'));
// The following examples from https://github.com/request/request
request('http://www.google.com', (error, response, body) => {
if (!error && response.statusCode === 200) {
console.log(body); // Show the HTML for the Google homepage.
}
});
request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'));
fs.createReadStream('file.json').pipe(request.put('http://mysite.com/obj.json'));
request.get('http://google.com/img.png').pipe(request.put('http://mysite.com/img.png'));
request
.get('http://google.com/img.png')
.on('response', (response) => {
console.log(response.statusCode); // 200
console.log(response.headers['content-type']); // 'image/png'
})
.pipe(request.put('http://mysite.com/img.png'));
request
.get('http://mysite.com/doodle.png')
.on('error', (err: any) => {
console.log(err);
})
.pipe(fs.createWriteStream('doodle.png'));
http.createServer((req, resp) => {
if (req.url === '/doodle.png') {
switch (req.method) {
case 'PUT':
req.pipe(request.put('http://mysite.com/doodle.png'));
break;
case 'GET':
case 'HEAD':
request.get('http://mysite.com/doodle.png').pipe(resp);
}
}
});
http.createServer((req, resp) => {
if (req.url === '/doodle.png') {
const x = request('http://mysite.com/doodle.png');
req.pipe(x);
x.pipe(resp);
}
});
http.createServer((req, resp) => {
req.pipe(request('http://mysite.com/doodle.png')).pipe(resp);
});
http.createServer((req, resp) => {
if (req.url === '/doodle.png') {
r.get('http://google.com/doodle.png').pipe(resp);
}
});
request.post('http://service.com/upload', {form: {key: 'value'}});
// or
request.post('http://service.com/upload').form({key: 'value'});
// or
request.post({url: 'http://service.com/upload', form: {key: 'value'}}, (err, httpResponse, body) => { /* ... */ });
const data = {
// Pass a simple key-value pair
my_field: 'my_value',
// Pass data via Buffers
my_buffer: new Buffer([1, 2, 3]),
// Pass data via Streams
my_file: fs.createReadStream(__dirname + '/unicycle.jpg'),
// Pass multiple values /w an Array
attachments: [
fs.createReadStream(__dirname + '/attachment1.jpg'),
fs.createReadStream(__dirname + '/attachment2.jpg')
],
// Pass optional meta-data with an 'options' object with style: {value: DATA, options: OPTIONS}
// Use case: for some types of streams, you'll need to provide "file"-related information manually.
// See the `form-data` README for more information about options: https://github.com/felixge/node-form-data
custom_file: {
value: fs.createReadStream('/dev/urandom'),
options: {
filename: 'topsecret.jpg',
contentType: 'image/jpg'
}
}
};
request.post({url: 'http://service.com/upload', formData: data}, function optionalCallback(err, httpResponse, body) {
if (err) {
console.error('upload failed:', err);
return;
}
console.log('Upload successful! Server responded with:', body);
});
const requestMultipart = request.post('http://service.com/upload', function optionalCallback(err, httpResponse, body) {});
form = requestMultipart.form();
form.append('my_field', 'my_value');
form.append('my_buffer', new Buffer([1, 2, 3]));
form.append('custom_file', fs.createReadStream(__dirname + '/unicycle.jpg'), {filename: 'unicycle.jpg'});
request({
method: 'PUT',
preambleCRLF: true,
postambleCRLF: true,
uri: 'http://service.com/upload',
multipart: {
chunked: false,
data: [
{
'content-type': 'application/json',
body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, content_type: 'text/plain' }}})
},
{ body: 'I am an attachment' }
]
}
},
(error, response, body) => {
if (error) {
console.error('upload failed:', error);
return;
}
console.log('Upload successful! Server responded with:', body);
});
request({
method: 'PUT',
preambleCRLF: true,
postambleCRLF: true,
uri: 'http://service.com/upload',
multipart: [
{
headers: { 'content-type': 'application/json' },
body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, content_type: 'text/plain' }}})
},
{ body: 'I am an attachment' },
{ body: fs.createReadStream('image.png') }
]
},
(error, response, body) => {
if (error) {
console.error('upload failed:', error);
return;
}
console.log('Upload successful! Server responded with:', body);
});
request.get('http://some.server.com/').auth('username', 'password', false);
// or
request.get('http://some.server.com/', {
auth: {
user: 'username',
pass: 'password',
sendImmediately: false
}
});
// or
request.get('http://some.server.com/').auth('foo', 'bar', true, 'bearerToken');
// or
request.get('http://some.server.com/', {
auth: {
bearer: 'bearerToken'
}
});
// or
request.get('http://some.server.com/', {
auth: {
bearer: () => 'bearerToken'
}
});
const username = 'username';
const password = 'password';
let url = `http://'${username}:${password}'@some.server.com`;
request({url}, (error, response, body) => {
// Do more stuff with 'body' here
});
options = {
url: 'https://api.github.com/repos/request/request',
headers: {
'User-Agent': 'request'
}
};
function callback(error: any, response: http.IncomingMessage, body: string) | {
if (!error && response.statusCode === 200) {
const info = JSON.parse(body);
console.log(info.stargazers_count + " Stars");
console.log(info.forks_count + " Forks");
}
} | identifier_body |
all_11.js
var searchData=
[
['wait_5ffor_5fcal_132',['wait_for_cal',['../class_a_d_c___module.html#a4fb69b5b2d07c3fc8f5f0bbbf05dfa2a',1,'ADC_Module']]],
  ['waituntilstable_133',['waitUntilStable',['../namespace_v_r_e_f.html#a108f7c1b5a2073bc092eafcae58575b0',1,'VREF']]],
  ['wrong_5fadc_134',['WRONG_ADC',['../namespace_a_d_c___error.html#ad050c44d1f3422d02e5f9726edeee8f0a52df2c8ae830ed21e0c2fc269087b3ec',1,'ADC_Error']]],
['wrong_5fpin_135',['WRONG_PIN',['../namespace_a_d_c___error.html#ad050c44d1f3422d02e5f9726edeee8f0ab578c19f4fab8e2bfeddc85fa17b5acf',1,'ADC_Error']]]
];
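// Annotation (not part of the generated Doxygen output): each searchData entry
// pairs an escaped, lowercased key with a [display-name, [relative-url, flag,
// owning-scope]] tuple. A hypothetical prefix lookup over this index:
//
//   function findSymbol(prefix) {
//     return searchData.filter(function(entry) {
//       return entry[1][0].toLowerCase().indexOf(prefix.toLowerCase()) === 0;
//     });
//   }
//
//   findSymbol('wrong'); // would match the WRONG_ADC and WRONG_PIN entries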
|
systemjs.config.1.js
// #docregion
(function(global) {
// map tells the System loader where to look for things
var map = {
'app': 'app', // 'dist',
'rxjs': 'node_modules/rxjs',
'angular2-in-memory-web-api': 'node_modules/angular2-in-memory-web-api',
'@angular': 'node_modules/@angular'
};
// packages tells the System loader how to load when no filename and/or no extension
var packages = {
'app': { main: 'main.js', defaultExtension: 'js' },
'rxjs': { defaultExtension: 'js' },
'angular2-in-memory-web-api': { defaultExtension: 'js' },
};
var packageNames = [
'@angular/common',
'@angular/compiler',
'@angular/core',
'@angular/http',
'@angular/platform-browser',
'@angular/platform-browser-dynamic',
'@angular/router',
'@angular/router-deprecated',
'@angular/testing',
'@angular/upgrade',
];
// add package entries for angular packages in the form '@angular/common': { main: 'index.js', defaultExtension: 'js' }
packageNames.forEach(function(pkgName) {
packages[pkgName] = { main: 'index.js', defaultExtension: 'js' };
});
var config = {
map: map,
packages: packages
}
// filterSystemConfig - index.html's chance to modify config before we register it.
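// Annotation (hypothetical usage, not in the original file): index.html can
// define the hook before loading this script to adjust the config, e.g.:
//
//   window.filterSystemConfig = function(config) {
//     // point an Angular package at its UMD bundle instead of index.js
//     config.packages['@angular/core'] = { main: 'bundles/core.umd.js', defaultExtension: 'js' };
//   };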
  if (global.filterSystemConfig) { global.filterSystemConfig(config); }
  System.config(config);
})(this);
Checkbox.js
/*
* Copyright (C) 2010 Google Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* @constructor
* @param {string} label
* @param {string} className
* @param {string=} tooltip
*/
WebInspector.Checkbox = function(label, className, tooltip)
{
this.element = document.createElementWithClass("label", className);
this._inputElement = this.element.createChild("input");
this._inputElement.type = "checkbox";
this.element.createTextChild(label);
if (tooltip)
this.element.title = tooltip;
}
WebInspector.Checkbox.prototype = {
set checked(checked)
{
this._inputElement.checked = checked;
},
get checked()
{
return this._inputElement.checked;
},
addEventListener: function(listener)
    {
        function listenerWrapper(event)
        {
            if (listener)
                listener(event);
            event.consume();
            return true;
        }
        this._inputElement.addEventListener("click", listenerWrapper, false);
        this.element.addEventListener("click", listenerWrapper, false);
    }
}
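// Usage sketch (hypothetical; assumes the WebInspector UI environment is loaded):
//
//   var checkbox = new WebInspector.Checkbox("Preserve log", "status-bar-item",
//       "Keep entries across navigations");
//   checkbox.checked = true;
//   checkbox.addEventListener(function(event) {
//       console.log("toggled to", checkbox.checked);
//   });
//   toolbarElement.appendChild(checkbox.element); // toolbarElement is illustrative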
RegressionTree.py
# regression tree
# input is a dataframe of features
# the corresponding y value(called labels here) is the scores for each document
import pandas as pd
import numpy as np
from multiprocessing import Pool
from itertools import repeat
import scipy
import scipy.optimize
node_id = 0
def get_splitting_points(args):
# given a list
# return a list of possible splitting values
attribute, col = args
attribute.sort()
possible_split = []
for i in range(len(attribute)-1):
if attribute[i] != attribute[i+1]:
possible_split.append(np.mean((attribute[i],attribute[i+1])))
return possible_split, col
# create a dictionary, key is the attribute number, value is whole list of possible splits for that column
def find_best_split_parallel(args):
best_ls = 1000000
best_split = None
best_children = None
split_point, data, label = args
key,possible_split = split_point
for split in possible_split:
children = split_children(data, label, key, split)
#weighted average of left and right ls
ls = len(children[1])*least_square(children[1])/len(label) + len(children[3])*least_square(children[3])/len(label)
if ls < best_ls:
best_ls = ls
best_split = (key, split)
best_children = children
return best_ls, best_split, best_children
def find_best_split(data, label, split_points):
# split_points is a dictionary of possible splitting values
# return the best split
best_ls = 1000000
best_split = None
best_children = None
pool = Pool()
for ls, split, children in pool.map(find_best_split_parallel, zip(split_points.items(), repeat(data), repeat(label))):
if ls < best_ls:
best_ls = ls
best_split = split
best_children = children
pool.close()
return best_split, best_children # return a tuple(attribute, value)
def split_children(data, label, key, split):
left_index = [index for index in xrange(len(data.iloc[:,key])) if data.iloc[index,key] < split]
right_index = [index for index in xrange(len(data.iloc[:,key])) if data.iloc[index,key] >= split]
left_data = data.iloc[left_index,:]
right_data = data.iloc[right_index,:]
left_label = [label[i] for i in left_index]
right_label =[label[i] for i in right_index]
return left_data, left_label, right_data, right_label
def least_square(label):
if not len(label):
return 0
return (np.sum(label)**2)/len(set(label))
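# Annotation (not in the original source): for a candidate split, the code
# above scores each child by (sum of labels)^2 / (number of distinct labels),
# and find_best_split weights the two child scores by child size. This is the
# usual squared-error-reduction criterion except that the denominator uses
# len(set(label)) rather than len(label); with duplicate scores per document,
# len(label) may be the intended quantity.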
def create_leaf(label):
global node_id
node_id += 1
    leaf = {'splitting_feature': None,
'left': None,
'right':None,
'is_leaf':True,
'index':node_id}
leaf['value'] = round(np.mean(label),3)
return leaf
def find_splits_parallel(args):
var_space, label, col = args
# var_space = data.iloc[:,col].tolist()
return scipy.optimize.fminbound(error_function, min(var_space), max(var_space), args = (col, var_space, label), full_output = 1)
# return,
# if not min_error or error < min_error:
# min_error = error
# split_var = col
# min_split = split
def create_tree(data, all_pos_split, label, max_depth, ideal_ls, current_depth = 0):
remaining_features = all_pos_split
#stopping conditions
if sum([len(v)!= 0 for v in remaining_features.values()]) == 0:
# If there are no remaining features to consider, make current node a leaf node
return create_leaf(label)
# #Additional stopping condition (limit tree depth)
elif current_depth > max_depth:
return create_leaf(label)
#######
min_error = None
split_var = None
min_split = None
var_spaces = [data.iloc[:,col].tolist() for col in xrange(data.shape[1])]
cols = [col for col in xrange(data.shape[1])]
pool = Pool()
    results = pool.map(find_splits_parallel, zip(var_spaces, repeat(label), cols))
    # pool.map preserves input order, so pair each result with its source column
    for col, (split, error, ierr, numf) in zip(cols, results):
        if not min_error or error < min_error:
            min_error = error
            split_var = col
            min_split = split
pool.close()
splitting_feature = (split_var, min_split)
children = split_children(data, label, split_var, min_split)
left_data, left_label, right_data, right_label = children
if len(left_label) == 0 or len(right_label) == 0:
return create_leaf(label)
left_least_square = least_square(left_label)
# Create a leaf node if the split is "perfect"
if left_least_square < ideal_ls:
return create_leaf(left_label)
if least_square(right_label) < ideal_ls:
return create_leaf(right_label)
# recurse on children
left_tree = create_tree(left_data, remaining_features, left_label, max_depth, ideal_ls, current_depth +1)
right_tree = create_tree(right_data, remaining_features, right_label, max_depth, ideal_ls, current_depth +1)
return {'is_leaf' : False,
'value' : None,
'splitting_feature': splitting_feature,
'left' : left_tree,
'right' : right_tree,
'index' : None}
def error_function(split_point, split_var, data, label):
data1 = []
data2 = []
for i in xrange(len(data)):
temp_dat = data[i]
if temp_dat <= split_point:
data1.append(label[i])
else:
data2.append(label[i])
return least_square(data1) + least_square(data2)
def make_prediction(tree, x, annotate = False):
if tree['is_leaf']:
if annotate:
print "At leaf, predicting %s" % tree['value']
return tree['value']
else:
# the splitting value of x.
split_feature_value = x[tree['splitting_feature'][0]]
if annotate:
print "Split on %s = %s" % (tree['splitting_feature'], split_feature_value)
if split_feature_value < tree['splitting_feature'][1]:
return make_prediction(tree['left'], x, annotate)
else:
return make_prediction(tree['right'], x, annotate)
class RegressionTree:
def __init__(self, training_data, labels, max_depth=5, ideal_ls=100):
self.training_data = training_data
self.labels = labels
self.max_depth = max_depth
self.ideal_ls = ideal_ls
self.tree = None
def fit(self):
global node_id
        node_id = 0
        all_pos_split = {}
        pool = Pool()
splitting_data = [self.training_data.iloc[:,col].tolist() for col in xrange(self.training_data.shape[1])]
cols = [col for col in xrange(self.training_data.shape[1])]
for dat, col in pool.map(get_splitting_points, zip(splitting_data, cols)):
all_pos_split[col] = dat
pool.close()
self.tree = create_tree(self.training_data, all_pos_split, self.labels, self.max_depth, self.ideal_ls)
def predict(self, test):
prediction = np.array([make_prediction(self.tree, x) for x in test])
return prediction
if __name__ == '__main__':
#read in data, label
data = pd.read_excel("mlr06.xls")
test = [[478, 184, 40, 74, 11, 31], [1000,10000,10000,10000,10000,1000,100000]]
label = data['X7']
del data['X7']
model = RegressionTree(data, label)
model.fit()
    print model.predict(test)
example.js
#!/usr/bin/env babel-node --harmony
// import firequeue from 'firequeue'
import firequeue from './src'
import { concat, throughSync } from 'stream-util'
import parallel from 'concurrent-transform'
const queue = firequeue.init('https://firequeue-test.firebaseio.com')
const logger = (fn) => throughSync(function(data) {
console.log(fn(data))
this.push(data)
})
// create some jobs
// in a real application you might want to use push to generate random job id instead
queue.incoming.child('job1').set({ task: 'task1', data: { name: 'job1' } })
queue.incoming.child('job2').set({ task: 'task1', data: { name: 'job2' } })
queue.incoming.child('job3').set({ task: 'task1', data: { name: 'job3' }, delayed: '20s' })
queue.incoming.child('job4').set({ task: 'task2', data: { name: 'job4' } })
queue.incoming.child('job5').set({ task: 'task3', data: { name: 'job5' } })
// listen to job updates
queue.jobs
.child('job1/state')
.on('value', (s) => console.log(`job1 changed state to ${s.val()}`))
// log 'queued'
// log 'activated'
// log 'completed'
// start queue engine
queue
.start()
.pipe(logger(({ task, key, state }) => `task: ${task}, job: ${key}, state: ${state}`))
// log task: task1, job: job1, state: queued
// log task: task1, job: job2, state: queued
// log task: task1, job: job3, state: delayed
// ...
// process task1
const task1 = queue
.read('task1')
.pipe(queue.process((job) => {
// do some work with job.key(), job.val()
return Promise.resolve()
}))
.pipe(logger(({ task, key, state }) => `task: ${task}, job: ${key}, state: ${state}`))
// log task: task1, job: job1, state: completed
// log task: task1, job: job2, state: completed
// ...
// process task2 with maxAttempts and backoff
const task2 = queue
.read('task2')
.pipe(queue.maxAttempts(2))
.pipe(queue.backoff('2s')) // wait 2s before retrying
.pipe(queue.process((job) => {
console.log('do some work with', job.key(), job.val())
const attempts = job.child('attempts').val() || 0
return attempts < 2
? Promise.reject()
: Promise.resolve()
}))
// process task3 with a concurrency of 10
const task3 = queue
.read('task3')
.pipe(parallel(queue.process((job) => {
console.log('do some work with', job.key(), job.val())
return Promise.resolve()
}), 10))
// remove completed jobs
concat(task1, task2, task3)
  .pipe(queue.clean('completed'))
  .pipe(logger(({ task, key, state }) => `task: ${task}, job: ${key}, state: ${state}`))
// log task: task1, job: job1, state: cleaned
// log task: task1, job: job2, state: cleaned
// ...
// 30sec later...
setTimeout(() => {
  queue.stop().then(() => {
    console.log('queue was stopped successfully')
  })
}, 30000)
fuzzing.rs
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
constants,
peer::Peer,
protocols::wire::{
handshake::v1::{MessagingProtocolVersion, SupportedProtocols},
messaging::v1::{NetworkMessage, NetworkMessageSink},
},
testutils::fake_socket::ReadOnlyTestSocketVec,
transport::{Connection, ConnectionId, ConnectionMetadata},
ProtocolId,
};
use channel::{diem_channel, message_queues::QueueStyle};
use diem_config::{config::PeerRole, network_id::NetworkContext};
use diem_proptest_helpers::ValueGenerator;
use diem_time_service::TimeService;
use diem_types::{network_address::NetworkAddress, PeerId};
use futures::{executor::block_on, future, io::AsyncReadExt, sink::SinkExt, stream::StreamExt};
use memsocket::MemorySocket;
use netcore::transport::ConnectionOrigin;
use proptest::{arbitrary::any, collection::vec};
use std::time::Duration;
/// Generate a sequence of `NetworkMessage`, bcs serialize them, and write them
/// out to a buffer using our length-prefixed message codec.
pub fn generate_corpus(gen: &mut ValueGenerator) -> Vec<u8> {
let network_msgs = gen.generate(vec(any::<NetworkMessage>(), 1..20));
let (write_socket, mut read_socket) = MemorySocket::new_pair();
let mut writer = NetworkMessageSink::new(write_socket, constants::MAX_FRAME_SIZE, None);
// Write the `NetworkMessage`s to a fake socket
let f_send = async move {
for network_msg in &network_msgs {
writer.send(network_msg).await.unwrap();
}
};
// Read the serialized `NetworkMessage`s from the fake socket
let f_recv = async move {
let mut buf = Vec::new();
read_socket.read_to_end(&mut buf).await.unwrap();
buf
};
let (_, buf) = block_on(future::join(f_send, f_recv));
buf
}
/// Fuzz the `Peer` actor's inbound message handling.
///
/// For each fuzzer iteration, we spin up a new `Peer` actor and pipe the raw
/// fuzzer data into it. This mostly tests that the `Peer` inbound message handling
/// doesn't panic or leak memory when reading, deserializing, and handling messages
/// from remote peers.
pub fn fuzz(data: &[u8]) {
// Use the basic single-threaded runtime, since our current tokio version has
// a chance to leak memory and/or thread handles when using the threaded
// runtime and sometimes blocks when trying to shutdown the runtime.
//
// https://github.com/tokio-rs/tokio/pull/2649
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap();
let executor = rt.handle().clone();
// We want to choose a constant peer id for _our_ peer id, since we will
// generate unbounded metrics otherwise and OOM during fuzzing.
let peer_id = PeerId::ZERO;
// However, we want to choose a random _remote_ peer id to ensure we _don't_
// have metrics logging the remote peer id (which would eventually OOM in
// production for public-facing nodes).
let remote_peer_id = PeerId::random();
// Mock data
let network_context = NetworkContext::mock_with_peer_id(peer_id);
let socket = ReadOnlyTestSocketVec::new(data.to_vec());
let metadata = ConnectionMetadata::new(
remote_peer_id,
ConnectionId::from(123),
NetworkAddress::mock(),
ConnectionOrigin::Inbound,
MessagingProtocolVersion::V1,
SupportedProtocols::from(
[
ProtocolId::ConsensusRpc,
ProtocolId::ConsensusDirectSend,
ProtocolId::MempoolDirectSend,
ProtocolId::StateSyncDirectSend,
ProtocolId::DiscoveryDirectSend,
ProtocolId::HealthCheckerRpc,
]
.iter(),
),
PeerRole::Unknown,
);
let connection = Connection { socket, metadata };
let (connection_notifs_tx, connection_notifs_rx) = channel::new_test(8);
let channel_size = 8;
let (peer_reqs_tx, peer_reqs_rx) = diem_channel::new(QueueStyle::FIFO, channel_size, None);
let (peer_notifs_tx, peer_notifs_rx) = diem_channel::new(QueueStyle::FIFO, channel_size, None);
// Spin up a new `Peer` actor
let peer = Peer::new(
network_context,
executor.clone(),
TimeService::mock(),
connection,
connection_notifs_tx,
peer_reqs_rx,
peer_notifs_tx,
Duration::from_millis(constants::INBOUND_RPC_TIMEOUT_MS),
constants::MAX_CONCURRENT_INBOUND_RPCS,
constants::MAX_CONCURRENT_OUTBOUND_RPCS,
constants::MAX_FRAME_SIZE,
None,
None,
);
executor.spawn(peer.start());
rt.block_on(async move {
// Wait for "remote" to disconnect (we read all data and socket read
// returns EOF), we read a disconnect request, or we fail to deserialize
// something.
        connection_notifs_rx.collect::<Vec<_>>().await;
        // ACK the "remote" d/c and drop our handle to the Peer actor. Then wait
        // for all network notifs to drain out and finish.
        drop(peer_reqs_tx);
        peer_notifs_rx.collect::<Vec<_>>().await;
}
#[test]
fn test_peer_fuzzers() {
let mut value_gen = ValueGenerator::deterministic();
for _ in 0..50 {
let corpus = generate_corpus(&mut value_gen);
fuzz(&corpus);
}
    }
}
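// Sketch (hypothetical): how these helpers would typically be wired into a
// libFuzzer-style entry point; the real harness lives elsewhere in the tree
// and the macro name here is illustrative.
//
// fuzz_target!(|data: &[u8]| {
//     fuzz(data);
// });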
fuzzing.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
constants,
peer::Peer,
protocols::wire::{
handshake::v1::{MessagingProtocolVersion, SupportedProtocols},
messaging::v1::{NetworkMessage, NetworkMessageSink},
},
testutils::fake_socket::ReadOnlyTestSocketVec,
transport::{Connection, ConnectionId, ConnectionMetadata},
ProtocolId,
};
use channel::{diem_channel, message_queues::QueueStyle};
use diem_config::{config::PeerRole, network_id::NetworkContext};
use diem_proptest_helpers::ValueGenerator;
use diem_time_service::TimeService;
use diem_types::{network_address::NetworkAddress, PeerId};
use futures::{executor::block_on, future, io::AsyncReadExt, sink::SinkExt, stream::StreamExt};
use memsocket::MemorySocket;
use netcore::transport::ConnectionOrigin;
use proptest::{arbitrary::any, collection::vec};
use std::time::Duration;
/// Generate a sequence of `NetworkMessage`, bcs serialize them, and write them
/// out to a buffer using our length-prefixed message codec.
pub fn generate_corpus(gen: &mut ValueGenerator) -> Vec<u8> | buf
}
/// Fuzz the `Peer` actor's inbound message handling.
///
/// For each fuzzer iteration, we spin up a new `Peer` actor and pipe the raw
/// fuzzer data into it. This mostly tests that the `Peer` inbound message handling
/// doesn't panic or leak memory when reading, deserializing, and handling messages
/// from remote peers.
pub fn fuzz(data: &[u8]) {
// Use the basic single-threaded runtime, since our current tokio version has
// a chance to leak memory and/or thread handles when using the threaded
// runtime and sometimes blocks when trying to shutdown the runtime.
//
// https://github.com/tokio-rs/tokio/pull/2649
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap();
let executor = rt.handle().clone();
// We want to choose a constant peer id for _our_ peer id, since we will
// generate unbounded metrics otherwise and OOM during fuzzing.
let peer_id = PeerId::ZERO;
// However, we want to choose a random _remote_ peer id to ensure we _don't_
// have metrics logging the remote peer id (which would eventually OOM in
// production for public-facing nodes).
let remote_peer_id = PeerId::random();
// Mock data
let network_context = NetworkContext::mock_with_peer_id(peer_id);
let socket = ReadOnlyTestSocketVec::new(data.to_vec());
let metadata = ConnectionMetadata::new(
remote_peer_id,
ConnectionId::from(123),
NetworkAddress::mock(),
ConnectionOrigin::Inbound,
MessagingProtocolVersion::V1,
SupportedProtocols::from(
[
ProtocolId::ConsensusRpc,
ProtocolId::ConsensusDirectSend,
ProtocolId::MempoolDirectSend,
ProtocolId::StateSyncDirectSend,
ProtocolId::DiscoveryDirectSend,
ProtocolId::HealthCheckerRpc,
]
.iter(),
),
PeerRole::Unknown,
);
let connection = Connection { socket, metadata };
let (connection_notifs_tx, connection_notifs_rx) = channel::new_test(8);
let channel_size = 8;
let (peer_reqs_tx, peer_reqs_rx) = diem_channel::new(QueueStyle::FIFO, channel_size, None);
let (peer_notifs_tx, peer_notifs_rx) = diem_channel::new(QueueStyle::FIFO, channel_size, None);
// Spin up a new `Peer` actor
let peer = Peer::new(
network_context,
executor.clone(),
TimeService::mock(),
connection,
connection_notifs_tx,
peer_reqs_rx,
peer_notifs_tx,
Duration::from_millis(constants::INBOUND_RPC_TIMEOUT_MS),
constants::MAX_CONCURRENT_INBOUND_RPCS,
constants::MAX_CONCURRENT_OUTBOUND_RPCS,
constants::MAX_FRAME_SIZE,
None,
None,
);
executor.spawn(peer.start());
rt.block_on(async move {
// Wait for "remote" to disconnect (we read all data and socket read
// returns EOF), we read a disconnect request, or we fail to deserialize
// something.
connection_notifs_rx.collect::<Vec<_>>().await;
// ACK the "remote" d/c and drop our handle to the Peer actor. Then wait
// for all network notifs to drain out and finish.
drop(peer_reqs_tx);
peer_notifs_rx.collect::<Vec<_>>().await;
});
}
#[test]
fn test_peer_fuzzers() {
let mut value_gen = ValueGenerator::deterministic();
for _ in 0..50 {
let corpus = generate_corpus(&mut value_gen);
fuzz(&corpus);
}
}
| {
let network_msgs = gen.generate(vec(any::<NetworkMessage>(), 1..20));
let (write_socket, mut read_socket) = MemorySocket::new_pair();
let mut writer = NetworkMessageSink::new(write_socket, constants::MAX_FRAME_SIZE, None);
// Write the `NetworkMessage`s to a fake socket
let f_send = async move {
for network_msg in &network_msgs {
writer.send(network_msg).await.unwrap();
}
};
// Read the serialized `NetworkMessage`s from the fake socket
let f_recv = async move {
let mut buf = Vec::new();
read_socket.read_to_end(&mut buf).await.unwrap();
buf
};
let (_, buf) = block_on(future::join(f_send, f_recv)); | identifier_body |
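// A minimal sketch of the length-prefixed framing idea referenced above,
// assuming a plain big-endian u32 length prefix per message; the actual
// wire format is whatever NetworkMessageSink implements, so treat this
// as an illustration rather than the real codec.
fn write_frame(out: &mut Vec<u8>, payload: &[u8]) {
    out.extend_from_slice(&(payload.len() as u32).to_be_bytes());
    out.extend_from_slice(payload);
}
fn read_frame(input: &[u8]) -> Option<(&[u8], &[u8])> {
    // Returns (payload, remaining bytes), or None on a short buffer.
    if input.len() < 4 {
        return None;
    }
    let len = u32::from_be_bytes([input[0], input[1], input[2], input[3]]) as usize;
    if input.len() < 4 + len {
        return None;
    }
    Some((&input[4..4 + len], &input[4 + len..]))
}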
fuzzing.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
constants,
peer::Peer,
protocols::wire::{
handshake::v1::{MessagingProtocolVersion, SupportedProtocols},
messaging::v1::{NetworkMessage, NetworkMessageSink},
},
testutils::fake_socket::ReadOnlyTestSocketVec,
transport::{Connection, ConnectionId, ConnectionMetadata},
ProtocolId,
};
use channel::{diem_channel, message_queues::QueueStyle};
use diem_config::{config::PeerRole, network_id::NetworkContext};
use diem_proptest_helpers::ValueGenerator;
use diem_time_service::TimeService;
use diem_types::{network_address::NetworkAddress, PeerId};
use futures::{executor::block_on, future, io::AsyncReadExt, sink::SinkExt, stream::StreamExt};
use memsocket::MemorySocket;
use netcore::transport::ConnectionOrigin;
use proptest::{arbitrary::any, collection::vec};
use std::time::Duration;
/// Generate a sequence of `NetworkMessage`, bcs serialize them, and write them
/// out to a buffer using our length-prefixed message codec.
pub fn generate_corpus(gen: &mut ValueGenerator) -> Vec<u8> {
let network_msgs = gen.generate(vec(any::<NetworkMessage>(), 1..20));
let (write_socket, mut read_socket) = MemorySocket::new_pair();
let mut writer = NetworkMessageSink::new(write_socket, constants::MAX_FRAME_SIZE, None);
// Write the `NetworkMessage`s to a fake socket
let f_send = async move {
for network_msg in &network_msgs {
writer.send(network_msg).await.unwrap();
}
};
// Read the serialized `NetworkMessage`s from the fake socket
let f_recv = async move {
let mut buf = Vec::new();
read_socket.read_to_end(&mut buf).await.unwrap();
buf
};
let (_, buf) = block_on(future::join(f_send, f_recv));
buf
}
/// Fuzz the `Peer` actor's inbound message handling.
///
/// For each fuzzer iteration, we spin up a new `Peer` actor and pipe the raw
/// fuzzer data into it. This mostly tests that the `Peer` inbound message handling
/// doesn't panic or leak memory when reading, deserializing, and handling messages
/// from remote peers.
pub fn fuzz(data: &[u8]) {
// Use the basic single-threaded runtime, since our current tokio version has
// a chance to leak memory and/or thread handles when using the threaded
// runtime and sometimes blocks when trying to shut down the runtime.
//
// https://github.com/tokio-rs/tokio/pull/2649
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap();
let executor = rt.handle().clone();
// We want to choose a constant peer id for _our_ peer id, since we will
// generate unbounded metrics otherwise and OOM during fuzzing.
let peer_id = PeerId::ZERO;
// However, we want to choose a random _remote_ peer id to ensure we _don't_
// have metrics logging the remote peer id (which would eventually OOM in
// production for public-facing nodes).
let remote_peer_id = PeerId::random();
// Mock data
let network_context = NetworkContext::mock_with_peer_id(peer_id);
let socket = ReadOnlyTestSocketVec::new(data.to_vec());
let metadata = ConnectionMetadata::new(
remote_peer_id,
ConnectionId::from(123),
NetworkAddress::mock(),
ConnectionOrigin::Inbound,
MessagingProtocolVersion::V1,
SupportedProtocols::from(
[
ProtocolId::ConsensusRpc,
ProtocolId::ConsensusDirectSend,
ProtocolId::MempoolDirectSend,
ProtocolId::StateSyncDirectSend,
ProtocolId::DiscoveryDirectSend,
ProtocolId::HealthCheckerRpc,
]
.iter(),
),
PeerRole::Unknown,
);
let connection = Connection { socket, metadata };
let (connection_notifs_tx, connection_notifs_rx) = channel::new_test(8);
let channel_size = 8;
let (peer_reqs_tx, peer_reqs_rx) = diem_channel::new(QueueStyle::FIFO, channel_size, None);
let (peer_notifs_tx, peer_notifs_rx) = diem_channel::new(QueueStyle::FIFO, channel_size, None);
// Spin up a new `Peer` actor
let peer = Peer::new(
network_context,
executor.clone(),
TimeService::mock(),
connection,
connection_notifs_tx,
peer_reqs_rx,
peer_notifs_tx,
Duration::from_millis(constants::INBOUND_RPC_TIMEOUT_MS),
constants::MAX_CONCURRENT_INBOUND_RPCS,
constants::MAX_CONCURRENT_OUTBOUND_RPCS,
constants::MAX_FRAME_SIZE,
None,
None,
);
executor.spawn(peer.start());
rt.block_on(async move {
// Wait for "remote" to disconnect (we read all data and socket read
// returns EOF), we read a disconnect request, or we fail to deserialize
// something.
connection_notifs_rx.collect::<Vec<_>>().await;
// ACK the "remote" d/c and drop our handle to the Peer actor. Then wait
// for all network notifs to drain out and finish.
drop(peer_reqs_tx);
peer_notifs_rx.collect::<Vec<_>>().await;
});
}
#[test]
fn | () {
let mut value_gen = ValueGenerator::deterministic();
for _ in 0..50 {
let corpus = generate_corpus(&mut value_gen);
fuzz(&corpus);
}
}
| test_peer_fuzzers | identifier_name |
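// An illustrative stand-in for the read-only fuzz socket used above: it
// serves one fixed byte buffer and then reports EOF (Ok(0)), which is
// what lets the Peer actor's read loop terminate once the fuzz input is
// exhausted. Assumption: the real testutils ReadOnlyTestSocketVec
// behaves along these lines; this sketch is not the actual type.
use futures::io::AsyncRead;
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
struct FixedReader {
    data: Vec<u8>,
    pos: usize,
}
impl AsyncRead for FixedReader {
    fn poll_read(
        self: Pin<&mut Self>,
        _cx: &mut Context<'_>,
        buf: &mut [u8],
    ) -> Poll<io::Result<usize>> {
        let this = self.get_mut();
        let n = buf.len().min(this.data.len() - this.pos);
        buf[..n].copy_from_slice(&this.data[this.pos..this.pos + n]);
        this.pos += n;
        Poll::Ready(Ok(n)) // n == 0 once exhausted, i.e. EOF
    }
}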
config_flow.py | """Config flow for the Daikin platform."""
import asyncio
import logging
from uuid import uuid4
from aiohttp import ClientError, web_exceptions
from async_timeout import timeout
from pydaikin.daikin_base import Appliance
from pydaikin.discovery import Discovery
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PASSWORD
from .const import CONF_UUID, DOMAIN, KEY_MAC, TIMEOUT
_LOGGER = logging.getLogger(__name__)
class FlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
VERSION = 1
def __init__(self):
"""Initialize the Daikin config flow."""
self.host = None
@property
def schema(self):
"""Return current schema."""
return vol.Schema(
{
vol.Required(CONF_HOST, default=self.host): str,
vol.Optional(CONF_API_KEY): str,
vol.Optional(CONF_PASSWORD): str,
}
)
async def | (self, host, mac, key=None, uuid=None, password=None):
"""Register new entry."""
if not self.unique_id:
await self.async_set_unique_id(mac)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=host,
data={
CONF_HOST: host,
KEY_MAC: mac,
CONF_API_KEY: key,
CONF_UUID: uuid,
CONF_PASSWORD: password,
},
)
async def _create_device(self, host, key=None, password=None):
"""Create device."""
# BRP07Cxx devices need uuid together with key
if key:
uuid = str(uuid4())
else:
uuid = None
key = None
if not password:
password = None
try:
with timeout(TIMEOUT):
device = await Appliance.factory(
host,
self.hass.helpers.aiohttp_client.async_get_clientsession(),
key=key,
uuid=uuid,
password=password,
)
except asyncio.TimeoutError:
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "cannot_connect"},
)
except web_exceptions.HTTPForbidden:
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "invalid_auth"},
)
except ClientError:
_LOGGER.exception("ClientError")
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "unknown"},
)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected error creating device")
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "unknown"},
)
mac = device.mac
return await self._create_entry(host, mac, key, uuid, password)
async def async_step_user(self, user_input=None):
"""User initiated config flow."""
if user_input is None:
return self.async_show_form(step_id="user", data_schema=self.schema)
return await self._create_device(
user_input[CONF_HOST],
user_input.get(CONF_API_KEY),
user_input.get(CONF_PASSWORD),
)
async def async_step_zeroconf(self, discovery_info):
"""Prepare configuration for a discovered Daikin device."""
_LOGGER.debug("Zeroconf user_input: %s", discovery_info)
devices = Discovery().poll(ip=discovery_info[CONF_HOST])
if not devices:
_LOGGER.debug(
"Could not find MAC-address for %s,"
" make sure the required UDP ports are open (see integration documentation)",
discovery_info[CONF_HOST],
)
return self.async_abort(reason="cannot_connect")
await self.async_set_unique_id(next(iter(devices))[KEY_MAC])
self._abort_if_unique_id_configured()
self.host = discovery_info[CONF_HOST]
return await self.async_step_user()
| _create_entry | identifier_name |
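# A hedged sketch of how the schema property above behaves, with literal
# strings standing in for the homeassistant.const imports (an assumption
# for illustration). Valid input passes through; a missing required key
# raises vol.Invalid.
import voluptuous as vol
CONF_HOST, CONF_API_KEY, CONF_PASSWORD = "host", "api_key", "password"
schema = vol.Schema(
    {
        vol.Required(CONF_HOST): str,
        vol.Optional(CONF_API_KEY): str,
        vol.Optional(CONF_PASSWORD): str,
    }
)
print(schema({CONF_HOST: "192.168.1.10"}))  # {'host': '192.168.1.10'}
# schema({}) raises vol.Invalid because CONF_HOST is required.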
config_flow.py | """Config flow for the Daikin platform."""
import asyncio
import logging
from uuid import uuid4
from aiohttp import ClientError, web_exceptions
from async_timeout import timeout
from pydaikin.daikin_base import Appliance
from pydaikin.discovery import Discovery
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PASSWORD
from .const import CONF_UUID, DOMAIN, KEY_MAC, TIMEOUT
_LOGGER = logging.getLogger(__name__)
class FlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
VERSION = 1
def __init__(self):
"""Initialize the Daikin config flow."""
self.host = None
@property
def schema(self):
"""Return current schema."""
return vol.Schema(
{
vol.Required(CONF_HOST, default=self.host): str,
vol.Optional(CONF_API_KEY): str,
vol.Optional(CONF_PASSWORD): str,
}
)
async def _create_entry(self, host, mac, key=None, uuid=None, password=None):
"""Register new entry."""
if not self.unique_id:
await self.async_set_unique_id(mac)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=host,
data={
CONF_HOST: host,
KEY_MAC: mac,
CONF_API_KEY: key,
CONF_UUID: uuid,
CONF_PASSWORD: password,
},
)
async def _create_device(self, host, key=None, password=None):
"""Create device."""
# BRP07Cxx devices need uuid together with key
if key:
uuid = str(uuid4())
else:
uuid = None
key = None
if not password:
|
try:
with timeout(TIMEOUT):
device = await Appliance.factory(
host,
self.hass.helpers.aiohttp_client.async_get_clientsession(),
key=key,
uuid=uuid,
password=password,
)
except asyncio.TimeoutError:
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "cannot_connect"},
)
except web_exceptions.HTTPForbidden:
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "invalid_auth"},
)
except ClientError:
_LOGGER.exception("ClientError")
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "unknown"},
)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected error creating device")
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "unknown"},
)
mac = device.mac
return await self._create_entry(host, mac, key, uuid, password)
async def async_step_user(self, user_input=None):
"""User initiated config flow."""
if user_input is None:
return self.async_show_form(step_id="user", data_schema=self.schema)
return await self._create_device(
user_input[CONF_HOST],
user_input.get(CONF_API_KEY),
user_input.get(CONF_PASSWORD),
)
async def async_step_zeroconf(self, discovery_info):
"""Prepare configuration for a discovered Daikin device."""
_LOGGER.debug("Zeroconf user_input: %s", discovery_info)
devices = Discovery().poll(ip=discovery_info[CONF_HOST])
if not devices:
_LOGGER.debug(
"Could not find MAC-address for %s,"
" make sure the required UDP ports are open (see integration documentation)",
discovery_info[CONF_HOST],
)
return self.async_abort(reason="cannot_connect")
await self.async_set_unique_id(next(iter(devices))[KEY_MAC])
self._abort_if_unique_id_configured()
self.host = discovery_info[CONF_HOST]
return await self.async_step_user()
| password = None | conditional_block |
config_flow.py | """Config flow for the Daikin platform."""
import asyncio
import logging
from uuid import uuid4
from aiohttp import ClientError, web_exceptions
from async_timeout import timeout
from pydaikin.daikin_base import Appliance
from pydaikin.discovery import Discovery
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PASSWORD
from .const import CONF_UUID, DOMAIN, KEY_MAC, TIMEOUT
_LOGGER = logging.getLogger(__name__)
class FlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
| """Register new entry."""
if not self.unique_id:
await self.async_set_unique_id(mac)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=host,
data={
CONF_HOST: host,
KEY_MAC: mac,
CONF_API_KEY: key,
CONF_UUID: uuid,
CONF_PASSWORD: password,
},
)
async def _create_device(self, host, key=None, password=None):
"""Create device."""
# BRP07Cxx devices need uuid together with key
if key:
uuid = str(uuid4())
else:
uuid = None
key = None
if not password:
password = None
try:
with timeout(TIMEOUT):
device = await Appliance.factory(
host,
self.hass.helpers.aiohttp_client.async_get_clientsession(),
key=key,
uuid=uuid,
password=password,
)
except asyncio.TimeoutError:
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "cannot_connect"},
)
except web_exceptions.HTTPForbidden:
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "invalid_auth"},
)
except ClientError:
_LOGGER.exception("ClientError")
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "unknown"},
)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected error creating device")
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "unknown"},
)
mac = device.mac
return await self._create_entry(host, mac, key, uuid, password)
async def async_step_user(self, user_input=None):
"""User initiated config flow."""
if user_input is None:
return self.async_show_form(step_id="user", data_schema=self.schema)
return await self._create_device(
user_input[CONF_HOST],
user_input.get(CONF_API_KEY),
user_input.get(CONF_PASSWORD),
)
async def async_step_zeroconf(self, discovery_info):
"""Prepare configuration for a discovered Daikin device."""
_LOGGER.debug("Zeroconf user_input: %s", discovery_info)
devices = Discovery().poll(ip=discovery_info[CONF_HOST])
if not devices:
_LOGGER.debug(
"Could not find MAC-address for %s,"
" make sure the required UDP ports are open (see integration documentation)",
discovery_info[CONF_HOST],
)
return self.async_abort(reason="cannot_connect")
await self.async_set_unique_id(next(iter(devices))[KEY_MAC])
self._abort_if_unique_id_configured()
self.host = discovery_info[CONF_HOST]
return await self.async_step_user()
| """Handle a config flow."""
VERSION = 1
def __init__(self):
"""Initialize the Daikin config flow."""
self.host = None
@property
def schema(self):
"""Return current schema."""
return vol.Schema(
{
vol.Required(CONF_HOST, default=self.host): str,
vol.Optional(CONF_API_KEY): str,
vol.Optional(CONF_PASSWORD): str,
}
)
async def _create_entry(self, host, mac, key=None, uuid=None, password=None): | identifier_body |
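# Sketch of the timeout-and-translate pattern used by _create_device in
# these rows: bound the awaited probe and map asyncio.TimeoutError onto
# a user-facing error key. Note this sketch uses the current
# async_timeout API (async with); the code in this file uses the older
# plain `with` form, which earlier async_timeout releases accepted
# inside coroutines.
import asyncio
from async_timeout import timeout
async def probe(coro, seconds=10):
    try:
        async with timeout(seconds):
            return await coro, None
    except asyncio.TimeoutError:
        return None, "cannot_connect"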
config_flow.py | """Config flow for the Daikin platform."""
import asyncio
import logging
from uuid import uuid4
from aiohttp import ClientError, web_exceptions
from async_timeout import timeout
from pydaikin.daikin_base import Appliance
from pydaikin.discovery import Discovery
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PASSWORD
from .const import CONF_UUID, DOMAIN, KEY_MAC, TIMEOUT
_LOGGER = logging.getLogger(__name__)
class FlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
VERSION = 1
def __init__(self):
"""Initialize the Daikin config flow."""
self.host = None
@property
def schema(self):
"""Return current schema."""
return vol.Schema(
{
vol.Required(CONF_HOST, default=self.host): str,
vol.Optional(CONF_API_KEY): str,
vol.Optional(CONF_PASSWORD): str,
}
)
async def _create_entry(self, host, mac, key=None, uuid=None, password=None):
"""Register new entry."""
if not self.unique_id:
await self.async_set_unique_id(mac)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=host,
data={
CONF_HOST: host,
KEY_MAC: mac,
CONF_API_KEY: key,
CONF_UUID: uuid,
CONF_PASSWORD: password,
},
)
async def _create_device(self, host, key=None, password=None):
"""Create device."""
# BRP07Cxx devices need uuid together with key
if key:
uuid = str(uuid4())
else:
uuid = None
key = None
if not password:
password = None
try:
with timeout(TIMEOUT):
device = await Appliance.factory(
host,
self.hass.helpers.aiohttp_client.async_get_clientsession(),
key=key,
uuid=uuid,
password=password,
)
except asyncio.TimeoutError:
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "cannot_connect"},
)
except web_exceptions.HTTPForbidden:
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "invalid_auth"},
)
except ClientError:
_LOGGER.exception("ClientError")
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "unknown"},
)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected error creating device")
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "unknown"},
)
mac = device.mac
return await self._create_entry(host, mac, key, uuid, password)
async def async_step_user(self, user_input=None):
"""User initiated config flow."""
if user_input is None: | return await self._create_device(
user_input[CONF_HOST],
user_input.get(CONF_API_KEY),
user_input.get(CONF_PASSWORD),
)
async def async_step_zeroconf(self, discovery_info):
"""Prepare configuration for a discovered Daikin device."""
_LOGGER.debug("Zeroconf user_input: %s", discovery_info)
devices = Discovery().poll(ip=discovery_info[CONF_HOST])
if not devices:
_LOGGER.debug(
"Could not find MAC-address for %s,"
" make sure the required UDP ports are open (see integration documentation)",
discovery_info[CONF_HOST],
)
return self.async_abort(reason="cannot_connect")
await self.async_set_unique_id(next(iter(devices))[KEY_MAC])
self._abort_if_unique_id_configured()
self.host = discovery_info[CONF_HOST]
return await self.async_step_user() | return self.async_show_form(step_id="user", data_schema=self.schema) | random_line_split |
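# Sketch of the unique-ID guard exercised in both flows above: a device
# may only be configured once, keyed by MAC. This is a hypothetical
# minimal reimplementation; the real bookkeeping lives in
# config_entries.ConfigFlow (async_set_unique_id plus
# _abort_if_unique_id_configured).
configured_macs = set()
def claim_unique_id(mac):
    """Return False if this MAC was already claimed, else claim it."""
    if mac in configured_macs:
        return False
    configured_macs.add(mac)
    return True
assert claim_unique_id("AA:BB:CC:DD:EE:FF")
assert not claim_unique_id("AA:BB:CC:DD:EE:FF")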
try-catch-before-try.js | // Copyright (C) 2015 the V8 project authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
es6id: 25.3.1.3
description: >
When a generator is paused before a `try..catch` statement, `return` should
interrupt control flow as if a `return` statement had appeared at that
location in the function body.
---*/
function* g() {
yield;
try {
$ERROR('This code is unreachable (within `try` block)');
} catch (e) {
throw e;
}
$ERROR('This code is unreachable (following `try` statement)');
}
var iter = g();
var result; | assert.sameValue(result.value, 45, 'Result `value` following `return`');
assert.sameValue(result.done, true, 'Result `done` flag following `return`');
result = iter.next();
assert.sameValue(result.value,
undefined, 'Result `value` is undefined when complete'
);
assert.sameValue(
result.done, true, 'Result `done` flag is `true` when complete'
); |
iter.next();
result = iter.return(45); | random_line_split |
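// A small companion example of the semantics under test: calling
// iterator.return(v) on a paused generator behaves like a `return v;`
// statement at the paused yield, so the generator completes and later
// next() calls report { value: undefined, done: true }.
function* g2() {
  yield 1;
  yield 2;
}
var it = g2();
it.next();              // { value: 1, done: false }
var r = it.return(99);  // acts like `return 99;` at the paused yield
// r.value === 99 and r.done === true; it.next().done === true afterwards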
owned_slice.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
//! A replacement for `Box<[T]>` that cbindgen can understand.
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
use std::marker::PhantomData;
use std::ops::{Deref, DerefMut};
use std::ptr::NonNull;
use std::{fmt, iter, mem, slice};
use to_shmem::{SharedMemoryBuilder, ToShmem};
/// A struct that basically replaces a `Box<[T]>`, but which cbindgen can
/// understand.
///
/// We could rely on the struct layout of `Box<[T]>` per:
///
/// https://github.com/rust-lang/unsafe-code-guidelines/blob/master/reference/src/layout/pointers.md
///
/// But handling fat pointers with cbindgen both in structs and argument
/// positions more generally is a bit tricky.
///
/// cbindgen:derive-eq=false
/// cbindgen:derive-neq=false
#[repr(C)]
pub struct OwnedSlice<T: Sized> {
ptr: NonNull<T>,
len: usize,
_phantom: PhantomData<T>,
}
impl<T: Sized> Default for OwnedSlice<T> {
#[inline]
fn default() -> Self {
Self {
len: 0,
ptr: NonNull::dangling(),
_phantom: PhantomData,
}
}
}
impl<T: Sized> Drop for OwnedSlice<T> {
#[inline]
fn drop(&mut self) {
if self.len != 0 {
let _ = mem::replace(self, Self::default()).into_vec();
}
}
}
unsafe impl<T: Sized + Send> Send for OwnedSlice<T> {}
unsafe impl<T: Sized + Sync> Sync for OwnedSlice<T> {}
impl<T: Clone> Clone for OwnedSlice<T> {
#[inline]
fn clone(&self) -> Self {
Self::from_slice(&**self)
}
}
impl<T: fmt::Debug> fmt::Debug for OwnedSlice<T> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.deref().fmt(formatter)
}
}
impl<T: PartialEq> PartialEq for OwnedSlice<T> {
fn eq(&self, other: &Self) -> bool {
self.deref().eq(other.deref())
}
}
impl<T: Eq> Eq for OwnedSlice<T> {}
impl<T: Sized> OwnedSlice<T> {
/// Convert the OwnedSlice into a boxed slice.
#[inline]
pub fn into_box(self) -> Box<[T]> {
self.into_vec().into_boxed_slice()
}
/// Convert the OwnedSlice into a Vec.
#[inline]
pub fn into_vec(self) -> Vec<T> {
let ret = unsafe { Vec::from_raw_parts(self.ptr.as_ptr(), self.len, self.len) };
mem::forget(self);
ret
}
/// Iterate over all the elements in the slice taking ownership of them.
#[inline]
pub fn into_iter(self) -> impl Iterator<Item = T> + ExactSizeIterator {
self.into_vec().into_iter()
}
/// Convert the regular slice into an owned slice.
#[inline]
pub fn from_slice(s: &[T]) -> Self
where
T: Clone,
{
Self::from(s.to_vec())
}
}
impl<T> Deref for OwnedSlice<T> {
type Target = [T];
#[inline(always)]
fn deref(&self) -> &Self::Target {
unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
}
}
impl<T> DerefMut for OwnedSlice<T> {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
}
}
impl<T> From<Box<[T]>> for OwnedSlice<T> {
#[inline]
fn | (mut b: Box<[T]>) -> Self {
let len = b.len();
let ptr = unsafe { NonNull::new_unchecked(b.as_mut_ptr()) };
mem::forget(b);
Self {
len,
ptr,
_phantom: PhantomData,
}
}
}
impl<T> From<Vec<T>> for OwnedSlice<T> {
#[inline]
fn from(b: Vec<T>) -> Self {
Self::from(b.into_boxed_slice())
}
}
impl<T: Sized> MallocShallowSizeOf for OwnedSlice<T> {
fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
unsafe { ops.malloc_size_of(self.ptr.as_ptr()) }
}
}
impl<T: MallocSizeOf + Sized> MallocSizeOf for OwnedSlice<T> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.shallow_size_of(ops) + (**self).size_of(ops)
}
}
impl<T: ToShmem + Sized> ToShmem for OwnedSlice<T> {
fn to_shmem(&self, builder: &mut SharedMemoryBuilder) -> mem::ManuallyDrop<Self> {
unsafe {
let dest = to_shmem::to_shmem_slice(self.iter(), builder);
mem::ManuallyDrop::new(Self::from(Box::from_raw(dest)))
}
}
}
impl<T> iter::FromIterator<T> for OwnedSlice<T> {
#[inline]
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
Vec::from_iter(iter).into()
}
}
| from | identifier_name |
owned_slice.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
//! A replacement for `Box<[T]>` that cbindgen can understand.
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
use std::marker::PhantomData;
use std::ops::{Deref, DerefMut};
use std::ptr::NonNull;
use std::{fmt, iter, mem, slice};
use to_shmem::{SharedMemoryBuilder, ToShmem};
/// A struct that basically replaces a `Box<[T]>`, but which cbindgen can
/// understand.
///
/// We could rely on the struct layout of `Box<[T]>` per:
///
/// https://github.com/rust-lang/unsafe-code-guidelines/blob/master/reference/src/layout/pointers.md
///
/// But handling fat pointers with cbindgen both in structs and argument
/// positions more generally is a bit tricky.
///
/// cbindgen:derive-eq=false
/// cbindgen:derive-neq=false
#[repr(C)]
pub struct OwnedSlice<T: Sized> {
ptr: NonNull<T>,
len: usize,
_phantom: PhantomData<T>,
}
impl<T: Sized> Default for OwnedSlice<T> {
#[inline]
fn default() -> Self {
Self {
len: 0,
ptr: NonNull::dangling(),
_phantom: PhantomData,
}
}
}
impl<T: Sized> Drop for OwnedSlice<T> {
#[inline]
fn drop(&mut self) {
if self.len != 0 {
let _ = mem::replace(self, Self::default()).into_vec();
}
}
}
unsafe impl<T: Sized + Send> Send for OwnedSlice<T> {}
unsafe impl<T: Sized + Sync> Sync for OwnedSlice<T> {}
impl<T: Clone> Clone for OwnedSlice<T> {
#[inline]
fn clone(&self) -> Self {
Self::from_slice(&**self)
}
}
impl<T: fmt::Debug> fmt::Debug for OwnedSlice<T> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.deref().fmt(formatter)
}
}
impl<T: PartialEq> PartialEq for OwnedSlice<T> {
fn eq(&self, other: &Self) -> bool {
self.deref().eq(other.deref())
}
}
impl<T: Eq> Eq for OwnedSlice<T> {}
impl<T: Sized> OwnedSlice<T> {
/// Convert the OwnedSlice into a boxed slice.
#[inline]
pub fn into_box(self) -> Box<[T]> {
self.into_vec().into_boxed_slice()
}
/// Convert the OwnedSlice into a Vec.
#[inline]
pub fn into_vec(self) -> Vec<T> {
let ret = unsafe { Vec::from_raw_parts(self.ptr.as_ptr(), self.len, self.len) };
mem::forget(self);
ret
}
/// Iterate over all the elements in the slice taking ownership of them.
#[inline]
pub fn into_iter(self) -> impl Iterator<Item = T> + ExactSizeIterator {
self.into_vec().into_iter()
}
/// Convert the regular slice into an owned slice.
#[inline]
pub fn from_slice(s: &[T]) -> Self
where
T: Clone,
{
Self::from(s.to_vec())
}
}
impl<T> Deref for OwnedSlice<T> {
type Target = [T];
#[inline(always)]
fn deref(&self) -> &Self::Target {
unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
}
}
impl<T> DerefMut for OwnedSlice<T> {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target |
}
impl<T> From<Box<[T]>> for OwnedSlice<T> {
#[inline]
fn from(mut b: Box<[T]>) -> Self {
let len = b.len();
let ptr = unsafe { NonNull::new_unchecked(b.as_mut_ptr()) };
mem::forget(b);
Self {
len,
ptr,
_phantom: PhantomData,
}
}
}
impl<T> From<Vec<T>> for OwnedSlice<T> {
#[inline]
fn from(b: Vec<T>) -> Self {
Self::from(b.into_boxed_slice())
}
}
impl<T: Sized> MallocShallowSizeOf for OwnedSlice<T> {
fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
unsafe { ops.malloc_size_of(self.ptr.as_ptr()) }
}
}
impl<T: MallocSizeOf + Sized> MallocSizeOf for OwnedSlice<T> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.shallow_size_of(ops) + (**self).size_of(ops)
}
}
impl<T: ToShmem + Sized> ToShmem for OwnedSlice<T> {
fn to_shmem(&self, builder: &mut SharedMemoryBuilder) -> mem::ManuallyDrop<Self> {
unsafe {
let dest = to_shmem::to_shmem_slice(self.iter(), builder);
mem::ManuallyDrop::new(Self::from(Box::from_raw(dest)))
}
}
}
impl<T> iter::FromIterator<T> for OwnedSlice<T> {
#[inline]
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
Vec::from_iter(iter).into()
}
}
| {
unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
} | identifier_body |
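// A short usage sketch of the API shown in these rows (hypothetical
// values): build an OwnedSlice from a Vec via From, read it through
// Deref, and recover ownership with into_vec.
fn owned_slice_round_trip() {
    let owned: OwnedSlice<u32> = vec![1, 2, 3].into();
    assert_eq!(&*owned, &[1, 2, 3][..]);
    let back: Vec<u32> = owned.into_vec();
    assert_eq!(back, vec![1, 2, 3]);
}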
owned_slice.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
| use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
use std::marker::PhantomData;
use std::ops::{Deref, DerefMut};
use std::ptr::NonNull;
use std::{fmt, iter, mem, slice};
use to_shmem::{SharedMemoryBuilder, ToShmem};
/// A struct that basically replaces a `Box<[T]>`, but which cbindgen can
/// understand.
///
/// We could rely on the struct layout of `Box<[T]>` per:
///
/// https://github.com/rust-lang/unsafe-code-guidelines/blob/master/reference/src/layout/pointers.md
///
/// But handling fat pointers with cbindgen both in structs and argument
/// positions more generally is a bit tricky.
///
/// cbindgen:derive-eq=false
/// cbindgen:derive-neq=false
#[repr(C)]
pub struct OwnedSlice<T: Sized> {
ptr: NonNull<T>,
len: usize,
_phantom: PhantomData<T>,
}
impl<T: Sized> Default for OwnedSlice<T> {
#[inline]
fn default() -> Self {
Self {
len: 0,
ptr: NonNull::dangling(),
_phantom: PhantomData,
}
}
}
impl<T: Sized> Drop for OwnedSlice<T> {
#[inline]
fn drop(&mut self) {
if self.len != 0 {
let _ = mem::replace(self, Self::default()).into_vec();
}
}
}
unsafe impl<T: Sized + Send> Send for OwnedSlice<T> {}
unsafe impl<T: Sized + Sync> Sync for OwnedSlice<T> {}
impl<T: Clone> Clone for OwnedSlice<T> {
#[inline]
fn clone(&self) -> Self {
Self::from_slice(&**self)
}
}
impl<T: fmt::Debug> fmt::Debug for OwnedSlice<T> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.deref().fmt(formatter)
}
}
impl<T: PartialEq> PartialEq for OwnedSlice<T> {
fn eq(&self, other: &Self) -> bool {
self.deref().eq(other.deref())
}
}
impl<T: Eq> Eq for OwnedSlice<T> {}
impl<T: Sized> OwnedSlice<T> {
/// Convert the OwnedSlice into a boxed slice.
#[inline]
pub fn into_box(self) -> Box<[T]> {
self.into_vec().into_boxed_slice()
}
/// Convert the OwnedSlice into a Vec.
#[inline]
pub fn into_vec(self) -> Vec<T> {
let ret = unsafe { Vec::from_raw_parts(self.ptr.as_ptr(), self.len, self.len) };
mem::forget(self);
ret
}
/// Iterate over all the elements in the slice taking ownership of them.
#[inline]
pub fn into_iter(self) -> impl Iterator<Item = T> + ExactSizeIterator {
self.into_vec().into_iter()
}
/// Convert the regular slice into an owned slice.
#[inline]
pub fn from_slice(s: &[T]) -> Self
where
T: Clone,
{
Self::from(s.to_vec())
}
}
impl<T> Deref for OwnedSlice<T> {
type Target = [T];
#[inline(always)]
fn deref(&self) -> &Self::Target {
unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
}
}
impl<T> DerefMut for OwnedSlice<T> {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
}
}
impl<T> From<Box<[T]>> for OwnedSlice<T> {
#[inline]
fn from(mut b: Box<[T]>) -> Self {
let len = b.len();
let ptr = unsafe { NonNull::new_unchecked(b.as_mut_ptr()) };
mem::forget(b);
Self {
len,
ptr,
_phantom: PhantomData,
}
}
}
impl<T> From<Vec<T>> for OwnedSlice<T> {
#[inline]
fn from(b: Vec<T>) -> Self {
Self::from(b.into_boxed_slice())
}
}
impl<T: Sized> MallocShallowSizeOf for OwnedSlice<T> {
fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
unsafe { ops.malloc_size_of(self.ptr.as_ptr()) }
}
}
impl<T: MallocSizeOf + Sized> MallocSizeOf for OwnedSlice<T> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.shallow_size_of(ops) + (**self).size_of(ops)
}
}
impl<T: ToShmem + Sized> ToShmem for OwnedSlice<T> {
fn to_shmem(&self, builder: &mut SharedMemoryBuilder) -> mem::ManuallyDrop<Self> {
unsafe {
let dest = to_shmem::to_shmem_slice(self.iter(), builder);
mem::ManuallyDrop::new(Self::from(Box::from_raw(dest)))
}
}
}
impl<T> iter::FromIterator<T> for OwnedSlice<T> {
#[inline]
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
Vec::from_iter(iter).into()
}
} | #![allow(unsafe_code)]
//! A replacement for `Box<[T]>` that cbindgen can understand.
| random_line_split |
owned_slice.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
//! A replacement for `Box<[T]>` that cbindgen can understand.
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
use std::marker::PhantomData;
use std::ops::{Deref, DerefMut};
use std::ptr::NonNull;
use std::{fmt, iter, mem, slice};
use to_shmem::{SharedMemoryBuilder, ToShmem};
/// A struct that basically replaces a `Box<[T]>`, but which cbindgen can
/// understand.
///
/// We could rely on the struct layout of `Box<[T]>` per:
///
/// https://github.com/rust-lang/unsafe-code-guidelines/blob/master/reference/src/layout/pointers.md
///
/// But handling fat pointers with cbindgen both in structs and argument
/// positions more generally is a bit tricky.
///
/// cbindgen:derive-eq=false
/// cbindgen:derive-neq=false
#[repr(C)]
pub struct OwnedSlice<T: Sized> {
ptr: NonNull<T>,
len: usize,
_phantom: PhantomData<T>,
}
impl<T: Sized> Default for OwnedSlice<T> {
#[inline]
fn default() -> Self {
Self {
len: 0,
ptr: NonNull::dangling(),
_phantom: PhantomData,
}
}
}
impl<T: Sized> Drop for OwnedSlice<T> {
#[inline]
fn drop(&mut self) {
if self.len != 0 |
}
}
unsafe impl<T: Sized + Send> Send for OwnedSlice<T> {}
unsafe impl<T: Sized + Sync> Sync for OwnedSlice<T> {}
impl<T: Clone> Clone for OwnedSlice<T> {
#[inline]
fn clone(&self) -> Self {
Self::from_slice(&**self)
}
}
impl<T: fmt::Debug> fmt::Debug for OwnedSlice<T> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.deref().fmt(formatter)
}
}
impl<T: PartialEq> PartialEq for OwnedSlice<T> {
fn eq(&self, other: &Self) -> bool {
self.deref().eq(other.deref())
}
}
impl<T: Eq> Eq for OwnedSlice<T> {}
impl<T: Sized> OwnedSlice<T> {
/// Convert the OwnedSlice into a boxed slice.
#[inline]
pub fn into_box(self) -> Box<[T]> {
self.into_vec().into_boxed_slice()
}
/// Convert the OwnedSlice into a Vec.
#[inline]
pub fn into_vec(self) -> Vec<T> {
let ret = unsafe { Vec::from_raw_parts(self.ptr.as_ptr(), self.len, self.len) };
mem::forget(self);
ret
}
/// Iterate over all the elements in the slice taking ownership of them.
#[inline]
pub fn into_iter(self) -> impl Iterator<Item = T> + ExactSizeIterator {
self.into_vec().into_iter()
}
/// Convert the regular slice into an owned slice.
#[inline]
pub fn from_slice(s: &[T]) -> Self
where
T: Clone,
{
Self::from(s.to_vec())
}
}
impl<T> Deref for OwnedSlice<T> {
type Target = [T];
#[inline(always)]
fn deref(&self) -> &Self::Target {
unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
}
}
impl<T> DerefMut for OwnedSlice<T> {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
}
}
impl<T> From<Box<[T]>> for OwnedSlice<T> {
#[inline]
fn from(mut b: Box<[T]>) -> Self {
let len = b.len();
let ptr = unsafe { NonNull::new_unchecked(b.as_mut_ptr()) };
mem::forget(b);
Self {
len,
ptr,
_phantom: PhantomData,
}
}
}
impl<T> From<Vec<T>> for OwnedSlice<T> {
#[inline]
fn from(b: Vec<T>) -> Self {
Self::from(b.into_boxed_slice())
}
}
impl<T: Sized> MallocShallowSizeOf for OwnedSlice<T> {
fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
unsafe { ops.malloc_size_of(self.ptr.as_ptr()) }
}
}
impl<T: MallocSizeOf + Sized> MallocSizeOf for OwnedSlice<T> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.shallow_size_of(ops) + (**self).size_of(ops)
}
}
impl<T: ToShmem + Sized> ToShmem for OwnedSlice<T> {
fn to_shmem(&self, builder: &mut SharedMemoryBuilder) -> mem::ManuallyDrop<Self> {
unsafe {
let dest = to_shmem::to_shmem_slice(self.iter(), builder);
mem::ManuallyDrop::new(Self::from(Box::from_raw(dest)))
}
}
}
impl<T> iter::FromIterator<T> for OwnedSlice<T> {
#[inline]
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
Vec::from_iter(iter).into()
}
}
| {
let _ = mem::replace(self, Self::default()).into_vec();
} | conditional_block |
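// The Drop impl above uses a swap-then-rebuild idiom: mem::replace swaps
// in the default (dangling pointer, len 0) value, into_vec rebuilds the
// original allocation as a Vec, and dropping that Vec frees the memory
// exactly once. A self-contained sketch of the same idea with std types:
use std::mem;
struct Holder {
    data: Vec<u8>,
}
impl Drop for Holder {
    fn drop(&mut self) {
        // mem::take swaps in an empty Vec; the taken Vec drops at the
        // end of this scope and releases the heap allocation.
        let taken = mem::take(&mut self.data);
        drop(taken);
    }
}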
lib.rs | ();
for entry in reader.find_array("disassembly") {
let address = entry.find_u64("address").ok().unwrap();
let line = entry.find_string("line").ok().unwrap();
let mut regs_read = String::new();
let mut regs_write = String::new();
entry.find_string("registers_read").map(|regs| {
regs_read = regs.to_owned();
}).ok();
entry.find_string("registers_write").map(|regs| {
regs_write = regs.to_owned();
}).ok();
self.lines.push(Line {
opcode: line.to_owned(),
regs_read: regs_read,
regs_write: regs_write,
address: address,
});
}
}
///
/// Calculate how many visible lines we have
///
fn get_visible_lines_count(ui: &Ui) -> usize {
let (_, height) = ui.get_window_size();
let text_height = ui.get_text_line_height_with_spacing();
// - 1.0 for title text. Would be better to get the cursor pos here instead
let visible_lines = (height / text_height) - 1.0;
// + 0.5 to round up
(visible_lines + 0.5) as usize
}
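// Worked example with assumed numbers: a 600.0 px window and an 18.0 px
// line height give (600.0 / 18.0) - 1.0 = 32.33 lines once the title row
// is reserved, and (32.33 + 0.5) as usize truncates to 32 visible lines.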
fn request_disassembly(&mut self, ui: &mut Ui, location: u64, writer: &mut Writer) {
let visible_lines = Self::get_visible_lines_count(ui) as u64;
// check if we have the location within all lines, then we don't need to request more
for line in &self.lines {
if line.address == location {
return;
}
}
self.reset_to_center = true;
writer.event_begin(EVENT_GET_DISASSEMBLY as u16);
writer.write_u64("address_start", location - (visible_lines * 4));
writer.write_u32("instruction_count", (visible_lines * 4) as u32);
writer.event_end();
println!("requested {}", visible_lines * 4);
}
fn color_text_reg_selection(ui: &Ui, regs_use: &Vec<&str>, line: &Line, text_height: f32) {
let (cx, cy) = ui.get_cursor_screen_pos();
let mut color_index = 0;
// TODO: Allocs memory, fix
let line_text = format!(" 0x{:x} {}", line.address, line.opcode);
let colors = [
0x00b27474,
0x00b28050,
0x00a9b250,
0x0060b250,
0x004fb292,
0x004f71b2,
0x008850b2,
0x00b25091,
];
//let mut color_index = 0;
//let font_size = 14.0;
// TODO: Offset here is hardcoded with given hex size, this should be fixed
//let start_offset = font_size * 11.0; // 0x00000000 "
for reg in regs_use {
let color = colors[color_index & 7];
line_text.find(reg).map(|offset| {
let (tx, _) = ui.calc_text_size(&line_text, offset);
ui.fill_rect(cx + tx, cy, 22.0, text_height, Color::from_au32(200, color));
});
color_index += 1;
}
ui.text(&line_text);
}
fn toggle_breakpoint(&mut self, writer: &mut Writer) {
let address = self.cursor;
for i in (0..self.breakpoints.len()).rev() {
if self.breakpoints[i].address == address {
writer.event_begin(EVENT_DELETE_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
self.breakpoints.swap_remove(i);
return;
}
}
writer.event_begin(EVENT_SET_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
println!("adding breakpoint");
// TODO: We shouldn't really add the breakpoint here but wait for reply
// from the backend that we actually managed to set the breakpoint.
// +bonus would be a "progress" icon here instead that it's being set.
self.breakpoints.push(Breakpoint { address: address } );
}
fn has_breakpoint(&self, address: u64) -> bool {
self.breakpoints.iter().find(|bp| bp.address == address).is_some()
}
/*
fn set_cursor_at(&mut self, pos: i32) {
// if we don't have enough lines we need to fetch more
if pos < 0 {
return;
}
// need to fetch more data here also
if pos > self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
}
*/
fn scroll_cursor(&mut self, steps: i32) {
for (i, line) in self.lines.iter().enumerate() {
if line.address == self.cursor {
let pos = (i as i32) + steps;
// if we don't have enough lines we need to fetch more
if pos < 0 |
// need to fetch more data here also
if pos >= self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
return;
}
}
}
fn render_arrow(ui: &Ui, pos_x: f32, pos_y: f32, scale: f32) {
let color = Color::from_argb(255, 0, 180, 180);
ui.fill_rect(pos_x + (0.0 * scale),
pos_y + (0.25 * scale),
0.5 * scale,
0.5 * scale,
color);
// Wasn't able to get this to work with just one convex poly fill
let arrow: [Vec2; 3] = [
Vec2::new((0.50 * scale) + pos_x, (0.00 * scale) + pos_y),
Vec2::new((1.00 * scale) + pos_x, (0.50 * scale) + pos_y),
Vec2::new((0.50 * scale) + pos_x, (1.00 * scale) + pos_y),
];
ui.fill_convex_poly(&arrow, color, true);
}
fn render_ui(&mut self, ui: &mut Ui) {
if self.lines.len() == 0 {
return;
}
let (size_x, size_h) = ui.get_window_size();
let text_height = ui.get_text_line_height_with_spacing();
let mut regs = String::new();
let mut regs_pc_use = Vec::new();
//let font_size = ui.get_font_size();
// find registers for pc
for line in &self.lines {
if line.address == self.exception_location {
if line.regs_read.len() > 1 || line.regs_write.len() > 1 {
if line.regs_read.len() > 0 {
regs.push_str(&line.regs_read);
}
if line.regs_write.len() > 0 {
regs.push(' ');
regs.push_str(&line.regs_write);
}
let t = regs.trim_left();
regs_pc_use = t.split(' ').collect();
break;
}
}
}
for line in &self.lines {
let (cx, cy) = ui.get_cursor_screen_pos();
let bp_radius = self.breakpoint_radius;
if line.address == self.cursor {
if (cy - text_height) < 0.0 {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here(0.5);
self.reset_to_center = false;
} else {
ui.set_scroll_from_pos_y(cy - text_height, 0.0);
}
}
if cy > (size_h - text_height) {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here(0.5);
self.reset_to_center = false;
} else {
ui.set_scroll_from_pos_y(cy + text_height, 1.0);
}
}
ui.fill_rect(cx, cy, size_x, text_height, Color::from_argb(200, 0, 0, 127));
}
if regs_pc_use.len() > 0 {
Self::color_text_reg_selection(ui, ®s_pc_use, &line, text_height);
} else {
ui.text_fmt(format_args!(" 0x{:x} {}", line.address, line.opcode));
}
if self.has_breakpoint(line.address) {
ui.fill_circle(&Vec2{ x: cx + self.breakpoint_spacing + bp_radius, y: cy + bp_radius + 2.0},
bp_radius, Color::from_argb(255,0,0,140), 12 | {
return;
} | conditional_block |
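// toggle_breakpoint above removes entries with swap_remove, which is
// O(1) because it moves the last element into the vacated slot; element
// order is lost, which is fine for an unordered breakpoint list. A tiny
// self-contained sketch of that behavior:
fn swap_remove_demo() {
    let mut v = vec![10, 20, 30, 40];
    let removed = v.swap_remove(1);
    assert_eq!(removed, 20);
    assert_eq!(v, vec![10, 40, 30]);
}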
lib.rs | ();
for entry in reader.find_array("disassembly") {
let address = entry.find_u64("address").ok().unwrap();
let line = entry.find_string("line").ok().unwrap();
let mut regs_read = String::new();
let mut regs_write = String::new();
entry.find_string("registers_read").map(|regs| {
regs_read = regs.to_owned();
}).ok();
entry.find_string("registers_write").map(|regs| {
regs_write = regs.to_owned();
}).ok();
self.lines.push(Line {
opcode: line.to_owned(),
regs_read: regs_read,
regs_write: regs_write,
address: address,
});
}
}
///
/// Calculate how many visible lines we have
///
fn get_visible_lines_count(ui: &Ui) -> usize {
let (_, height) = ui.get_window_size();
let text_height = ui.get_text_line_height_with_spacing();
// - 1.0 for title text. Would be better to get the cursor pos here instead
let visible_lines = (height / text_height) - 1.0;
// + 0.5 to round up
(visible_lines + 0.5) as usize
}
fn request_disassembly(&mut self, ui: &mut Ui, location: u64, writer: &mut Writer) {
let visible_lines = Self::get_visible_lines_count(ui) as u64;
// check if we have the the location within all lines, then we don't need to request more
for line in &self.lines {
if line.address == location {
return;
}
}
self.reset_to_center = true;
writer.event_begin(EVENT_GET_DISASSEMBLY as u16);
writer.write_u64("address_start", location - (visible_lines * 4));
writer.write_u32("instruction_count", (visible_lines * 4) as u32);
writer.event_end();
println!("requsted {}", visible_lines * 10);
}
fn color_text_reg_selection(ui: &Ui, regs_use: &Vec<&str>, line: &Line, text_height: f32) {
let (cx, cy) = ui.get_cursor_screen_pos();
let mut color_index = 0;
// TODO: Allocs memory, fix
let line_text = format!(" 0x{:x} {}", line.address, line.opcode);
let colors = [
0x00b27474,
0x00b28050,
0x00a9b250,
0x0060b250,
0x004fb292,
0x004f71b2,
0x008850b2,
0x00b25091,
];
//let mut color_index = 0;
//let font_size = 14.0;
// TODO: Offset here is hardcoded with given hex size, this should be fixed
//let start_offset = font_size * 11.0; // 0x00000000 "
for reg in regs_use {
let color = colors[color_index & 7];
line_text.find(reg).map(|offset| {
let (tx, _) = ui.calc_text_size(&line_text, offset);
ui.fill_rect(cx + tx, cy, 22.0, text_height, Color::from_au32(200, color));
});
color_index += 1;
}
ui.text(&line_text);
}
fn | (&mut self, writer: &mut Writer) {
let address = self.cursor;
for i in (0..self.breakpoints.len()).rev() {
if self.breakpoints[i].address == address {
writer.event_begin(EVENT_DELETE_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
self.breakpoints.swap_remove(i);
return;
}
}
writer.event_begin(EVENT_SET_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
println!("adding breakpoint");
// TODO: We shouldn't really add the breakpoint here but wait for reply
// from the backend that we actually managed to set the breakpoint.
// +bonus would be a "progress" icon here instead that it's being set.
self.breakpoints.push(Breakpoint { address: address } );
}
fn has_breakpoint(&self, address: u64) -> bool {
self.breakpoints.iter().find(|bp| bp.address == address).is_some()
}
/*
fn set_cursor_at(&mut self, pos: i32) {
// if we don'n have enough lines we need to fetch more
if pos < 0 {
return;
}
// need to fetch more data here also
if pos > self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
}
*/
fn scroll_cursor(&mut self, steps: i32) {
for (i, line) in self.lines.iter().enumerate() {
if line.address == self.cursor {
let pos = (i as i32) + steps;
// if we don'n have enough lines we need to fetch more
if pos < 0 {
return;
}
// need to fetch more data here also
if pos >= self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
return;
}
}
}
fn render_arrow(ui: &Ui, pos_x: f32, pos_y: f32, scale: f32) {
let color = Color::from_argb(255, 0, 180, 180);
ui.fill_rect(pos_x + (0.0 * scale),
pos_y + (0.25 * scale),
0.5 * scale,
0.5 * scale,
color);
// Wasn't able to get this to work with just one convex poly fill
let arrow: [Vec2; 3] = [
Vec2::new((0.50 * scale) + pos_x, (0.00 * scale) + pos_y),
Vec2::new((1.00 * scale) + pos_x, (0.50 * scale) + pos_y),
Vec2::new((0.50 * scale) + pos_x, (1.00 * scale) + pos_y),
];
ui.fill_convex_poly(&arrow, color, true);
}
fn render_ui(&mut self, ui: &mut Ui) {
if self.lines.len() == 0 {
return;
}
let (size_x, size_h) = ui.get_window_size();
let text_height = ui.get_text_line_height_with_spacing();
let mut regs = String::new();
let mut regs_pc_use = Vec::new();
//let font_size = ui.get_font_size();
// find registers for pc
for line in &self.lines {
if line.address == self.exception_location {
if line.regs_read.len() > 1 || line.regs_write.len() > 1 {
if line.regs_read.len() > 0 {
regs.push_str(&line.regs_read);
}
if line.regs_write.len() > 0 {
regs.push(' ');
regs.push_str(&line.regs_write);
}
let t = regs.trim_left();
regs_pc_use = t.split(' ').collect();
break;
}
}
}
for line in &self.lines {
let (cx, cy) = ui.get_cursor_screen_pos();
let bp_radius = self.breakpoint_radius;
if line.address == self.cursor {
if (cy - text_height) < 0.0 {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here(0.5);
self.reset_to_center = false;
} else {
ui.set_scroll_from_pos_y(cy - text_height, 0.0);
}
}
if cy > (size_h - text_height) {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here(0.5);
self.reset_to_center = false;
} else {
ui.set_scroll_from_pos_y(cy + text_height, 1.0);
}
}
ui.fill_rect(cx, cy, size_x, text_height, Color::from_argb(200, 0, 0, 127));
}
if regs_pc_use.len() > 0 {
Self::color_text_reg_selection(ui, ®s_pc_use, &line, text_height);
} else {
ui.text_fmt(format_args!(" 0x{:x} {}", line.address, line.opcode));
}
if self.has_breakpoint(line.address) {
ui.fill_circle(&Vec2{ x: cx + self.breakpoint_spacing + bp_radius, y: cy + bp_radius + 2.0},
bp_radius, Color::from_argb(255,0,0,140), 12, | toggle_breakpoint | identifier_name |
lib.rs | opcode: String,
regs_write: String,
regs_read: String,
address: u64,
}
///
/// Breakpoint
///
struct Breakpoint {
address: u64,
}
///
/// Holds colors use for the disassembly view. This should be possible to configure later on.
///
/*
struct Colors {
breakpoint: Color,
step_cursor: Color,
cursor: Color,
address: Color,
_bytes: Color,
}
*/
struct DisassemblyView {
exception_location: u64,
has_made_step: bool,
cursor: u64,
breakpoint_radius: f32,
breakpoint_spacing: f32,
address_size: u8,
reset_to_center: bool,
lines: Vec<Line>,
breakpoints: Vec<Breakpoint>,
}
impl DisassemblyView {
fn set_disassembly(&mut self, reader: &mut Reader) {
self.lines.clear();
for entry in reader.find_array("disassembly") {
let address = entry.find_u64("address").ok().unwrap();
let line = entry.find_string("line").ok().unwrap();
let mut regs_read = String::new();
let mut regs_write = String::new();
entry.find_string("registers_read").map(|regs| {
regs_read = regs.to_owned();
}).ok();
entry.find_string("registers_write").map(|regs| {
regs_write = regs.to_owned();
}).ok();
self.lines.push(Line {
opcode: line.to_owned(),
regs_read: regs_read,
regs_write: regs_write,
address: address,
});
}
}
///
/// Calculate how many visible lines we have
///
fn get_visible_lines_count(ui: &Ui) -> usize {
let (_, height) = ui.get_window_size();
let text_height = ui.get_text_line_height_with_spacing();
// - 1.0 for title text. Would be better to get the cursor pos here instead
let visible_lines = (height / text_height) - 1.0;
// + 0.5 to round up
(visible_lines + 0.5) as usize
}
fn request_disassembly(&mut self, ui: &mut Ui, location: u64, writer: &mut Writer) {
let visible_lines = Self::get_visible_lines_count(ui) as u64;
// check if we have the location within all lines, then we don't need to request more
for line in &self.lines {
if line.address == location {
return;
}
}
self.reset_to_center = true;
writer.event_begin(EVENT_GET_DISASSEMBLY as u16);
writer.write_u64("address_start", location - (visible_lines * 4));
writer.write_u32("instruction_count", (visible_lines * 4) as u32);
writer.event_end();
println!("requested {}", visible_lines * 4);
}
fn color_text_reg_selection(ui: &Ui, regs_use: &Vec<&str>, line: &Line, text_height: f32) {
let (cx, cy) = ui.get_cursor_screen_pos();
let mut color_index = 0;
// TODO: Allocs memory, fix
let line_text = format!(" 0x{:x} {}", line.address, line.opcode);
let colors = [
0x00b27474,
0x00b28050,
0x00a9b250,
0x0060b250,
0x004fb292,
0x004f71b2,
0x008850b2,
0x00b25091,
];
//let mut color_index = 0;
//let font_size = 14.0;
// TODO: Offset here is hardcoded with given hex size, this should be fixed
//let start_offset = font_size * 11.0; // 0x00000000 "
for reg in regs_use {
let color = colors[color_index & 7];
line_text.find(reg).map(|offset| {
let (tx, _) = ui.calc_text_size(&line_text, offset);
ui.fill_rect(cx + tx, cy, 22.0, text_height, Color::from_au32(200, color));
});
color_index += 1;
}
ui.text(&line_text);
}
fn toggle_breakpoint(&mut self, writer: &mut Writer) {
let address = self.cursor;
for i in (0..self.breakpoints.len()).rev() {
if self.breakpoints[i].address == address {
writer.event_begin(EVENT_DELETE_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
self.breakpoints.swap_remove(i);
return;
}
}
writer.event_begin(EVENT_SET_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
println!("adding breakpoint");
// TODO: We shouldn't really add the breakpoint here but wait for reply
// from the backend that we actually managed to set the breakpoint.
// +bonus would be a "progress" icon here instead that it's being set.
self.breakpoints.push(Breakpoint { address: address } );
}
fn has_breakpoint(&self, address: u64) -> bool {
self.breakpoints.iter().find(|bp| bp.address == address).is_some()
}
/*
fn set_cursor_at(&mut self, pos: i32) {
// if we don't have enough lines we need to fetch more
if pos < 0 {
return;
}
// need to fetch more data here also
if pos > self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
}
*/
fn scroll_cursor(&mut self, steps: i32) {
for (i, line) in self.lines.iter().enumerate() {
if line.address == self.cursor {
let pos = (i as i32) + steps;
// if we don't have enough lines we need to fetch more
if pos < 0 {
return;
}
// need to fetch more data here also
if pos >= self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
return;
}
}
}
fn render_arrow(ui: &Ui, pos_x: f32, pos_y: f32, scale: f32) {
let color = Color::from_argb(255, 0, 180, 180);
ui.fill_rect(pos_x + (0.0 * scale),
pos_y + (0.25 * scale),
0.5 * scale,
0.5 * scale,
color);
// Wasn't able to get this to work with just one convex poly fill
let arrow: [Vec2; 3] = [
Vec2::new((0.50 * scale) + pos_x, (0.00 * scale) + pos_y),
Vec2::new((1.00 * scale) + pos_x, (0.50 * scale) + pos_y),
Vec2::new((0.50 * scale) + pos_x, (1.00 * scale) + pos_y),
];
ui.fill_convex_poly(&arrow, color, true);
}
fn render_ui(&mut self, ui: &mut Ui) {
if self.lines.len() == 0 {
return;
}
let (size_x, size_h) = ui.get_window_size();
let text_height = ui.get_text_line_height_with_spacing();
let mut regs = String::new();
let mut regs_pc_use = Vec::new();
//let font_size = ui.get_font_size();
// find registers for pc
for line in &self.lines {
if line.address == self.exception_location {
if line.regs_read.len() > 1 || line.regs_write.len() > 1 {
if line.regs_read.len() > 0 {
regs.push_str(&line.regs_read);
}
if line.regs_write.len() > 0 {
regs.push(' ');
regs.push_str(&line.regs_write);
}
let t = regs.trim_left();
regs_pc_use = t.split(' ').collect();
break;
}
}
}
for line in &self.lines {
let (cx, cy) = ui.get_cursor_screen_pos();
let bp_radius = self.breakpoint_radius;
if line.address == self.cursor {
if (cy - text_height) < 0.0 {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here(0.5);
self.reset_to_center = false;
} else {
ui.set_scroll_from_pos_y(cy - text_height, 0.0);
}
}
if cy > (size_h - text_height) {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here | use prodbg_api::*;
struct Line { | random_line_split |
|
lib.rs | Allocs memory, fix
let line_text = format!(" 0x{:x} {}", line.address, line.opcode);
let colors = [
0x00b27474,
0x00b28050,
0x00a9b250,
0x0060b250,
0x004fb292,
0x004f71b2,
0x008850b2,
0x00b25091,
];
//let mut color_index = 0;
//let font_size = 14.0;
// TODO: Offset here is hardcoded with given hex size, this should be fixed
//let start_offset = font_size * 11.0; // 0x00000000 "
for reg in regs_use {
let color = colors[color_index & 7];
line_text.find(reg).map(|offset| {
let (tx, _) = ui.calc_text_size(&line_text, offset);
ui.fill_rect(cx + tx, cy, 22.0, text_height, Color::from_au32(200, color));
});
color_index += 1;
}
ui.text(&line_text);
}
fn toggle_breakpoint(&mut self, writer: &mut Writer) {
let address = self.cursor;
for i in (0..self.breakpoints.len()).rev() {
if self.breakpoints[i].address == address {
writer.event_begin(EVENT_DELETE_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
self.breakpoints.swap_remove(i);
return;
}
}
writer.event_begin(EVENT_SET_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
println!("adding breakpoint");
// TODO: We shouldn't really add the breakpoint here but wait for reply
// from the backend that we actually managed to set the breakpoint.
// +bonus would be a "progress" icon here instead that it's being set.
self.breakpoints.push(Breakpoint { address: address } );
}
fn has_breakpoint(&self, address: u64) -> bool {
self.breakpoints.iter().find(|bp| bp.address == address).is_some()
}
/*
fn set_cursor_at(&mut self, pos: i32) {
// if we don't have enough lines we need to fetch more
if pos < 0 {
return;
}
// need to fetch more data here also
if pos >= self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
}
*/
fn scroll_cursor(&mut self, steps: i32) {
for (i, line) in self.lines.iter().enumerate() {
if line.address == self.cursor {
let pos = (i as i32) + steps;
// if we don't have enough lines we need to fetch more
if pos < 0 {
return;
}
// need to fetch more data here also
if pos >= self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
return;
}
}
}
fn render_arrow(ui: &Ui, pos_x: f32, pos_y: f32, scale: f32) {
let color = Color::from_argb(255, 0, 180, 180);
ui.fill_rect(pos_x + (0.0 * scale),
pos_y + (0.25 * scale),
0.5 * scale,
0.5 * scale,
color);
// Wasn't able to get this to work with just one convex poly fill
let arrow: [Vec2; 3] = [
Vec2::new((0.50 * scale) + pos_x, (0.00 * scale) + pos_y),
Vec2::new((1.00 * scale) + pos_x, (0.50 * scale) + pos_y),
Vec2::new((0.50 * scale) + pos_x, (1.00 * scale) + pos_y),
];
ui.fill_convex_poly(&arrow, color, true);
}
fn render_ui(&mut self, ui: &mut Ui) {
if self.lines.len() == 0 {
return;
}
let (size_x, size_h) = ui.get_window_size();
let text_height = ui.get_text_line_height_with_spacing();
let mut regs = String::new();
let mut regs_pc_use = Vec::new();
//let font_size = ui.get_font_size();
// find registers for pc
for line in &self.lines {
if line.address == self.exception_location {
if line.regs_read.len() > 1 || line.regs_write.len() > 1 {
if line.regs_read.len() > 0 {
regs.push_str(&line.regs_read);
}
if line.regs_write.len() > 0 {
regs.push(' ');
regs.push_str(&line.regs_write);
}
let t = regs.trim_left();
regs_pc_use = t.split(' ').collect();
break;
}
}
}
for line in &self.lines {
let (cx, cy) = ui.get_cursor_screen_pos();
let bp_radius = self.breakpoint_radius;
if line.address == self.cursor {
if (cy - text_height) < 0.0 {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here(0.5);
self.reset_to_center = false;
} else {
ui.set_scroll_from_pos_y(cy - text_height, 0.0);
}
}
if cy > (size_h - text_height) {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here(0.5);
self.reset_to_center = false;
} else {
ui.set_scroll_from_pos_y(cy + text_height, 1.0);
}
}
ui.fill_rect(cx, cy, size_x, text_height, Color::from_argb(200, 0, 0, 127));
}
if regs_pc_use.len() > 0 {
Self::color_text_reg_selection(ui, ®s_pc_use, &line, text_height);
} else {
ui.text_fmt(format_args!(" 0x{:x} {}", line.address, line.opcode));
}
if self.has_breakpoint(line.address) {
ui.fill_circle(&Vec2{ x: cx + self.breakpoint_spacing + bp_radius, y: cy + bp_radius + 2.0},
bp_radius, Color::from_argb(255,0,0,140), 12, false);
}
//println!("draw arrow {} {}", line.address, self.exception_location);
if line.address == self.exception_location {
Self::render_arrow(ui, cx, cy + 2.0, self.breakpoint_radius * 2.0);
}
}
self.has_made_step = false;
}
}
impl View for DisassemblyView {
fn new(_: &Ui, _: &Service) -> Self {
DisassemblyView {
exception_location: u64::max_value(),
cursor: 0xe003, //u64::max_value(),
breakpoint_radius: 10.0,
breakpoint_spacing: 6.0,
address_size: 4,
has_made_step: false,
lines: Vec::new(),
breakpoints: Vec::new(),
reset_to_center: false,
}
}
fn update(&mut self, ui: &mut Ui, reader: &mut Reader, writer: &mut Writer) {
for event in reader.get_events() {
match event {
EVENT_SET_EXCEPTION_LOCATION => {
let location = reader.find_u64("address").ok().unwrap();
reader.find_u8("address_size").ok().map(|address_size| {
self.address_size = address_size;
});
if self.exception_location != location {
self.request_disassembly(ui, location, writer);
self.has_made_step = true;
self.exception_location = location;
self.cursor = location;
}
}
EVENT_SET_DISASSEMBLY => {
self.set_disassembly(reader);
}
_ => (),
}
}
if ui.is_key_down(Key::F9) {
self.toggle_breakpoint(writer);
}
if ui.is_key_down(Key::Down) {
self.scroll_cursor(1);
}
if ui.is_key_down(Key::Up) {
self.scroll_cursor(-1);
}
if ui.is_key_down(Key::PageDown) {
self.scroll_cursor(8);
}
if ui.is_key_down(Key::PageUp) {
self.scroll_cursor(-8);
}
self.render_ui(ui);
}
}
#[no_mangle]
pub fn init_plugin(plugin_handler: &mut PluginHandler) | {
define_view_plugin!(PLUGIN, b"Disassembly2 View", DisassemblyView);
plugin_handler.register_view(&PLUGIN);
} | identifier_body |
|
yhAlert.js | /*
* @author paper
*/
function YHAlert(arg) | frag.appendChild(temp.firstChild);
doingCallback(frag);
setTimeout(arguments.callee, 0);
} else {
if (endCallback) endCallback(frag);
}
})();
},
ie = (function(){
var undef,
v = 3,
div = document.createElement('div'),
all = div.getElementsByTagName('i');
while (
div.innerHTML = '<!--[if gt IE ' + (++v) + ']><i></i><![endif]-->',
all[0]
);
return v > 4 ? v : undef;
}()),
remove = function(elem){
if (elem.parentNode) elem.parentNode.removeChild(elem);
},
getArea = function(){
return {
height: document.documentElement.clientHeight,
width: document.documentElement.clientWidth
}
},
getMax = function(){
var dd = document.documentElement;
return {
height: Math.max(dd.scrollHeight, dd.clientHeight),
width: Math.max(dd.scrollWidth, dd.clientWidth)
}
},
getScroll = function(){
return {
top: Math.max(document.documentElement.scrollTop, document.body.scrollTop),
left: Math.max(document.documentElement.scrollLeft, document.body.scrollLeft)
}
},
setStyle = function(elem, styles){
for (var i in styles) {
elem.style[i] = styles[i];
}
},
setOpacity = function(elem, level){
elem.filters ? elem.style.filter = 'alpha(opacity=' + level + ')' : elem.style.opacity = level / 100;
},
fIn = function(elem, callback){
var step = 3;
setOpacity(elem, 0);
elem.style.visibility = 'visible';
elem.style[way] = parseInt(elem.style[way]) - 100 / step + 'px';
var opacity = 0, t = setInterval(function(){
setOpacity(elem, opacity);
if (opacity >= 100) {
setOpacity(elem, 100);
clearInterval(t);
if (callback) callback.call(elem);
}
opacity += step;
elem.style[way] = parseInt(elem.style[way]) + 1 + 'px';
}, 1);
},
fOut = function(elem, callback){
elem.style.visibility = 'visible';
var step = 3, opacity = 100, t = setInterval(function(){
setOpacity(elem, opacity);
if (opacity <= 0) {
setOpacity(elem, 0);
elem.style.visibility = 'hidden';
clearInterval(t);
if (callback) callback.call(elem);
}
opacity -= step;
elem.style[way] = parseInt(elem.style[way]) + 1 + 'px';
}, 1);
};
(function(){
while ($('YHAlert')) {
remove($('YHAlert'));
};
asynInnerHTML(html, function(f){
bd.appendChild(f);
}, function(){
var YHAlert = $('YHAlert'),
YHAlert_in = $('YHAlert_in'),
YHAlert_p = $('YHAlert_p'),
w = getArea().width,
h = getArea().height,
st = getScroll().top,
YHAlert_height = parseInt(YHAlert.offsetHeight),
pos=ie==6?'absolute':'fixed',
t=ie==6?parseInt(st + h / 2 - YHAlert_height * 6):parseInt(h / 2 - YHAlert_height * 6);
setStyle(YHAlert, {
'borderRadius': '5px',
'MozBorderRadius': '5px',
'WebkitBorderRadius': '5px',
'boxShadow': '0 0 3px #ccc',
'MozBoxShadow': '0 0 3px #ccc',
'WebkitBoxShadow': '0 0 3px #ccc',
'left': parseInt(w / 2 - 80) + 'px',
'top': t+'px',
'position': pos,
'width': '160px',
'backgroundColor': '#F3F7FD',
'border': '1px solid #6ed3e3',
'zIndex': '99999'
});
setStyle(YHAlert_in, {
'borderRadius': '5px',
'MozBorderRadius': '5px',
'WebkitBorderRadius': '5px',
'backgroundColor':'#fefefe',
'padding':'15px 10px'
});
setStyle(YHAlert_p, {
'textAlign': 'left',
'fontSize': '14px',
'margin': '0',
'color': '#000',
'lineHeight': '140%'
});
fIn(YHAlert, function(){
setTimeout(function(){
fOut(YHAlert, function(){
remove(YHAlert);
});
}, time);
});
});
}());
};
| {
var obj = arg ||{},
msg = obj.msg || 'No alert message was provided!',
bd = document.getElementsByTagName('body')[0],
time = obj.time || 2000,
way = obj.way == 'leftToRight' ? 'left' : 'top',
$ = function(id){
return typeof id == 'string' ? document.getElementById(id) : id;
},
html = '<div id="YHAlert" style="visibility:hidden;"><div id="YHAlert_in">' +
'<p id="YHAlert_p">' +
msg +
'</p>' +
'</div></div>',
asynInnerHTML = function(HTML, doingCallback, endCallback){
var temp = document.createElement('div'),
frag = document.createDocumentFragment();
temp.innerHTML = HTML;
(function(){
if (temp.firstChild) { | identifier_body |
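// A minimal usage sketch (not part of the original yhAlert.js): every option
// is optional. `msg` is the text to show, `time` the display duration in ms,
// and `way` picks the slide direction ('leftToRight' vs. the default top-down).
YHAlert({ msg: 'Saved successfully', time: 3000, way: 'leftToRight' });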
yhAlert.js | /*
* @author paper
*/
function YHAlert(arg){
var obj = arg ||{},
msg = obj.msg || 'No alert message was provided!',
bd = document.getElementsByTagName('body')[0],
time = obj.time || 2000,
way = obj.way == 'leftToRight' ? 'left' : 'top',
$ = function(id){
return typeof id == 'string' ? document.getElementById(id) : id;
},
html = '<div id="YHAlert" style="visibility:hidden;"><div id="YHAlert_in">' +
'<p id="YHAlert_p">' +
msg +
'</p>' +
'</div></div>',
asynInnerHTML = function(HTML, doingCallback, endCallback){
var temp = document.createElement('div'),
frag = document.createDocumentFragment();
temp.innerHTML = HTML;
(function(){
if (temp.firstChild) {
frag.appendChild(temp.firstChild);
doingCallback(frag);
setTimeout(arguments.callee, 0);
} else {
if (endCallback) endCallback(frag);
}
})();
},
ie = (function(){
var undef,
v = 3,
div = document.createElement('div'),
all = div.getElementsByTagName('i');
while (
div.innerHTML = '<!--[if gt IE ' + (++v) + ']><i></i><![endif]-->',
all[0]
);
return v > 4 ? v : undef;
}()),
remove = function(elem){
if (elem.parentNode) elem.parentNode.removeChild(elem);
},
getArea = function(){
return {
height: document.documentElement.clientHeight,
width: document.documentElement.clientWidth
}
},
getMax = function(){
var dd = document.documentElement;
return {
height: Math.max(dd.scrollHeight, dd.clientHeight),
width: Math.max(dd.scrollWidth, dd.clientWidth)
}
},
getScroll = function(){
return {
top: Math.max(document.documentElement.scrollTop, document.body.scrollTop),
left: Math.max(document.documentElement.scrollLeft, document.body.scrollLeft)
}
},
setStyle = function(elem, styles){
for (var i in styles) {
elem.style[i] = styles[i];
}
},
setOpacity = function(elem, level){
elem.filters ? elem.style.filter = 'alpha(opacity=' + level + ')' : elem.style.opacity = level / 100;
},
fIn = function(elem, callback){
var step = 3;
setOpacity(elem, 0);
elem.style.visibility = 'visible';
elem.style[way] = parseInt(elem.style[way]) - 100 / step + 'px';
var opacity = 0, t = setInterval(function(){
setOpacity(elem, opacity);
if (opacity >= 100) {
setOpacity(elem, 100);
clearInterval(t);
if (callback) callback.call(elem);
}
opacity += step;
elem.style[way] = parseInt(elem.style[way]) + 1 + 'px';
}, 1);
},
fOut = function(elem, callback){
elem.style.visibility = 'visible';
var step = 3, opacity = 100, t = setInterval(function(){
setOpacity(elem, opacity);
if (opacity <= 0) {
setOpacity(elem, 0);
elem.style.visibility = 'hidden';
clearInterval(t);
if (callback) callback.call(elem);
}
opacity -= step;
elem.style[way] = parseInt(elem.style[way]) + 1 + 'px';
}, 1);
};
(function(){
while ($('YHAlert')) {
remove($('YHAlert'));
};
asynInnerHTML(html, function(f){
bd.appendChild(f);
}, function(){
var YHAlert = $('YHAlert'),
YHAlert_in = $('YHAlert_in'),
YHAlert_p = $('YHAlert_p'),
w = getArea().width,
h = getArea().height,
st = getScroll().top,
YHAlert_height = parseInt(YHAlert.offsetHeight),
pos=ie==6?'absolute':'fixed',
t=ie==6?parseInt(st + h / 2 - YHAlert_height * 6):parseInt(h / 2 - YHAlert_height * 6);
setStyle(YHAlert, {
'borderRadius': '5px',
'MozBorderRadius': '5px',
'WebkitBorderRadius': '5px',
'boxShadow': '0 0 3px #ccc',
'MozBoxShadow': '0 0 3px #ccc',
'WebkitBoxShadow': '0 0 3px #ccc',
'left': parseInt(w / 2 - 80) + 'px',
'top': t+'px',
'position': pos,
'width': '160px',
'backgroundColor': '#F3F7FD',
'border': '1px solid #6ed3e3',
'zIndex': '99999'
}); |
setStyle(YHAlert_in, {
'borderRadius': '5px',
'MozBorderRadius': '5px',
'WebkitBorderRadius': '5px',
'backgroundColor':'#fefefe',
'padding':'15px 10px'
});
setStyle(YHAlert_p, {
'textAlign': 'left',
'fontSize': '14px',
'margin': '0',
'color': '#000',
'lineHeight': '140%'
});
fIn(YHAlert, function(){
setTimeout(function(){
fOut(YHAlert, function(){
remove(YHAlert);
});
}, time);
});
});
}());
}; | random_line_split |
|
yhAlert.js | /*
* @author paper
*/
function | (arg){
var obj = arg ||{},
msg = obj.msg || 'No alert message was provided!',
bd = document.getElementsByTagName('body')[0],
time = obj.time || 2000,
way = obj.way == 'leftToRight' ? 'left' : 'top',
$ = function(id){
return typeof id == 'string' ? document.getElementById(id) : id;
},
html = '<div id="YHAlert" style="visibility:hidden;"><div id="YHAlert_in">' +
'<p id="YHAlert_p">' +
msg +
'</p>' +
'</div></div>',
asynInnerHTML = function(HTML, doingCallback, endCallback){
var temp = document.createElement('div'),
frag = document.createDocumentFragment();
temp.innerHTML = HTML;
(function(){
if (temp.firstChild) {
frag.appendChild(temp.firstChild);
doingCallback(frag);
setTimeout(arguments.callee, 0);
} else {
if (endCallback) endCallback(frag);
}
})();
},
ie = (function(){
var undef,
v = 3,
div = document.createElement('div'),
all = div.getElementsByTagName('i');
while (
div.innerHTML = '<!--[if gt IE ' + (++v) + ']><i></i><![endif]-->',
all[0]
);
return v > 4 ? v : undef;
}()),
remove = function(elem){
if (elem.parentNode) elem.parentNode.removeChild(elem);
},
getArea = function(){
return {
height: document.documentElement.clientHeight,
width: document.documentElement.clientWidth
}
},
getMax = function(){
var dd = document.documentElement;
return {
height: Math.max(dd.scrollHeight, dd.clientHeight),
width: Math.max(dd.scrollWidth, dd.clientWidth)
}
},
getScroll = function(){
return {
top: Math.max(document.documentElement.scrollTop, document.body.scrollTop),
left: Math.max(document.documentElement.scrollLeft, document.body.scrollLeft)
}
},
setStyle = function(elem, styles){
for (var i in styles) {
elem.style[i] = styles[i];
}
},
setOpacity = function(elem, level){
elem.filters ? elem.style.filter = 'alpha(opacity=' + level + ')' : elem.style.opacity = level / 100;
},
fIn = function(elem, callback){
var step = 3;
setOpacity(elem, 0);
elem.style.visibility = 'visible';
elem.style[way] = parseInt(elem.style[way]) - 100 / step + 'px';
var opacity = 0, t = setInterval(function(){
setOpacity(elem, opacity);
if (opacity >= 100) {
setOpacity(elem, 100);
clearInterval(t);
if (callback) callback.call(elem);
}
opacity += step;
elem.style[way] = parseInt(elem.style[way]) + 1 + 'px';
}, 1);
},
fOut = function(elem, callback){
elem.style.visibility = 'visible';
var step = 3, opacity = 100, t = setInterval(function(){
setOpacity(elem, opacity);
if (opacity <= 0) {
setOpacity(elem, 0);
elem.style.visibility = 'hidden';
clearInterval(t);
if (callback) callback.call(elem);
}
opacity -= step;
elem.style[way] = parseInt(elem.style[way]) + 1 + 'px';
}, 1);
};
(function(){
while ($('YHAlert')) {
remove($('YHAlert'));
};
asynInnerHTML(html, function(f){
bd.appendChild(f);
}, function(){
var YHAlert = $('YHAlert'),
YHAlert_in = $('YHAlert_in'),
YHAlert_p = $('YHAlert_p'),
w = getArea().width,
h = getArea().height,
st = getScroll().top,
YHAlert_height = parseInt(YHAlert.offsetHeight),
pos=ie==6?'absolute':'fixed',
t=ie==6?parseInt(st + h / 2 - YHAlert_height * 6):parseInt(h / 2 - YHAlert_height * 6);
setStyle(YHAlert, {
'borderRadius': '5px',
'MozBorderRadius': '5px',
'WebkitBorderRadius': '5px',
'boxShadow': '0 0 3px #ccc',
'MozBoxShadow': '0 0 3px #ccc',
'WebkitBoxShadow': '0 0 3px #ccc',
'left': parseInt(w / 2 - 80) + 'px',
'top': t+'px',
'position': pos,
'width': '160px',
'backgroundColor': '#F3F7FD',
'border': '1px solid #6ed3e3',
'zIndex': '99999'
});
setStyle(YHAlert_in, {
'borderRadius': '5px',
'MozBorderRadius': '5px',
'WebkitBorderRadius': '5px',
'backgroundColor':'#fefefe',
'padding':'15px 10px'
});
setStyle(YHAlert_p, {
'textAlign': 'left',
'fontSize': '14px',
'margin': '0',
'color': '#000',
'lineHeight': '140%'
});
fIn(YHAlert, function(){
setTimeout(function(){
fOut(YHAlert, function(){
remove(YHAlert);
});
}, time);
});
});
}());
};
| YHAlert | identifier_name |
account.rs | extern crate meg;
extern crate log;
use log::*;
use std::env;
use std::clone::Clone;
use turbo::util::{CliResult, Config};
use self::meg::ops::meg_account_create as Act;
use self::meg::ops::meg_account_show as Show;
#[derive(RustcDecodable, Clone)]
pub struct Options {
pub flag_create: String,
pub flag_show: bool,
pub flag_verbose: bool,
}
pub const USAGE: &'static str = "
Usage:
meg account [options]
Options:
-h, --help Print this message
--create EMAIL Provide an email to create a new account
--show View your account details
-v, --verbose Use verbose output
";
pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
debug!("executing; cmd=meg-account; args={:?}", env::args().collect::<Vec<_>>());
config.shell().set_verbose(options.flag_verbose);
let vec = env::args().collect::<Vec<_>>();
for x in vec.iter() {
if x == "--create" {
let mut acct: Act::Createoptions = Act::CreateAcc::new();
acct.email = options.flag_create.clone();
acct.create();
} else if x == "--show" {
let mut acct: Show::Showoptions = Show::ShowAcc::new(); // Not required - to expand later if
acct.email = options.flag_create.clone(); // multiple accounts need to be shown
acct.show();
|
} | }
}
return Ok(None) | random_line_split |
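// A hedged alternative sketch (not part of the original command): the decoded
// `Options` already say which flag was passed, so re-scanning env::args() as
// above could be replaced with direct flag checks.
fn execute_flags_only(options: &Options) {
    if !options.flag_create.is_empty() {
        let mut acct: Act::Createoptions = Act::CreateAcc::new();
        acct.email = options.flag_create.clone();
        acct.create();
    } else if options.flag_show {
        let mut acct: Show::Showoptions = Show::ShowAcc::new();
        acct.show();
    }
}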
account.rs | extern crate meg;
extern crate log;
use log::*;
use std::env;
use std::clone::Clone;
use turbo::util::{CliResult, Config};
use self::meg::ops::meg_account_create as Act;
use self::meg::ops::meg_account_show as Show;
#[derive(RustcDecodable, Clone)]
pub struct | {
pub flag_create: String,
pub flag_show: bool,
pub flag_verbose: bool,
}
pub const USAGE: &'static str = "
Usage:
meg account [options]
Options:
-h, --help Print this message
--create EMAIL Provide an email to create a new account
--show View your account details
-v, --verbose Use verbose output
";
pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
debug!("executing; cmd=meg-account; args={:?}", env::args().collect::<Vec<_>>());
config.shell().set_verbose(options.flag_verbose);
let vec = env::args().collect::<Vec<_>>();
for x in vec.iter() {
if x == "--create" {
let mut acct: Act::Createoptions = Act::CreateAcc::new();
acct.email = options.flag_create.clone();
acct.create();
} else if x == "--show" {
let mut acct: Show::Showoptions = Show::ShowAcc::new(); // Not required - to expand later if
acct.email = options.flag_create.clone(); // multiple accounts need to be shown
acct.show();
}
}
return Ok(None)
}
| Options | identifier_name |
static-file-route.ts | import { createHash } from "crypto";
import * as etag from "etag";
import { compressSync as brotliCompress } from "iltorb";
import * as zlib from "zlib";
export interface StaticFileRoute {
path: string;
foreverPath: string;
etag: string;
integrity: string;
buffer: Buffer;
string?: string;
gzipped?: Buffer;
brotlied?: Buffer;
}
// Construct a static file route that doesn't change and has a "forever path" based on the file contents
export function staticFileRoute(path: string, contents: string | Buffer): StaticFileRoute {
const buffer = typeof contents === "string" ? Buffer.from(contents) : contents;
const integrity = createHash("sha256").update(buffer).digest("base64");
const result: StaticFileRoute = {
path,
foreverPath: path.replace(/\.((?!.*\.))/, "." + integrity.replace(/\//g, "_").replace(/\+/g, "-").replace(/=/g, "").substring(0, 16) + "."),
etag: etag(buffer),
integrity: "sha256-" + integrity,
buffer,
};
if (typeof contents === "string") |
return result;
}
export function stringFromRoute(route: StaticFileRoute) {
// Convert buffer to string on demand
return typeof route.string === "string" ? route.string : route.buffer.toString();
}
export function gzippedBufferFromRoute(route: StaticFileRoute) {
// Apply GZIP compression on demand
return route.gzipped || (route.gzipped = zlib.gzipSync(route.buffer, {
level: zlib.constants.Z_BEST_COMPRESSION,
}));
}
export function brotliedBufferFromRoute(route: StaticFileRoute) {
// Apply brotli compression on demand
return route.brotlied || (route.brotlied = brotliCompress(route.buffer, {
mode: 1,
}));
}
| {
result.string = contents;
} | conditional_block |
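// A hedged usage sketch (not part of the original module): a route is served
// at both its stable path and its content-hashed "forever path"; the hashed
// URL can be cached immutably because changing the content changes the URL.
// The import path is an assumption.
import { staticFileRoute } from "./static-file-route";

const mainScript = staticFileRoute("/main.js", "console.log('hi');");
// e.g. "/main.<first 16 url-safe chars of the sha256>.js"
console.log(mainScript.foreverPath, mainScript.etag, mainScript.integrity);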
static-file-route.ts | import { createHash } from "crypto";
import * as etag from "etag";
import { compressSync as brotliCompress } from "iltorb";
import * as zlib from "zlib";
export interface StaticFileRoute {
path: string;
foreverPath: string;
etag: string;
integrity: string;
buffer: Buffer;
string?: string;
gzipped?: Buffer;
brotlied?: Buffer;
}
// Construct a static file route that doesn't change and has a "forever path" based on the file contents
export function staticFileRoute(path: string, contents: string | Buffer): StaticFileRoute {
const buffer = typeof contents === "string" ? Buffer.from(contents) : contents;
const integrity = createHash("sha256").update(buffer).digest("base64");
const result: StaticFileRoute = {
path,
foreverPath: path.replace(/\.((?!.*\.))/, "." + integrity.replace(/\//g, "_").replace(/\+/g, "-").replace(/=/g, "").substring(0, 16) + "."),
etag: etag(buffer),
integrity: "sha256-" + integrity,
buffer,
};
if (typeof contents === "string") {
result.string = contents;
}
return result;
}
export function | (route: StaticFileRoute) {
// Convert buffer to string on demand
return typeof route.string === "string" ? route.string : route.buffer.toString();
}
export function gzippedBufferFromRoute(route: StaticFileRoute) {
// Apply GZIP compression on demand
return route.gzipped || (route.gzipped = zlib.gzipSync(route.buffer, {
level: zlib.constants.Z_BEST_COMPRESSION,
}));
}
export function brotliedBufferFromRoute(route: StaticFileRoute) {
// Apply brotli compression on demand
return route.brotlied || (route.brotlied = brotliCompress(route.buffer, {
mode: 1,
}));
}
| stringFromRoute | identifier_name |
static-file-route.ts | import { createHash } from "crypto";
import * as etag from "etag";
import { compressSync as brotliCompress } from "iltorb";
import * as zlib from "zlib";
export interface StaticFileRoute {
path: string;
foreverPath: string;
etag: string;
integrity: string;
buffer: Buffer;
string?: string;
gzipped?: Buffer;
brotlied?: Buffer;
}
// Construct a static file route that doesn't change and has a "forever path" based on the file contents
export function staticFileRoute(path: string, contents: string | Buffer): StaticFileRoute {
const buffer = typeof contents === "string" ? Buffer.from(contents) : contents;
const integrity = createHash("sha256").update(buffer).digest("base64");
const result: StaticFileRoute = {
path,
foreverPath: path.replace(/\.((?!.*\.))/, "." + integrity.replace(/\//g, "_").replace(/\+/g, "-").replace(/=/g, "").substring(0, 16) + "."),
etag: etag(buffer),
integrity: "sha256-" + integrity,
buffer,
};
if (typeof contents === "string") {
result.string = contents;
}
return result;
}
export function stringFromRoute(route: StaticFileRoute) |
export function gzippedBufferFromRoute(route: StaticFileRoute) {
// Apply GZIP compression on demand
return route.gzipped || (route.gzipped = zlib.gzipSync(route.buffer, {
level: zlib.constants.Z_BEST_COMPRESSION,
}));
}
export function brotliedBufferFromRoute(route: StaticFileRoute) {
// Apply brotli compression on demand
return route.brotlied || (route.brotlied = brotliCompress(route.buffer, {
mode: 1,
}));
}
| {
// Convert buffer to string on demand
return typeof route.string === "string" ? route.string : route.buffer.toString();
} | identifier_body |
static-file-route.ts | import { createHash } from "crypto";
import * as etag from "etag";
import { compressSync as brotliCompress } from "iltorb";
import * as zlib from "zlib";
export interface StaticFileRoute {
path: string;
foreverPath: string;
etag: string;
integrity: string;
buffer: Buffer;
string?: string;
gzipped?: Buffer;
brotlied?: Buffer;
}
// Construct a static file route that doesn't change and has a "forever path" based on the file contents
export function staticFileRoute(path: string, contents: string | Buffer): StaticFileRoute {
const buffer = typeof contents === "string" ? Buffer.from(contents) : contents;
const integrity = createHash("sha256").update(buffer).digest("base64");
const result: StaticFileRoute = { | path,
foreverPath: path.replace(/\.((?!.*\.))/, "." + integrity.replace(/\//g, "_").replace(/\+/g, "-").replace(/=/g, "").substring(0, 16) + "."),
etag: etag(buffer),
integrity: "sha256-" + integrity,
buffer,
};
if (typeof contents === "string") {
result.string = contents;
}
return result;
}
export function stringFromRoute(route: StaticFileRoute) {
// Convert buffer to string on demand
return typeof route.string === "string" ? route.string : route.buffer.toString();
}
export function gzippedBufferFromRoute(route: StaticFileRoute) {
// Apply GZIP compression on demand
return route.gzipped || (route.gzipped = zlib.gzipSync(route.buffer, {
level: zlib.constants.Z_BEST_COMPRESSION,
}));
}
export function brotliedBufferFromRoute(route: StaticFileRoute) {
// Apply brotli compression on demand
return route.brotlied || (route.brotlied = brotliCompress(route.buffer, {
mode: 1,
}));
} | random_line_split |
|
fields.py | from django.db import models
# Django doesn't support big auto fields out of the box, see
# https://code.djangoproject.com/ticket/14286.
# This is a stripped down version of the BoundedBigAutoField from Sentry.
class BigAutoField(models.AutoField):
description = "Big Integer"
def db_type(self, connection):
engine = connection.settings_dict['ENGINE']
if 'mysql' in engine:
return "bigint AUTO_INCREMENT"
elif 'postgres' in engine:
return "bigserial"
else:
raise NotImplementedError
def get_related_db_type(self, connection): |
class FlexibleForeignKey(models.ForeignKey):
def db_type(self, connection):
# This is required to support BigAutoField
rel_field = self.related_field
if hasattr(rel_field, 'get_related_db_type'):
return rel_field.get_related_db_type(connection)
return super(FlexibleForeignKey, self).db_type(connection) | return models.BigIntegerField().db_type(connection)
def get_internal_type(self):
return "BigIntegerField" | random_line_split |
fields.py | from django.db import models
# Django doesn't support big auto fields out of the box, see
# https://code.djangoproject.com/ticket/14286.
# This is a stripped down version of the BoundedBigAutoField from Sentry.
class BigAutoField(models.AutoField):
description = "Big Integer"
def db_type(self, connection):
engine = connection.settings_dict['ENGINE']
if 'mysql' in engine:
return "bigint AUTO_INCREMENT"
elif 'postgres' in engine:
return "bigserial"
else:
raise NotImplementedError
def get_related_db_type(self, connection):
return models.BigIntegerField().db_type(connection)
def get_internal_type(self):
return "BigIntegerField"
class | (models.ForeignKey):
def db_type(self, connection):
# This is required to support BigAutoField
rel_field = self.related_field
if hasattr(rel_field, 'get_related_db_type'):
return rel_field.get_related_db_type(connection)
return super(FlexibleForeignKey, self).db_type(connection)
| FlexibleForeignKey | identifier_name |
fields.py | from django.db import models
# Django doesn't support big auto fields out of the box, see
# https://code.djangoproject.com/ticket/14286.
# This is a stripped down version of the BoundedBigAutoField from Sentry.
class BigAutoField(models.AutoField):
description = "Big Integer"
def db_type(self, connection):
engine = connection.settings_dict['ENGINE']
if 'mysql' in engine:
return "bigint AUTO_INCREMENT"
elif 'postgres' in engine:
|
else:
raise NotImplementedError
def get_related_db_type(self, connection):
return models.BigIntegerField().db_type(connection)
def get_internal_type(self):
return "BigIntegerField"
class FlexibleForeignKey(models.ForeignKey):
def db_type(self, connection):
# This is required to support BigAutoField
rel_field = self.related_field
if hasattr(rel_field, 'get_related_db_type'):
return rel_field.get_related_db_type(connection)
return super(FlexibleForeignKey, self).db_type(connection)
| return "bigserial" | conditional_block |
fields.py | from django.db import models
# Django doesn't support big auto fields out of the box, see
# https://code.djangoproject.com/ticket/14286.
# This is a stripped down version of the BoundedBigAutoField from Sentry.
class BigAutoField(models.AutoField):
description = "Big Integer"
def db_type(self, connection):
|
def get_related_db_type(self, connection):
return models.BigIntegerField().db_type(connection)
def get_internal_type(self):
return "BigIntegerField"
class FlexibleForeignKey(models.ForeignKey):
def db_type(self, connection):
# This is required to support BigAutoField
rel_field = self.related_field
if hasattr(rel_field, 'get_related_db_type'):
return rel_field.get_related_db_type(connection)
return super(FlexibleForeignKey, self).db_type(connection)
| engine = connection.settings_dict['ENGINE']
if 'mysql' in engine:
return "bigint AUTO_INCREMENT"
elif 'postgres' in engine:
return "bigserial"
else:
raise NotImplementedError | identifier_body |
pso.py | """
Copyright (C) 2012 Alan J Lockett
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import inspect
import numpy as np
from basic import PopulationDistribution
from pyec.config import Config
from pyec.history import LocalBestHistory
from pyec.space import Euclidean
class PSOHistory(LocalBestHistory):
"""A :class:`History` for Particle Swarm Optimization.
Remembers the local best and the velocities.
"""
def __init__(self, config):
super(PSOHistory, self).__init__(config)
self._positions = None
self._velocities = None
self.lowerv = None
self.upperv = None
self.attrs |= set(["_velocities", "_positions", "upperv", "lowerv"])
def velocities(self):
return self._velocities
def positions(self):
return self._positions
def updateVelocity(self):
popSize = self.config.populationSize
if self._velocities is None:
if self.config.initial is None:
self._velocities = np.array([self.config.space.random()
for i in xrange(popSize)])
elif (inspect.isclass(self.config.initial) and
isinstance(self.config.initial, PopulationDistribution)):
self._velocities = np.array([self.config.initial.batch(popSize)])
else:
self._velocities = np.array([self.config.initial()
for i in xrange(popSize)])
return
rp = np.outer(np.random.random_sample(popSize),
np.ones(self.config.dim))
rg = np.outer(np.random.random_sample(popSize),
np.ones(self.config.dim))
#print shape(rp), shape(self.bestLocal), shape(self.bestGlobal), shape(self.positions), shape(self.velocities)
bestLocal = np.array([x for x,s in self.localBestPop])
bestGlobal = self.best()[0]
velocities = (self.config.omega * self._velocities
+ self.config.phip * rp * (bestLocal - self._positions)
+ self.config.phig * rg * (bestGlobal - self._positions))
del self._velocities
self._velocities = np.maximum(self.lowerv,
np.minimum(self.upperv, velocities))
del rp
del rg
def internalUpdate(self, population):
super(PSOHistory, self).internalUpdate(population)
initialize = True
if self._positions is not None:
del self._positions
initialize = False
self._positions = np.array([x for x,s in population])
if hasattr(self.config.space, 'extent'):
|
if initialize:
self.upperv = self._positions.max(axis=0)
self.lowerv = self._positions.min(axis=0)
self.updateVelocity()
class ParticleSwarmOptimization(PopulationDistribution):
"""Particle Swarm Optimization.
Config parameters
* omega -- The decay factor for velocities
* phig -- The global best component in velocity update
* phip -- The local best component in velocity update
"""
config = Config(history=PSOHistory,
omega=-.5,
phig=2.0,
phip=2.0)
def __init__(self, **kwargs):
super(ParticleSwarmOptimization, self).__init__(**kwargs)
if self.config.space.type != np.ndarray:
raise ValueError("Space must have type numpy.ndarray")
def compatible(self, history):
return isinstance(history, PSOHistory)
def batch(self, popSize):
positions = self.history.positions() + self.history.velocities()
return positions
| lower, upper = self.config.space.extent()
self._positions = np.maximum(self._positions, lower)
self._positions = np.minimum(self._positions, upper) | conditional_block |
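# Standalone illustration (not pyec code) of the velocity rule implemented in
# updateVelocity() above:
#   v <- omega*v + phip*rp*(local_best - x) + phig*rg*(global_best - x)
# with rp, rg drawn uniformly per particle and broadcast across dimensions.
import numpy as np

pop, dim = 4, 2
x = np.random.rand(pop, dim)            # particle positions
v = np.zeros((pop, dim))                # particle velocities
local_best = x.copy()                   # best position seen by each particle
global_best = x[0]                      # best position seen by the swarm
omega, phip, phig = -0.5, 2.0, 2.0      # defaults from the Config above
rp = np.outer(np.random.random_sample(pop), np.ones(dim))
rg = np.outer(np.random.random_sample(pop), np.ones(dim))
v = omega * v + phip * rp * (local_best - x) + phig * rg * (global_best - x)
x = x + v                               # matches batch(): positions + velocities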
pso.py | """
Copyright (C) 2012 Alan J Lockett
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import inspect
import numpy as np
from basic import PopulationDistribution
from pyec.config import Config
from pyec.history import LocalBestHistory
from pyec.space import Euclidean
class PSOHistory(LocalBestHistory):
"""A :class:`History` for Particle Swarm Optimization.
Remembers the local best and the velocities.
"""
def __init__(self, config):
super(PSOHistory, self).__init__(config)
self._positions = None
self._velocities = None
self.lowerv = None
self.upperv = None
self.attrs |= set(["_velocities", "_positions", "upperv", "lowerv"])
def velocities(self):
return self._velocities
def positions(self):
|
def updateVelocity(self):
popSize = self.config.populationSize
if self._velocities is None:
if self.config.initial is None:
self._velocities = np.array([self.config.space.random()
for i in xrange(popSize)])
elif (inspect.isclass(self.config.initial) and
isinstance(self.config.initial, PopulationDistribution)):
self._velocities = np.array([self.config.initial.batch(popSize)])
else:
self._velocities = np.array([self.config.initial()
for i in xrange(popSize)])
return
rp = np.outer(np.random.random_sample(popSize),
np.ones(self.config.dim))
rg = np.outer(np.random.random_sample(popSize),
np.ones(self.config.dim))
#print shape(rp), shape(self.bestLocal), shape(self.bestGlobal), shape(self.positions), shape(self.velocities)
bestLocal = np.array([x for x,s in self.localBestPop])
bestGlobal = self.best()[0]
velocities = (self.config.omega * self._velocities
+ self.config.phip * rp * (bestLocal - self._positions)
+ self.config.phig * rg * (bestGlobal - self._positions))
del self._velocities
self._velocities = np.maximum(self.lowerv,
np.minimum(self.upperv, velocities))
del rp
del rg
def internalUpdate(self, population):
super(PSOHistory, self).internalUpdate(population)
initialize = True
if self._positions is not None:
del self._positions
initialize = False
self._positions = np.array([x for x,s in population])
if hasattr(self.config.space, 'extent'):
lower, upper = self.config.space.extent()
self._positions = np.maximum(self._positions, lower)
self._positions = np.minimum(self._positions, upper)
if initialize:
self.upperv = self._positions.max(axis=0)
self.lowerv = self._positions.min(axis=0)
self.updateVelocity()
class ParticleSwarmOptimization(PopulationDistribution):
"""Particle Swarm Optimization.
Config parameters
* omega -- The decay factor for velocities
* phig -- The global best component in velocity update
* phip -- The local best component in velocity update
"""
config = Config(history=PSOHistory,
omega=-.5,
phig=2.0,
phip=2.0)
def __init__(self, **kwargs):
super(ParticleSwarmOptimization, self).__init__(**kwargs)
if self.config.space.type != np.ndarray:
raise ValueError("Space must have type numpy.ndarray")
def compatible(self, history):
return isinstance(history, PSOHistory)
def batch(self, popSize):
positions = self.history.positions() + self.history.velocities()
return positions
| return self._positions | identifier_body |
pso.py | """
Copyright (C) 2012 Alan J Lockett
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import inspect
import numpy as np
from basic import PopulationDistribution
from pyec.config import Config
from pyec.history import LocalBestHistory
from pyec.space import Euclidean
class PSOHistory(LocalBestHistory):
"""A :class:`History` for Particle Swarm Optimization.
Remembers the local best and the velocities.
"""
def __init__(self, config):
super(PSOHistory, self).__init__(config)
self._positions = None
self._velocities = None
self.lowerv = None
self.upperv = None
self.attrs |= set(["_velocities", "_positions", "upperv", "lowerv"])
def velocities(self):
return self._velocities
def positions(self):
return self._positions
def | (self):
popSize = self.config.populationSize
if self._velocities is None:
if self.config.initial is None:
self._velocities = np.array([self.config.space.random()
for i in xrange(popSize)])
elif (inspect.isclass(self.config.initial) and
isinstance(self.config.initial, PopulationDistribution)):
self._velocities = np.array([self.config.initial.batch(popSize)])
else:
self._velocities = np.array([self.config.initial()
for i in xrange(popSize)])
return
rp = np.outer(np.random.random_sample(popSize),
np.ones(self.config.dim))
rg = np.outer(np.random.random_sample(popSize),
np.ones(self.config.dim))
#print shape(rp), shape(self.bestLocal), shape(self.bestGlobal), shape(self.positions), shape(self.velocities)
bestLocal = np.array([x for x,s in self.localBestPop])
bestGlobal = self.best()[0]
velocities = (self.config.omega * self._velocities
+ self.config.phip * rp * (bestLocal - self._positions)
+ self.config.phig * rg * (bestGlobal - self._positions))
del self._velocities
self._velocities = np.maximum(self.lowerv,
np.minimum(self.upperv, velocities))
del rp
del rg
def internalUpdate(self, population):
super(PSOHistory, self).internalUpdate(population)
initialize = True
if self._positions is not None:
del self._positions
initialize = False
self._positions = np.array([x for x,s in population])
if hasattr(self.config.space, 'extent'):
lower, upper = self.config.space.extent()
self._positions = np.maximum(self._positions, lower)
self._positions = np.minimum(self._positions, upper)
if initialize:
self.upperv = self._positions.max(axis=0)
self.lowerv = self._positions.min(axis=0)
self.updateVelocity()
class ParticleSwarmOptimization(PopulationDistribution):
"""Particle Swarm Optimization.
Config parameters
* omega -- The decay factor for velocities
* phig -- The global best component in velocity update
* phip -- The local best component in velocity update
"""
config = Config(history=PSOHistory,
omega=-.5,
phig=2.0,
phip=2.0)
def __init__(self, **kwargs):
super(ParticleSwarmOptimization, self).__init__(**kwargs)
if self.config.space.type != np.ndarray:
raise ValueError("Space must have type numpy.ndarray")
def compatible(self, history):
return isinstance(history, PSOHistory)
def batch(self, popSize):
positions = self.history.positions() + self.history.velocities()
return positions
| updateVelocity | identifier_name |
pso.py | """
Copyright (C) 2012 Alan J Lockett
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import inspect
import numpy as np
from basic import PopulationDistribution
from pyec.config import Config
from pyec.history import LocalBestHistory
from pyec.space import Euclidean
class PSOHistory(LocalBestHistory):
"""A :class:`History` for Particle Swarm Optimization.
Remembers the local best and the velocities.
"""
def __init__(self, config):
super(PSOHistory, self).__init__(config)
self._positions = None
self._velocities = None
self.lowerv = None
self.upperv = None
self.attrs |= set(["_velocities", "_positions", "upperv", "lowerv"])
def velocities(self):
return self._velocities
def positions(self):
return self._positions
def updateVelocity(self):
popSize = self.config.populationSize
if self._velocities is None:
if self.config.initial is None:
self._velocities = np.array([self.config.space.random()
for i in xrange(popSize)])
elif (inspect.isclass(self.config.initial) and
isinstance(self.config.initial, PopulationDistribution)):
self._velocities = np.array([self.config.initial.batch(popSize)])
else:
self._velocities = np.array([self.config.initial()
for i in xrange(popSize)])
return
rp = np.outer(np.random.random_sample(popSize), | rg = np.outer(np.random.random_sample(popSize),
np.ones(self.config.dim))
#print shape(rp), shape(self.bestLocal), shape(self.bestGlobal), shape(self.positions), shape(self.velocities)
bestLocal = np.array([x for x,s in self.localBestPop])
bestGlobal = self.best()[0]
velocities = (self.config.omega * self._velocities
+ self.config.phip * rp * (bestLocal - self._positions)
+ self.config.phig * rg * (bestGlobal - self._positions))
del self._velocities
self._velocities = np.maximum(self.lowerv,
np.minimum(self.upperv, velocities))
del rp
del rg
def internalUpdate(self, population):
super(PSOHistory, self).internalUpdate(population)
initialize = True
if self._positions is not None:
del self._positions
initialize = False
self._positions = np.array([x for x,s in population])
if hasattr(self.config.space, 'extent'):
lower, upper = self.config.space.extent()
self._positions = np.maximum(self._positions, lower)
self._positions = np.minimum(self._positions, upper)
if initialize:
self.upperv = self._positions.max(axis=0)
self.lowerv = self._positions.min(axis=0)
self.updateVelocity()
class ParticleSwarmOptimization(PopulationDistribution):
"""Particle Swarm Optimization.
Config parameters
* omega -- The decay factor for velocities
* phig -- The global best component in velocity update
* phip -- The local best component in velocity update
"""
config = Config(history=PSOHistory,
omega=-.5,
phig=2.0,
phip=2.0)
def __init__(self, **kwargs):
super(ParticleSwarmOptimization, self).__init__(**kwargs)
if self.config.space.type != np.ndarray:
raise ValueError("Space must have type numpy.ndarray")
def compatible(self, history):
return isinstance(history, PSOHistory)
def batch(self, popSize):
positions = self.history.positions() + self.history.velocities()
return positions | np.ones(self.config.dim)) | random_line_split |
tbody.module.js | var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { CellModule } from '../cell/cell.module';
import { Ng2SmartTableTbodyComponent } from './tbody.component';
import { TbodyCreateCancelComponent } from './cells/create-cancel.component';
import { TbodyEditDeleteComponent } from './cells/edit-delete.component';
import { TbodyCustomComponent } from './cells/custom.component';
var TBODY_COMPONENTS = [
TbodyCreateCancelComponent,
TbodyEditDeleteComponent,
TbodyCustomComponent,
Ng2SmartTableTbodyComponent
];
var TBodyModule = (function () {
function TBodyModule() |
return TBodyModule;
}());
TBodyModule = __decorate([
NgModule({
imports: [
CommonModule,
FormsModule,
CellModule,
],
declarations: TBODY_COMPONENTS.slice(),
exports: TBODY_COMPONENTS.slice(),
})
], TBodyModule);
export { TBodyModule };
//# sourceMappingURL=tbody.module.js.map | {
} | identifier_body |
tbody.module.js | var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { CellModule } from '../cell/cell.module';
import { Ng2SmartTableTbodyComponent } from './tbody.component';
import { TbodyCreateCancelComponent } from './cells/create-cancel.component';
import { TbodyEditDeleteComponent } from './cells/edit-delete.component';
import { TbodyCustomComponent } from './cells/custom.component';
var TBODY_COMPONENTS = [
TbodyCreateCancelComponent,
TbodyEditDeleteComponent,
TbodyCustomComponent,
Ng2SmartTableTbodyComponent
];
var TBodyModule = (function () {
function | () {
}
return TBodyModule;
}());
TBodyModule = __decorate([
NgModule({
imports: [
CommonModule,
FormsModule,
CellModule,
],
declarations: TBODY_COMPONENTS.slice(),
exports: TBODY_COMPONENTS.slice(),
})
], TBodyModule);
export { TBodyModule };
//# sourceMappingURL=tbody.module.js.map | TBodyModule | identifier_name |
tbody.module.js | var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { CellModule } from '../cell/cell.module';
import { Ng2SmartTableTbodyComponent } from './tbody.component';
import { TbodyCreateCancelComponent } from './cells/create-cancel.component';
import { TbodyEditDeleteComponent } from './cells/edit-delete.component';
import { TbodyCustomComponent } from './cells/custom.component'; | TbodyCustomComponent,
Ng2SmartTableTbodyComponent
];
var TBodyModule = (function () {
function TBodyModule() {
}
return TBodyModule;
}());
TBodyModule = __decorate([
NgModule({
imports: [
CommonModule,
FormsModule,
CellModule,
],
declarations: TBODY_COMPONENTS.slice(),
exports: TBODY_COMPONENTS.slice(),
})
], TBodyModule);
export { TBodyModule };
//# sourceMappingURL=tbody.module.js.map | var TBODY_COMPONENTS = [
TbodyCreateCancelComponent,
TbodyEditDeleteComponent, | random_line_split |
abstractmoduleloader.d.ts | declare module goog.module {
/**
* An interface that loads JavaScript modules.
* @interface
*/
interface AbstractModuleLoader {
/**
* Loads a list of JavaScript modules.
*
* @param {Array<string>} ids The module ids in dependency order.
* @param {Object} moduleInfoMap A mapping from module id to ModuleInfo object.
* @param {function()?=} opt_successFn The callback if module loading is a
* success.
* @param {function(?number)?=} opt_errorFn The callback if module loading is an
* error.
* @param {function()?=} opt_timeoutFn The callback if module loading times out.
* @param {boolean=} opt_forceReload Whether to bypass cache while loading the
* module.
*/
loadModules(ids: Array<string>, moduleInfoMap: Object, opt_successFn?: () => any, opt_errorFn?: (arg0: number) => any, opt_timeoutFn?: () => any, opt_forceReload?: boolean): void;
/**
* Pre-fetches a JavaScript module. | *
* @param {string} id The module id.
* @param {!goog.module.ModuleInfo} moduleInfo The module info.
*/
prefetchModule(id: string, moduleInfo: goog.module.ModuleInfo): void;
}
} | random_line_split |
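// A hedged sketch (not part of Closure Library): a no-op loader satisfying the
// interface above, usable as a test stand-in. Optional callbacks are trimmed
// to the ones this stub actually touches.
class StubModuleLoader {
  loadModules(ids: string[], moduleInfoMap: Object, opt_successFn?: () => any): void {
    if (opt_successFn) opt_successFn(); // pretend every module loaded instantly
  }
  prefetchModule(id: string, moduleInfo: goog.module.ModuleInfo): void {
    // nothing to prefetch in the stub
  }
}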
|
lib.rs | #![feature(core)]
/// Generate a new iterable with a list comprehension. This macro tries to follow the syntax of
/// iterable that implements `std::iter::FromIterator`. The resulting type will be determined by
/// the type of the variable that you are attempting to assign to. You can create a `Vec`:
///
/// ```ignore
/// let x: Vec<i32> = gen![i*30 => i in [1, 2, 3, 4, 5]];
/// ```
///
/// You can generate a `HashSet`:
///
/// ```ignore
/// let x: HashSet<i32> = gen![i*30 => i in [1, 2, 3, 4, 5]];
/// ```
///
/// You can even use conditionals to generate stuff:
///
/// ```ignore
/// let x: HashSet<i32> = gen![i => i in [1, 2, 3, 4, 5], x % 2 == 0];
/// assert_eq!(x, vec![2, 4]);
/// ```
///
/// Comparisson to Python's list comprehension
/// ===
///
/// Python
/// ---
/// ```python
/// x = [i*4 for i in range(1, 5)]
/// ```
///
/// Rust with gen! macro
/// ---
/// ```ignore
/// let x: Vec<i32> = gen!(x*4 => x in [1, 2, 3, 4]);
/// ```
#[macro_export]
#[macro_use]
macro_rules! gen {
[$e:expr => $variable:ident in $iterable:expr] => (
$iterable.iter().cloned().map(|$variable| $e).collect()
);
[$e:expr => $variable:ident in $iterable:expr, $condition:expr] => (
$iterable.iter().cloned().filter(|$variable| $condition).map(|$variable| $e).collect()
);
} | random_line_split |
|
executive.rs | ContractAddress, ReturnData,
};
use externalities::*;
use tests::helpers::*;
use ethjson;
use trace::{Tracer, NoopTracer};
use trace::{VMTracer, NoopVMTracer};
use bytes::{Bytes, BytesRef};
use trie; | use machine::EthereumMachine as Machine;
#[derive(Debug, PartialEq, Clone)]
struct CallCreate {
data: Bytes,
destination: Option<Address>,
gas_limit: U256,
value: U256
}
impl From<ethjson::vm::Call> for CallCreate {
fn from(c: ethjson::vm::Call) -> Self {
let dst: Option<ethjson::hash::Address> = c.destination.into();
CallCreate {
data: c.data.into(),
destination: dst.map(Into::into),
gas_limit: c.gas_limit.into(),
value: c.value.into()
}
}
}
/// Tiny wrapper around executive externalities.
/// Stores callcreates.
struct TestExt<'a, T: 'a, V: 'a, B: 'a>
where T: Tracer, V: VMTracer, B: StateBackend
{
ext: Externalities<'a, T, V, B>,
callcreates: Vec<CallCreate>,
nonce: U256,
sender: Address,
}
impl<'a, T: 'a, V: 'a, B: 'a> TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend,
{
fn new(
state: &'a mut State<B>,
info: &'a EnvInfo,
machine: &'a Machine,
depth: usize,
origin_info: OriginInfo,
substate: &'a mut Substate,
output: OutputPolicy<'a, 'a>,
address: Address,
tracer: &'a mut T,
vm_tracer: &'a mut V,
) -> trie::Result<Self> {
let static_call = false;
Ok(TestExt {
nonce: state.nonce(&address)?,
ext: Externalities::new(state, info, machine, depth, origin_info, substate, output, tracer, vm_tracer, static_call),
callcreates: vec![],
sender: address,
})
}
}
impl<'a, T: 'a, V: 'a, B: 'a> Ext for TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend
{
fn storage_at(&self, key: &H256) -> vm::Result<H256> {
self.ext.storage_at(key)
}
fn set_storage(&mut self, key: H256, value: H256) -> vm::Result<()> {
self.ext.set_storage(key, value)
}
fn exists(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists(address)
}
fn exists_and_not_null(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists_and_not_null(address)
}
fn balance(&self, address: &Address) -> vm::Result<U256> {
self.ext.balance(address)
}
fn origin_balance(&self) -> vm::Result<U256> {
self.ext.origin_balance()
}
fn blockhash(&mut self, number: &U256) -> H256 {
self.ext.blockhash(number)
}
fn create(&mut self, gas: &U256, value: &U256, code: &[u8], address: CreateContractAddress) -> ContractCreateResult {
self.callcreates.push(CallCreate {
data: code.to_vec(),
destination: None,
gas_limit: *gas,
value: *value
});
let contract_address = contract_address(address, &self.sender, &self.nonce, &code).0;
ContractCreateResult::Created(contract_address, *gas)
}
fn call(&mut self,
gas: &U256,
_sender_address: &Address,
receive_address: &Address,
value: Option<U256>,
data: &[u8],
_code_address: &Address,
_output: &mut [u8],
_call_type: CallType
) -> MessageCallResult {
self.callcreates.push(CallCreate {
data: data.to_vec(),
destination: Some(receive_address.clone()),
gas_limit: *gas,
value: value.unwrap()
});
MessageCallResult::Success(*gas, ReturnData::empty())
}
fn extcode(&self, address: &Address) -> vm::Result<Arc<Bytes>> {
self.ext.extcode(address)
}
fn extcodesize(&self, address: &Address) -> vm::Result<usize> {
self.ext.extcodesize(address)
}
fn log(&mut self, topics: Vec<H256>, data: &[u8]) -> vm::Result<()> {
self.ext.log(topics, data)
}
fn ret(self, gas: &U256, data: &ReturnData, apply_state: bool) -> Result<U256, vm::Error> {
self.ext.ret(gas, data, apply_state)
}
fn suicide(&mut self, refund_address: &Address) -> vm::Result<()> {
self.ext.suicide(refund_address)
}
fn schedule(&self) -> &Schedule {
self.ext.schedule()
}
fn env_info(&self) -> &EnvInfo {
self.ext.env_info()
}
fn depth(&self) -> usize {
0
}
fn is_static(&self) -> bool {
false
}
fn inc_sstore_clears(&mut self) {
self.ext.inc_sstore_clears()
}
}
fn do_json_test(json_data: &[u8]) -> Vec<String> {
let vms = VMType::all();
vms
.iter()
.flat_map(|vm| do_json_test_for(vm, json_data))
.collect()
}
fn do_json_test_for(vm_type: &VMType, json_data: &[u8]) -> Vec<String> {
let tests = ethjson::vm::Test::load(json_data).unwrap();
let mut failed = Vec::new();
for (name, vm) in tests.into_iter() {
println!("name: {:?}", name);
let mut fail = false;
let mut fail_unless = |cond: bool, s: &str | if !cond && !fail {
failed.push(format!("[{}] {}: {}", vm_type, name, s));
fail = true
};
macro_rules! try_fail {
($e: expr) => {
match $e {
Ok(x) => x,
Err(e) => {
let msg = format!("Internal error: {}", e);
fail_unless(false, &msg);
continue
}
}
}
}
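// Note (added): on an internal error, try_fail! records the failure via
// fail_unless and `continue`s to the next fixture instead of panicking,
// so one broken test cannot abort the whole run.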
let out_of_gas = vm.out_of_gas();
let mut state = get_temp_state();
state.populate_from(From::from(vm.pre_state.clone()));
let info = From::from(vm.env);
let machine = {
let mut machine = ::ethereum::new_frontier_test_machine();
machine.set_schedule_creation_rules(Box::new(move |s, _| s.max_depth = 1));
machine
};
let params = ActionParams::from(vm.transaction);
let mut substate = Substate::new();
let mut tracer = NoopTracer;
let mut vm_tracer = NoopVMTracer;
let mut output = vec![];
let vm_factory = state.vm_factory();
// execute
let (res, callcreates) = {
let mut ex = try_fail!(TestExt::new(
&mut state,
&info,
&machine,
0,
OriginInfo::from(&params),
&mut substate,
OutputPolicy::Return(BytesRef::Flexible(&mut output), None),
params.address.clone(),
&mut tracer,
&mut vm_tracer,
));
let mut evm = vm_factory.create(params.gas);
let res = evm.exec(params, &mut ex);
// a return in finalize will not alter callcreates
let callcreates = ex.callcreates.clone();
(res.finalize(ex), callcreates)
};
let log_hash = {
let mut rlp = RlpStream::new_list(substate.logs.len());
for l in &substate.logs {
rlp.append(l);
}
keccak(&rlp.drain())
};
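// Sanity check (added; assumed values): with no logs, the stream holds an
// empty RLP list — the single byte 0xc0 — so log_hash reduces to the
// well-known empty-list keccak, 0x1dcc...9347.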
match res {
Err(_) => fail_unless(out_of_gas, "didn't expect to run out of gas."),
Ok(res) => {
fail_unless(!out_of_gas, "expected to run out of gas.");
fail_unless(Some(res.gas_left) == vm.gas_left.map(Into::into), "gas_left is incorrect");
let vm_output: Option<Vec<u8>> = vm.output.map(Into::into);
fail_unless(Some(output) == vm_output, "output is incorrect");
fail_unless(Some(log_hash) == vm.logs.map(|h| h.0), "logs are incorrect");
executive.rs | Address, ReturnData,
};
use externalities::*;
use tests::helpers::*;
use ethjson;
use trace::{Tracer, NoopTracer};
use trace::{VMTracer, NoopVMTracer};
use bytes::{Bytes, BytesRef};
use trie;
use rlp::RlpStream;
use hash::keccak;
use machine::EthereumMachine as Machine;
#[derive(Debug, PartialEq, Clone)]
struct CallCreate {
data: Bytes,
destination: Option<Address>,
gas_limit: U256,
value: U256
}
impl From<ethjson::vm::Call> for CallCreate {
fn from(c: ethjson::vm::Call) -> Self {
let dst: Option<ethjson::hash::Address> = c.destination.into();
CallCreate {
data: c.data.into(),
destination: dst.map(Into::into),
gas_limit: c.gas_limit.into(),
value: c.value.into()
}
}
}
/// Tiny wrapper around executive externalities.
/// Stores callcreates.
struct TestExt<'a, T: 'a, V: 'a, B: 'a>
where T: Tracer, V: VMTracer, B: StateBackend
{
ext: Externalities<'a, T, V, B>,
callcreates: Vec<CallCreate>,
nonce: U256,
sender: Address,
}
impl<'a, T: 'a, V: 'a, B: 'a> TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend,
{
fn new(
state: &'a mut State<B>,
info: &'a EnvInfo,
machine: &'a Machine,
depth: usize,
origin_info: OriginInfo,
substate: &'a mut Substate,
output: OutputPolicy<'a, 'a>,
address: Address,
tracer: &'a mut T,
vm_tracer: &'a mut V,
) -> trie::Result<Self> {
let static_call = false;
Ok(TestExt {
nonce: state.nonce(&address)?,
ext: Externalities::new(state, info, machine, depth, origin_info, substate, output, tracer, vm_tracer, static_call),
callcreates: vec![],
sender: address,
})
}
}
impl<'a, T: 'a, V: 'a, B: 'a> Ext for TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend
{
fn storage_at(&self, key: &H256) -> vm::Result<H256> {
self.ext.storage_at(key)
}
fn set_storage(&mut self, key: H256, value: H256) -> vm::Result<()> {
self.ext.set_storage(key, value)
}
fn exists(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists(address)
}
fn exists_and_not_null(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists_and_not_null(address)
}
fn balance(&self, address: &Address) -> vm::Result<U256> {
self.ext.balance(address)
}
fn origin_balance(&self) -> vm::Result<U256> {
self.ext.origin_balance()
}
fn blockhash(&mut self, number: &U256) -> H256 {
self.ext.blockhash(number)
}
fn create(&mut self, gas: &U256, value: &U256, code: &[u8], address: CreateContractAddress) -> ContractCreateResult {
self.callcreates.push(CallCreate {
data: code.to_vec(),
destination: None,
gas_limit: *gas,
value: *value
});
let contract_address = contract_address(address, &self.sender, &self.nonce, &code).0;
ContractCreateResult::Created(contract_address, *gas)
}
fn call(&mut self,
gas: &U256,
_sender_address: &Address,
receive_address: &Address,
value: Option<U256>,
data: &[u8],
_code_address: &Address,
_output: &mut [u8],
_call_type: CallType
) -> MessageCallResult {
self.callcreates.push(CallCreate {
data: data.to_vec(),
destination: Some(receive_address.clone()),
gas_limit: *gas,
value: value.unwrap()
});
MessageCallResult::Success(*gas, ReturnData::empty())
}
fn extcode(&self, address: &Address) -> vm::Result<Arc<Bytes>> {
self.ext.extcode(address)
}
fn extcodesize(&self, address: &Address) -> vm::Result<usize> {
self.ext.extcodesize(address)
}
fn log(&mut self, topics: Vec<H256>, data: &[u8]) -> vm::Result<()> {
self.ext.log(topics, data)
}
fn ret(self, gas: &U256, data: &ReturnData, apply_state: bool) -> Result<U256, vm::Error> {
self.ext.ret(gas, data, apply_state)
}
fn suicide(&mut self, refund_address: &Address) -> vm::Result<()> {
self.ext.suicide(refund_address)
}
fn schedule(&self) -> &Schedule {
self.ext.schedule()
}
fn env_info(&self) -> &EnvInfo {
self.ext.env_info()
}
fn depth(&self) -> usize {
0
}
fn is_static(&self) -> bool {
false
}
fn inc_sstore_clears(&mut self) {
self.ext.inc_sstore_clears()
}
}
fn do_json_test(json_data: &[u8]) -> Vec<String> {
let vms = VMType::all();
vms
.iter()
.flat_map(|vm| do_json_test_for(vm, json_data))
.collect()
}
fn do_json_test_for(vm_type: &VMType, json_data: &[u8]) -> Vec<String> {
let tests = ethjson::vm::Test::load(json_data).unwrap();
let mut failed = Vec::new();
for (name, vm) in tests.into_iter() {
println!("name: {:?}", name);
let mut fail = false;
let mut fail_unless = |cond: bool, s: &str | if !cond && !fail {
failed.push(format!("[{}] {}: {}", vm_type, name, s));
fail = true
};
macro_rules! try_fail {
($e: expr) => {
match $e {
Ok(x) => x,
Err(e) => {
let msg = format!("Internal error: {}", e);
fail_unless(false, &msg);
continue
}
}
}
}
let out_of_gas = vm.out_of_gas();
let mut state = get_temp_state();
state.populate_from(From::from(vm.pre_state.clone()));
let info = From::from(vm.env);
let machine = {
let mut machine = ::ethereum::new_frontier_test_machine();
machine.set_schedule_creation_rules(Box::new(move |s, _| s.max_depth = 1));
machine
};
let params = ActionParams::from(vm.transaction);
let mut substate = Substate::new();
let mut tracer = NoopTracer;
let mut vm_tracer = NoopVMTracer;
let mut output = vec![];
let vm_factory = state.vm_factory();
// execute
let (res, callcreates) = {
let mut ex = try_fail!(TestExt::new(
&mut state,
&info,
&machine,
0,
OriginInfo::from(&params),
&mut substate,
OutputPolicy::Return(BytesRef::Flexible(&mut output), None),
params.address.clone(),
&mut tracer,
&mut vm_tracer,
));
let mut evm = vm_factory.create(params.gas);
let res = evm.exec(params, &mut ex);
// a return in finalize will not alter callcreates
let callcreates = ex.callcreates.clone();
(res.finalize(ex), callcreates)
};
let log_hash = {
let mut rlp = RlpStream::new_list(substate.logs.len());
for l in &substate.logs {
rlp.append(l);
}
keccak(&rlp.drain())
};
match res {
Err(_) => fail_unless(out_of_gas, "didn't expect to run out of gas."),
Ok(res) => {
fail_unless(!out_of_gas, "expected to run out of gas.");
fail_unless(Some(res.gas_left) == vm.gas_left.map(Into::into), "gas_left is incorrect");
let vm_output: Option<Vec<u8>> = vm.output.map(Into::into);
fail_unless(Some(output) == vm_output, "output is incorrect");
fail_unless(Some(log_hash) == vm.logs.map(|h| h.0), "logs are incorrect");
executive.rs | Address, ReturnData,
};
use externalities::*;
use tests::helpers::*;
use ethjson;
use trace::{Tracer, NoopTracer};
use trace::{VMTracer, NoopVMTracer};
use bytes::{Bytes, BytesRef};
use trie;
use rlp::RlpStream;
use hash::keccak;
use machine::EthereumMachine as Machine;
#[derive(Debug, PartialEq, Clone)]
struct CallCreate {
data: Bytes,
destination: Option<Address>,
gas_limit: U256,
value: U256
}
impl From<ethjson::vm::Call> for CallCreate {
fn from(c: ethjson::vm::Call) -> Self {
let dst: Option<ethjson::hash::Address> = c.destination.into();
CallCreate {
data: c.data.into(),
destination: dst.map(Into::into),
gas_limit: c.gas_limit.into(),
value: c.value.into()
}
}
}
/// Tiny wrapper around executive externalities.
/// Stores callcreates.
struct TestExt<'a, T: 'a, V: 'a, B: 'a>
where T: Tracer, V: VMTracer, B: StateBackend
{
ext: Externalities<'a, T, V, B>,
callcreates: Vec<CallCreate>,
nonce: U256,
sender: Address,
}
impl<'a, T: 'a, V: 'a, B: 'a> TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend,
{
fn new(
state: &'a mut State<B>,
info: &'a EnvInfo,
machine: &'a Machine,
depth: usize,
origin_info: OriginInfo,
substate: &'a mut Substate,
output: OutputPolicy<'a, 'a>,
address: Address,
tracer: &'a mut T,
vm_tracer: &'a mut V,
) -> trie::Result<Self> {
let static_call = false;
Ok(TestExt {
nonce: state.nonce(&address)?,
ext: Externalities::new(state, info, machine, depth, origin_info, substate, output, tracer, vm_tracer, static_call),
callcreates: vec![],
sender: address,
})
}
}
impl<'a, T: 'a, V: 'a, B: 'a> Ext for TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend
{
fn storage_at(&self, key: &H256) -> vm::Result<H256> {
self.ext.storage_at(key)
}
fn set_storage(&mut self, key: H256, value: H256) -> vm::Result<()> {
self.ext.set_storage(key, value)
}
fn exists(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists(address)
}
fn exists_and_not_null(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists_and_not_null(address)
}
fn balance(&self, address: &Address) -> vm::Result<U256> {
self.ext.balance(address)
}
fn origin_balance(&self) -> vm::Result<U256> {
self.ext.origin_balance()
}
fn blockhash(&mut self, number: &U256) -> H256 {
self.ext.blockhash(number)
}
fn create(&mut self, gas: &U256, value: &U256, code: &[u8], address: CreateContractAddress) -> ContractCreateResult {
self.callcreates.push(CallCreate {
data: code.to_vec(),
destination: None,
gas_limit: *gas,
value: *value
});
let contract_address = contract_address(address, &self.sender, &self.nonce, &code).0;
ContractCreateResult::Created(contract_address, *gas)
}
fn call(&mut self,
gas: &U256,
_sender_address: &Address,
receive_address: &Address,
value: Option<U256>,
data: &[u8],
_code_address: &Address,
_output: &mut [u8],
_call_type: CallType
) -> MessageCallResult {
self.callcreates.push(CallCreate {
data: data.to_vec(),
destination: Some(receive_address.clone()),
gas_limit: *gas,
value: value.unwrap()
});
MessageCallResult::Success(*gas, ReturnData::empty())
}
fn extcode(&self, address: &Address) -> vm::Result<Arc<Bytes>> {
self.ext.extcode(address)
}
fn extcodesize(&self, address: &Address) -> vm::Result<usize> {
self.ext.extcodesize(address)
}
fn log(&mut self, topics: Vec<H256>, data: &[u8]) -> vm::Result<()> {
self.ext.log(topics, data)
}
fn ret(self, gas: &U256, data: &ReturnData, apply_state: bool) -> Result<U256, vm::Error> {
self.ext.ret(gas, data, apply_state)
}
fn suicide(&mut self, refund_address: &Address) -> vm::Result<()> {
self.ext.suicide(refund_address)
}
fn schedule(&self) -> &Schedule {
self.ext.schedule()
}
fn env_info(&self) -> &EnvInfo {
self.ext.env_info()
}
fn depth(&self) -> usize {
0
}
fn is_static(&self) -> bool {
false
}
fn inc_sstore_clears(&mut self) {
self.ext.inc_sstore_clears()
}
}
fn do_json_test(json_data: &[u8]) -> Vec<String> {
let vms = VMType::all();
vms
.iter()
.flat_map(|vm| do_json_test_for(vm, json_data))
.collect()
}
fn do_json_test_for(vm_type: &VMType, json_data: &[u8]) -> Vec<String> {
let tests = ethjson::vm::Test::load(json_data).unwrap();
let mut failed = Vec::new();
for (name, vm) in tests.into_iter() {
println!("name: {:?}", name);
let mut fail = false;
let mut fail_unless = |cond: bool, s: &str | if !cond && !fail {
failed.push(format!("[{}] {}: {}", vm_type, name, s));
fail = true
};
macro_rules! try_fail {
($e: expr) => {
match $e {
Ok(x) => x,
Err(e) => {
let msg = format!("Internal error: {}", e);
fail_unless(false, &msg);
continue
}
}
}
}
let out_of_gas = vm.out_of_gas();
let mut state = get_temp_state();
state.populate_from(From::from(vm.pre_state.clone()));
let info = From::from(vm.env);
let machine = {
let mut machine = ::ethereum::new_frontier_test_machine();
machine.set_schedule_creation_rules(Box::new(move |s, _| s.max_depth = 1));
machine
};
let params = ActionParams::from(vm.transaction);
let mut substate = Substate::new();
let mut tracer = NoopTracer;
let mut vm_tracer = NoopVMTracer;
let mut output = vec![];
let vm_factory = state.vm_factory();
// execute
let (res, callcreates) = {
let mut ex = try_fail!(TestExt::new(
&mut state,
&info,
&machine,
0,
OriginInfo::from(&params),
&mut substate,
OutputPolicy::Return(BytesRef::Flexible(&mut output), None),
params.address.clone(),
&mut tracer,
&mut vm_tracer,
));
let mut evm = vm_factory.create(params.gas);
let res = evm.exec(params, &mut ex);
// a return in finalize will not alter callcreates
let callcreates = ex.callcreates.clone();
(res.finalize(ex), callcreates)
};
let log_hash = {
let mut rlp = RlpStream::new_list(substate.logs.len());
for l in &substate.logs {
rlp.append(l);
}
keccak(&rlp.drain())
};
match res {
Err(_) => fail_unless(out_of_gas, "didn't expect to run out of gas."),
Ok(res) => {
fail_unless(!out_of_gas, "expected to run out of gas.");
fail_unless(Some(res.gas_left) == vm.gas_left.map(Into::into), "gas_left is incorrect");
let vm_output: Option<Vec<u8>> = vm.output.map(Into::into);
fail_unless(Some(output) == vm_output, "output is incorrect");
fail_unless(Some(log_hash) == vm.logs.map(|h| h.0), "logs are incorrect");
executive.rs | Address, ReturnData,
};
use externalities::*;
use tests::helpers::*;
use ethjson;
use trace::{Tracer, NoopTracer};
use trace::{VMTracer, NoopVMTracer};
use bytes::{Bytes, BytesRef};
use trie;
use rlp::RlpStream;
use hash::keccak;
use machine::EthereumMachine as Machine;
#[derive(Debug, PartialEq, Clone)]
struct CallCreate {
data: Bytes,
destination: Option<Address>,
gas_limit: U256,
value: U256
}
impl From<ethjson::vm::Call> for CallCreate {
fn from(c: ethjson::vm::Call) -> Self {
let dst: Option<ethjson::hash::Address> = c.destination.into();
CallCreate {
data: c.data.into(),
destination: dst.map(Into::into),
gas_limit: c.gas_limit.into(),
value: c.value.into()
}
}
}
/// Tiny wrapper around executive externalities.
/// Stores callcreates.
struct TestExt<'a, T: 'a, V: 'a, B: 'a>
where T: Tracer, V: VMTracer, B: StateBackend
{
ext: Externalities<'a, T, V, B>,
callcreates: Vec<CallCreate>,
nonce: U256,
sender: Address,
}
impl<'a, T: 'a, V: 'a, B: 'a> TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend,
{
fn new(
state: &'a mut State<B>,
info: &'a EnvInfo,
machine: &'a Machine,
depth: usize,
origin_info: OriginInfo,
substate: &'a mut Substate,
output: OutputPolicy<'a, 'a>,
address: Address,
tracer: &'a mut T,
vm_tracer: &'a mut V,
) -> trie::Result<Self> {
let static_call = false;
Ok(TestExt {
nonce: state.nonce(&address)?,
ext: Externalities::new(state, info, machine, depth, origin_info, substate, output, tracer, vm_tracer, static_call),
callcreates: vec![],
sender: address,
})
}
}
impl<'a, T: 'a, V: 'a, B: 'a> Ext for TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend
{
fn storage_at(&self, key: &H256) -> vm::Result<H256> {
self.ext.storage_at(key)
}
fn set_storage(&mut self, key: H256, value: H256) -> vm::Result<()> {
self.ext.set_storage(key, value)
}
fn exists(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists(address)
}
fn exists_and_not_null(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists_and_not_null(address)
}
fn balance(&self, address: &Address) -> vm::Result<U256> {
self.ext.balance(address)
}
fn origin_balance(&self) -> vm::Result<U256> {
self.ext.origin_balance()
}
fn blockhash(&mut self, number: &U256) -> H256 {
self.ext.blockhash(number)
}
fn create(&mut self, gas: &U256, value: &U256, code: &[u8], address: CreateContractAddress) -> ContractCreateResult {
self.callcreates.push(CallCreate {
data: code.to_vec(),
destination: None,
gas_limit: *gas,
value: *value
});
let contract_address = contract_address(address, &self.sender, &self.nonce, &code).0;
ContractCreateResult::Created(contract_address, *gas)
}
fn call(&mut self,
gas: &U256,
_sender_address: &Address,
receive_address: &Address,
value: Option<U256>,
data: &[u8],
_code_address: &Address,
_output: &mut [u8],
_call_type: CallType
) -> MessageCallResult {
self.callcreates.push(CallCreate {
data: data.to_vec(),
destination: Some(receive_address.clone()),
gas_limit: *gas,
value: value.unwrap()
});
MessageCallResult::Success(*gas, ReturnData::empty())
}
fn extcode(&self, address: &Address) -> vm::Result<Arc<Bytes>> {
self.ext.extcode(address)
}
fn extcodesize(&self, address: &Address) -> vm::Result<usize> {
self.ext.extcodesize(address)
}
fn log(&mut self, topics: Vec<H256>, data: &[u8]) -> vm::Result<()> {
self.ext.log(topics, data)
}
fn ret(self, gas: &U256, data: &ReturnData, apply_state: bool) -> Result<U256, vm::Error> {
self.ext.ret(gas, data, apply_state)
}
fn suicide(&mut self, refund_address: &Address) -> vm::Result<()> {
self.ext.suicide(refund_address)
}
fn schedule(&self) -> &Schedule {
self.ext.schedule()
}
fn env_info(&self) -> &EnvInfo {
self.ext.env_info()
}
fn depth(&self) -> usize {
0
}
fn is_static(&self) -> bool {
false
}
fn inc_sstore_clears(&mut self) {
self.ext.inc_sstore_clears()
}
}
fn do_json_test(json_data: &[u8]) -> Vec<String> {
let vms = VMType::all();
vms
.iter()
.flat_map(|vm| do_json_test_for(vm, json_data))
.collect()
}
fn do_json_test_for(vm_type: &VMType, json_data: &[u8]) -> Vec<String> {
let tests = ethjson::vm::Test::load(json_data).unwrap();
let mut failed = Vec::new();
for (name, vm) in tests.into_iter() {
println!("name: {:?}", name);
let mut fail = false;
let mut fail_unless = |cond: bool, s: &str | if !cond && !fail {
failed.push(format!("[{}] {}: {}", vm_type, name, s));
fail = true
};
macro_rules! try_fail {
($e: expr) => {
match $e {
Ok(x) => x,
Err(e) => {
let msg = format!("Internal error: {}", e);
fail_unless(false, &msg);
continue
}
}
}
}
let out_of_gas = vm.out_of_gas();
let mut state = get_temp_state();
state.populate_from(From::from(vm.pre_state.clone()));
let info = From::from(vm.env);
let machine = {
let mut machine = ::ethereum::new_frontier_test_machine();
machine.set_schedule_creation_rules(Box::new(move |s, _| s.max_depth = 1));
machine
};
let params = ActionParams::from(vm.transaction);
let mut substate = Substate::new();
let mut tracer = NoopTracer;
let mut vm_tracer = NoopVMTracer;
let mut output = vec![];
let vm_factory = state.vm_factory();
// execute
let (res, callcreates) = {
let mut ex = try_fail!(TestExt::new(
&mut state,
&info,
&machine,
0,
OriginInfo::from(&params),
&mut substate,
OutputPolicy::Return(BytesRef::Flexible(&mut output), None),
params.address.clone(),
&mut tracer,
&mut vm_tracer,
));
let mut evm = vm_factory.create(params.gas);
let res = evm.exec(params, &mut ex);
// a return in finalize will not alter callcreates
let callcreates = ex.callcreates.clone();
(res.finalize(ex), callcreates)
};
let log_hash = {
let mut rlp = RlpStream::new_list(substate.logs.len());
for l in &substate.logs {
rlp.append(l);
}
keccak(&rlp.drain())
};
match res {
Err(_) => fail_unless(out_of_gas, "didn't expect to run out of gas."),
Ok(res) => {
fail_unless(!out_of_gas, "expected to run out of gas.");
fail_unless(Some(res.gas_left) == vm.gas_left.map(Into::into), "gas_left is incorrect");
let vm_output: Option<Vec<u8>> = vm.output.map(Into::into);
fail_unless(Some(output) == vm_output, "output is incorrect");
fail_unless(Some(log_hash) == vm.logs.map(|h| h.0), "logs are incorrect");
preloader.js | 'use strict';
angular.module('mpApp')
.factory(
'preloader',
function($q, $rootScope) {
// I manage the preloading of image objects. Accepts an array of image URLs.
function Preloader(imageLocations) {
// I am the image SRC values to preload.
this.imageLocations = imageLocations;
// As the images load, we'll need to keep track of the load/error
// counts when announcing the progress on the loading.
this.imageCount = this.imageLocations.length;
this.loadCount = 0;
this.errorCount = 0;
// I am the possible states that the preloader can be in.
this.states = {
PENDING: 1,
LOADING: 2,
RESOLVED: 3,
REJECTED: 4
};
// I keep track of the current state of the preloader.
this.state = this.states.PENDING;
// When loading the images, a promise will be returned to indicate
// when the loading has completed (and / or progressed).
this.deferred = $q.defer();
this.promise = this.deferred.promise;
}
// ---
// STATIC METHODS.
// ---
// I preload the given images [Array] and return a promise. The promise
// will be resolved with the array of image locations.
Preloader.preloadImages = function(imageLocations) {
var preloader = new Preloader(imageLocations);
return (preloader.load());
};
// ---
// INSTANCE METHODS.
// ---
Preloader.prototype = {
// Best practice for "instanceof" operator.
constructor: Preloader,
// ---
// PUBLIC METHODS.
// ---
// I determine if the preloader has started loading images yet.
isInitiated: function isInitiated() {
return (this.state !== this.states.PENDING);
},
// I determine if the preloader has failed to load all of the images.
isRejected: function isRejected() {
return (this.state === this.states.REJECTED);
},
// I determine if the preloader has successfully loaded all of the images.
isResolved: function isResolved() {
return (this.state === this.states.RESOLVED);
},
// I initiate the preload of the images. Returns a promise.
load: function load() {
// If the images are already loading, return the existing promise.
if (this.isInitiated()) {
return (this.promise);
}
this.state = this.states.LOADING;
for (var i = 0; i < this.imageCount; i++) {
this.loadImageLocation(this.imageLocations[i]);
}
// Return the deferred promise for the load event.
return (this.promise);
},
// ---
// PRIVATE METHODS.
// ---
// I handle the load-failure of the given image location.
handleImageError: function handleImageError(imageLocation) {
this.errorCount++;
// If the preload action has already failed, ignore further action.
if (this.isRejected()) {
return;
}
this.state = this.states.REJECTED;
this.deferred.reject(imageLocation);
},
// I handle the load-success of the given image location.
handleImageLoad: function handleImageLoad(imageLocation) {
this.loadCount++;
// If the preload action has already failed, ignore further action.
if (this.isRejected()) {
return;
}
// Notify the progress of the overall deferred. This is different
// than Resolving the deferred - you can call notify many times
// before the ultimate resolution (or rejection) of the deferred.
this.deferred.notify({
percent: Math.ceil(this.loadCount / this.imageCount * 100),
imageLocation: imageLocation
});
// If all of the images have loaded, we can resolve the deferred
// value that we returned to the calling context.
if (this.loadCount === this.imageCount) {
this.state = this.states.RESOLVED;
this.deferred.resolve(this.imageLocations);
}
},
// I load the given image location and then wire the load / error
// events back into the preloader instance.
// --
// NOTE: The load/error events trigger a $digest.
loadImageLocation: function loadImageLocation(imageLocation) {
var preloader = this;
// When it comes to creating the image object, it is critical that
// we bind the event handlers BEFORE we actually set the image
// source. Failure to do so will prevent the events from
// triggering properly in some browsers.
var image = angular.element(new Image())
.bind('load', function(event) {
// Since the load event is asynchronous, we have to
// tell AngularJS that something changed.
$rootScope.$apply(
function() {
preloader.handleImageLoad(event.target.src);
// Clean up object reference to help with the
// garbage collection in the closure.
preloader = image = event = null;
}
);
})
.bind('error', function(event) {
// Since the load event is asynchronous, we have to
// tell AngularJS that something changed.
$rootScope.$apply(
function() {
preloader.handleImageError(event.target.src);
// Clean up object reference to help with the
// garbage collection in the closure.
preloader = image = event = null;
}
);
})
.attr('src', imageLocation);
}
};
// Return the factory instance.
return (Preloader);
}
);
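// Usage sketch (added; the URLs and handler names are assumptions, not part
// of this factory): the promise resolves with the original URL array,
// rejects with the first failing URL, and notifies with
// { percent, imageLocation } as each image lands.
preloader.preloadImages(['/img/one.png', '/img/two.png'])
    .then(
        function handleResolve(imageLocations) {
            console.log('All loaded:', imageLocations);
        },
        function handleReject(imageLocation) {
            console.warn('Failed to load:', imageLocation);
        },
        function handleNotify(event) {
            console.log('Loaded', event.percent + '%:', event.imageLocation);
        }
    );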
preloader.js | 'use strict';
angular.module('mpApp')
.factory(
'preloader',
function($q, $rootScope) {
// I manage the preloading of image objects. Accepts an array of image URLs.
function Preloader(imageLocations) {
// I am the image SRC values to preload.
this.imageLocations = imageLocations;
// As the images load, we'll need to keep track of the load/error
// counts when announcing the progress on the loading.
this.imageCount = this.imageLocations.length;
this.loadCount = 0;
this.errorCount = 0;
// I am the possible states that the preloader can be in.
this.states = {
PENDING: 1,
LOADING: 2,
RESOLVED: 3,
REJECTED: 4
};
// I keep track of the current state of the preloader.
this.state = this.states.PENDING;
// When loading the images, a promise will be returned to indicate
// when the loading has completed (and / or progressed).
this.deferred = $q.defer();
this.promise = this.deferred.promise;
}
// ---
// STATIC METHODS.
// ---
// I preload the given images [Array] and return a promise. The promise
// will be resolved with the array of image locations.
Preloader.preloadImages = function(imageLocations) {
var preloader = new Preloader(imageLocations);
return (preloader.load());
};
// ---
// INSTANCE METHODS.
// ---
Preloader.prototype = {
// Best practice for "instanceof" operator.
constructor: Preloader,
// ---
// PUBLIC METHODS.
// ---
// I determine if the preloader has started loading images yet.
isInitiated: function isInitiated() {
return (this.state !== this.states.PENDING);
},
// I determine if the preloader has failed to load all of the images.
isRejected: function isRejected() {
return (this.state === this.states.REJECTED);
},
// I determine if the preloader has successfully loaded all of the images.
isResolved: function isResolved() {
return (this.state === this.states.RESOLVED);
},
// I initiate the preload of the images. Returns a promise.
load: function load() {
// If the images are already loading, return the existing promise.
if (this.isInitiated()) {
return (this.promise);
}
this.state = this.states.LOADING;
for (var i = 0; i < this.imageCount; i++) {
this.loadImageLocation(this.imageLocations[i]);
}
// Return the deferred promise for the load event.
return (this.promise);
},
// ---
// PRIVATE METHODS.
// ---
// I handle the load-failure of the given image location.
handleImageError: function handleImageError(imageLocation) {
this.errorCount++;
// If the preload action has already failed, ignore further action.
if (this.isRejected()) {
return;
}
this.state = this.states.REJECTED;
this.deferred.reject(imageLocation);
},
// I handle the load-success of the given image location.
handleImageLoad: function handleImageLoad(imageLocation) {
this.loadCount++;
// If the preload action has already failed, ignore further action.
if (this.isRejected()) {
return;
}
// Notify the progress of the overall deferred. This is different
// than Resolving the deferred - you can call notify many times
// before the ultimate resolution (or rejection) of the deferred.
this.deferred.notify({
percent: Math.ceil(this.loadCount / this.imageCount * 100),
imageLocation: imageLocation
});
// If all of the images have loaded, we can resolve the deferred
// value that we returned to the calling context.
if (this.loadCount === this.imageCount) {
this.state = this.states.RESOLVED;
this.deferred.resolve(this.imageLocations);
}
},
// I load the given image location and then wire the load / error
// events back into the preloader instance.
// --
// NOTE: The load/error events trigger a $digest.
loadImageLocation: function loadImageLocation(imageLocation) {
var preloader = this;
// When it comes to creating the image object, it is critical that
// we bind the event handlers BEFORE we actually set the image
// source. Failure to do so will prevent the events from
// triggering properly in some browsers.
var image = angular.element(new Image())
.bind('load', function(event) {
// Since the load event is asynchronous, we have to
// tell AngularJS that something changed.
$rootScope.$apply(
function() {
preloader.handleImageLoad(event.target.src);
// Clean up object reference to help with the
// garbage collection in the closure.
preloader = image = event = null;
}
);
})
.bind('error', function(event) {
// Since the load event is asynchronous, we have to
// tell AngularJS that something changed.
$rootScope.$apply(
function() {
preloader.handleImageError(event.target.src);
// Clean up object reference to help with the
// garbage collection in the closure.
preloader = image = event = null;
}
);
})
.attr('src', imageLocation);
}
};
// Return the factory instance.
return (Preloader);
}
);
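// Why bind-before-src matters (added; onLoad and url are assumed names): for
// a cached image, some browsers fire 'load' synchronously when src is
// assigned, so a handler attached afterwards would never run.
var img = angular.element(new Image());
img.bind('load', onLoad); // attach handlers first...
img.attr('src', url);     // ...then kick off the request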
preloader.js | 'use strict';
angular.module('mpApp')
.factory(
'preloader',
function($q, $rootScope) {
// I manage the preloading of image objects. Accepts an array of image URLs.
function Preloader(imageLocations) {
// I am the image SRC values to preload.
this.imageLocations = imageLocations;
// As the images load, we'll need to keep track of the load/error
// counts when announcing the progress on the loading.
this.imageCount = this.imageLocations.length;
this.loadCount = 0;
this.errorCount = 0;
// I am the possible states that the preloader can be in.
this.states = {
PENDING: 1,
LOADING: 2,
RESOLVED: 3,
REJECTED: 4
};
// I keep track of the current state of the preloader.
this.state = this.states.PENDING;
// When loading the images, a promise will be returned to indicate
// when the loading has completed (and / or progressed).
this.deferred = $q.defer();
this.promise = this.deferred.promise;
}
// ---
// STATIC METHODS.
// ---
// I preload the given images [Array] and return a promise. The promise
// will be resolved with the array of image locations.
Preloader.preloadImages = function(imageLocations) {
var preloader = new Preloader(imageLocations);
return (preloader.load());
};
// ---
// INSTANCE METHODS.
// ---
Preloader.prototype = {
// Best practice for "instanceof" operator.
constructor: Preloader,
// ---
// PUBLIC METHODS.
// ---
// I determine if the preloader has started loading images yet.
isInitiated: function isInitiated() {
return (this.state !== this.states.PENDING);
},
// I determine if the preloader has failed to load all of the images.
isRejected: function isRejected() {
return (this.state === this.states.REJECTED);
},
// I determine if the preloader has successfully loaded all of the images.
isResolved: function isResolved() {
return (this.state === this.states.RESOLVED);
},
// I initiate the preload of the images. Returns a promise.
load: function load() {
// If the images are already loading, return the existing promise.
if (this.isInitiated()) {
return (this.promise);
}
this.state = this.states.LOADING;
for (var i = 0; i < this.imageCount; i++) {
this.loadImageLocation(this.imageLocations[i]);
}
// Return the deferred promise for the load event.
return (this.promise);
},
// ---
// PRIVATE METHODS.
// ---
// I handle the load-failure of the given image location.
handleImageError: function handleImageError(imageLocation) {
this.errorCount++;
// If the preload action has already failed, ignore further action.
if (this.isRejected()) {
return;
}
this.state = this.states.REJECTED;
this.deferred.reject(imageLocation);
},
// I handle the load-success of the given image location.
handleImageLoad: function handleImageLoad(imageLocation) {
this.loadCount++;
// If the preload action has already failed, ignore further action.
if (this.isRejected()) {
return;
}
// Notify the progress of the overall deferred. This is different
// than Resolving the deferred - you can call notify many times
// before the ultimate resolution (or rejection) of the deferred.
this.deferred.notify({
percent: Math.ceil(this.loadCount / this.imageCount * 100),
imageLocation: imageLocation
});
// If all of the images have loaded, we can resolve the deferred
// value that we returned to the calling context.
if (this.loadCount === this.imageCount) {
this.state = this.states.RESOLVED;
this.deferred.resolve(this.imageLocations);
}
},
// I load the given image location and then wire the load / error
// events back into the preloader instance.
// --
// NOTE: The load/error events trigger a $digest.
loadImageLocation: function loadImageLocation(imageLocation) {
var preloader = this;
// When it comes to creating the image object, it is critical that
// we bind the event handlers BEFORE we actually set the image
// source. Failure to do so will prevent the events from
// triggering properly in some browsers.
var image = angular.element(new Image())
.bind('load', function(event) {
// Since the load event is asynchronous, we have to
// tell AngularJS that something changed.
$rootScope.$apply(
function() {
preloader.handleImageLoad(event.target.src);
// Clean up object reference to help with the
// garbage collection in the closure.
preloader = image = event = null;
}
);
})
.bind('error', function(event) {
// Since the load event is asynchronous, we have to
// tell AngularJS that something changed.
$rootScope.$apply(
function() {
preloader.handleImageError(event.target.src);
// Clean up object reference to help with the
// garbage collection in the closure.
preloader = image = event = null;
}
);
})
.attr('src', imageLocation);
}
};
// Return the factory instance.
return (Preloader);
}
);
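// Digest note (added): the 'load'/'error' DOM events arrive outside
// AngularJS's digest cycle, which is why the handlers above wrap their work
// in $rootScope.$apply — without it, then/notify callbacks would only fire
// on some later, unrelated digest.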