file_name (large_string, lengths 4-140) | prefix (large_string, lengths 0-12.1k) | suffix (large_string, lengths 0-12k) | middle (large_string, lengths 0-7.51k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
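Each row below is one fill-in-the-middle (FIM) sample: a `middle` span was cut out of a source file, `prefix` and `suffix` are the code before and after the cut, and `prefix + middle + suffix` reconstructs the original excerpt. `fim_type` records how the span was chosen; the four classes in this preview are `identifier_body`, `identifier_name`, `random_line_split`, and `conditional_block`. Cells are rendered inline, separated by `|`, and spill across many lines. As a rough sketch of how a row could be consumed — the `<fim_*>` sentinel tokens below are an assumption borrowed from common FIM training setups, not something this dataset prescribes — a row assembles into a PSM-ordered (prefix-suffix-middle) training string like this:

```python
# Minimal sketch, assuming generic <fim_*> sentinels; swap in the actual
# special tokens of whatever tokenizer/model you train.
def to_fim_string(row: dict) -> str:
    # PSM order: prefix, then suffix, then the middle the model must predict.
    return ("<fim_prefix>" + row["prefix"]
            + "<fim_suffix>" + row["suffix"]
            + "<fim_middle>" + row["middle"])

def reassemble(row: dict) -> str:
    # Sanity check: the three spans concatenate back to the source excerpt.
    return row["prefix"] + row["middle"] + row["suffix"]

row = {  # abbreviated version of the first ball.js row below
    "file_name": "ball.js",
    "prefix": "function Ball() ",
    "suffix": "\nBall.prototype.init = function() { this.appendBall(); }",
    "middle": "{ this.ballSpeed = 1; this.init(); }",
    "fim_type": "identifier_body",
}
print(to_fim_string(row))
```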
ball.js | function Ball() |
Ball.prototype.init = function() {
this.appendBall();
}
Ball.prototype.getBall = function() {
return document.getElementById(this.ballId);
}
Ball.prototype.appendBall = function() {
var b = document.createElement('div');
b.setAttribute('id',this.ballId);
b.setAttribute('class',this.ballClass);
document.body.appendChild(b);
}
Ball.prototype.move = function() {
var that = this;
that.ballInterval = setInterval(function() {
switch (that.direction) {
case 1:
that.getBall().style.left = that.getBall().offsetLeft - that.ballSpeed + 'px';
that.getBall().style.top = that.getBall().offsetTop - that.ballSpeed + 'px';
break;
case 2:
that.getBall().style.left = that.getBall().offsetLeft + that.ballSpeed + 'px';
that.getBall().style.top = that.getBall().offsetTop - that.ballSpeed + 'px';
break;
case 3:
that.getBall().style.left = that.getBall().offsetLeft + that.ballSpeed + 'px';
that.getBall().style.top = that.getBall().offsetTop + that.ballSpeed + 'px';
break;
case 4:
that.getBall().style.left = that.getBall().offsetLeft - that.ballSpeed + 'px';
that.getBall().style.top = that.getBall().offsetTop + that.ballSpeed + 'px';
break;
}
},1);
}
Ball.prototype.stop = function() {
clearInterval(this.ballInterval);
} | {
// ball id
this.ballId = 'B_'+Math.floor(Math.random() * 100000000);
// at first - we pick the direction randomly -
// there are 4 possible directions (1,2,3,4)
// 1: up left, 2: up right, 3: down right, 4: down left
this.direction = Math.floor(Math.random() * 2) + 1;
// ball speed
this.ballSpeed = 1;
// ball size
this.ballSize = 15; // in px
// ball interval
this.ballInterval = undefined;
// ball class name
this.ballClass = 'ball';
// init the ball
this.init();
} | identifier_body |
ball.js | function | () {
// ball id
this.ballId = 'B_'+Math.floor(Math.random() * 100000000);
// at first - we pick the direction randomly -
// there are 4 possible directions (1,2,3,4)
// 1: up left, 2: up right, 3: down right, 4: down left
this.direction = Math.floor(Math.random() * 2) + 1;
// ball speed
this.ballSpeed = 1;
// ball size
this.ballSize = 15; // in px
// ball interval
this.ballInterval = undefined;
// ball class name
this.ballClass = 'ball';
// init the ball
this.init();
}
Ball.prototype.init = function() {
this.appendBall();
}
Ball.prototype.getBall = function() {
return document.getElementById(this.ballId);
}
Ball.prototype.appendBall = function() {
var b = document.createElement('div');
b.setAttribute('id',this.ballId);
b.setAttribute('class',this.ballClass);
document.body.appendChild(b);
}
Ball.prototype.move = function() {
var that = this;
that.ballInterval = setInterval(function() {
switch (that.direction) {
case 1:
that.getBall().style.left = that.getBall().offsetLeft - that.ballSpeed + 'px';
that.getBall().style.top = that.getBall().offsetTop - that.ballSpeed + 'px';
break;
case 2:
that.getBall().style.left = that.getBall().offsetLeft + that.ballSpeed + 'px';
that.getBall().style.top = that.getBall().offsetTop - that.ballSpeed + 'px';
break;
case 3:
that.getBall().style.left = that.getBall().offsetLeft + that.ballSpeed + 'px';
that.getBall().style.top = that.getBall().offsetTop + that.ballSpeed + 'px';
break;
case 4:
that.getBall().style.left = that.getBall().offsetLeft - that.ballSpeed + 'px';
that.getBall().style.top = that.getBall().offsetTop + that.ballSpeed + 'px';
break;
}
},1);
}
Ball.prototype.stop = function() {
clearInterval(this.ballInterval);
} | Ball | identifier_name |
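The two ball.js rows above mask different spans of the same file: the first cuts out the constructor's body (`identifier_body`), the second cuts out only the constructor's name (`identifier_name`, middle `Ball`). The dataset's extraction code isn't shown here; as a purely hypothetical reconstruction of the `identifier_name` flavor (regex-based for brevity, where a real pipeline would likely use a parser):

```python
import re

# Hypothetical sketch: cut a function's name out of JavaScript source to form
# an identifier_name sample. The declaration matching is deliberately naive.
def mask_function_name(source: str):
    m = re.search(r"\bfunction\s+(\w+)", source)
    if m is None:
        return None
    return {
        "prefix": source[: m.start(1)],   # "...function "
        "middle": m.group(1),             # e.g. "Ball"
        "suffix": source[m.end(1):],      # "() { ... }"
        "fim_type": "identifier_name",
    }

sample = mask_function_name("function Ball() { this.init(); }")
print(sample["middle"])  # -> "Ball"
```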
suspicious_splitn.rs | #![warn(clippy::suspicious_splitn)]
fn main() {
let _ = "a,b,c".splitn(3, ',');
let _ = [0, 1, 2, 1, 3].splitn(3, |&x| x == 1);
let _ = "".splitn(0, ',');
let _ = [].splitn(0, |&x: &u32| x == 1);
let _ = "a,b".splitn(0, ','); | let _ = "a,b".rsplitn(0, ',');
let _ = "a,b".splitn(1, ',');
let _ = [0, 1, 2].splitn(0, |&x| x == 1);
let _ = [0, 1, 2].splitn_mut(0, |&x| x == 1);
let _ = [0, 1, 2].splitn(1, |&x| x == 1);
let _ = [0, 1, 2].rsplitn_mut(1, |&x| x == 1);
const X: usize = 0;
let _ = "a,b".splitn(X + 1, ',');
let _ = "a,b".splitn(X, ',');
} | random_line_split |
|
suspicious_splitn.rs | #![warn(clippy::suspicious_splitn)]
fn main() | {
let _ = "a,b,c".splitn(3, ',');
let _ = [0, 1, 2, 1, 3].splitn(3, |&x| x == 1);
let _ = "".splitn(0, ',');
let _ = [].splitn(0, |&x: &u32| x == 1);
let _ = "a,b".splitn(0, ',');
let _ = "a,b".rsplitn(0, ',');
let _ = "a,b".splitn(1, ',');
let _ = [0, 1, 2].splitn(0, |&x| x == 1);
let _ = [0, 1, 2].splitn_mut(0, |&x| x == 1);
let _ = [0, 1, 2].splitn(1, |&x| x == 1);
let _ = [0, 1, 2].rsplitn_mut(1, |&x| x == 1);
const X: usize = 0;
let _ = "a,b".splitn(X + 1, ',');
let _ = "a,b".splitn(X, ',');
} | identifier_body |
|
suspicious_splitn.rs | #![warn(clippy::suspicious_splitn)]
fn | () {
let _ = "a,b,c".splitn(3, ',');
let _ = [0, 1, 2, 1, 3].splitn(3, |&x| x == 1);
let _ = "".splitn(0, ',');
let _ = [].splitn(0, |&x: &u32| x == 1);
let _ = "a,b".splitn(0, ',');
let _ = "a,b".rsplitn(0, ',');
let _ = "a,b".splitn(1, ',');
let _ = [0, 1, 2].splitn(0, |&x| x == 1);
let _ = [0, 1, 2].splitn_mut(0, |&x| x == 1);
let _ = [0, 1, 2].splitn(1, |&x| x == 1);
let _ = [0, 1, 2].rsplitn_mut(1, |&x| x == 1);
const X: usize = 0;
let _ = "a,b".splitn(X + 1, ',');
let _ = "a,b".splitn(X, ',');
}
| main | identifier_name |
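The three suspicious_splitn.rs rows repeat the same Clippy test file with three cut strategies: a randomly chosen line (`random_line_split`, middle `let _ = "a,b".rsplitn(0, ',');`), the body of `main` (`identifier_body`), and the name `main` (`identifier_name`). A hypothetical sketch of `random_line_split` — pick a line boundary at random and take the next few lines as the middle:

```python
import random

# Hypothetical sketch of random_line_split: choose a cut point at a random
# line boundary and treat the next n_lines lines as the middle span.
def random_line_split(source: str, n_lines: int = 1, rng=random):
    lines = source.splitlines(keepends=True)
    if len(lines) <= n_lines:
        return None
    cut = rng.randrange(1, len(lines) - n_lines + 1)
    return {
        "prefix": "".join(lines[:cut]),
        "middle": "".join(lines[cut:cut + n_lines]),
        "suffix": "".join(lines[cut + n_lines:]),
        "fim_type": "random_line_split",
    }

src = 'let _ = "a,b".splitn(0, \',\');\nlet _ = "a,b".rsplitn(0, \',\');\n'
print(random_line_split(src))
```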
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! This module contains shared types and messages for use by devtools/script.
//! The traits are here instead of in script so that the devtools crate can be
//! modified independently of the rest of Servo.
#![crate_name = "style_traits"]
#![crate_type = "rlib"]
#![deny(unsafe_code, missing_docs)]
#![cfg_attr(feature = "servo", feature(plugin))]
extern crate app_units;
#[macro_use]
extern crate cssparser;
extern crate euclid;
#[cfg(feature = "servo")] extern crate heapsize;
#[cfg(feature = "servo")] #[macro_use] extern crate heapsize_derive;
extern crate rustc_serialize;
#[cfg(feature = "servo")] #[macro_use] extern crate serde_derive;
/// Opaque type stored in type-unsafe work queues for parallel layout.
/// Must be transmutable to and from `TNode`.
pub type UnsafeNode = (usize, usize);
/// One CSS "px" in the coordinate system of the "initial viewport":
/// http://www.w3.org/TR/css-device-adapt/#initial-viewport | /// `ViewportPx` is equal to `DeviceIndependentPixel` times a "page zoom" factor controlled by the user. This is
/// the desktop-style "full page" zoom that enlarges content but then reflows the layout viewport
/// so it still exactly fits the visible area.
///
/// At the default zoom level of 100%, one `PagePx` is equal to one `DeviceIndependentPixel`. However, if the
/// document is zoomed in or out then this scale may be larger or smaller.
#[derive(Clone, Copy, Debug)]
pub enum ViewportPx {}
/// One CSS "px" in the root coordinate system for the content document.
///
/// `PagePx` is equal to `ViewportPx` multiplied by a "viewport zoom" factor controlled by the user.
/// This is the mobile-style "pinch zoom" that enlarges content without reflowing it. When the
/// viewport zoom is not equal to 1.0, then the layout viewport is no longer the same physical size
/// as the viewable area.
#[derive(Clone, Copy, Debug)]
pub enum PagePx {}
// In summary, the hierarchy of pixel units and the factors to convert from one to the next:
//
// DevicePixel
// / hidpi_ratio => DeviceIndependentPixel
// / desktop_zoom => ViewportPx
// / pinch_zoom => PagePx
pub mod cursor;
#[macro_use]
pub mod values;
pub mod viewport;
pub use values::{ToCss, OneOrMoreCommaSeparated}; | /// | random_line_split |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! This module contains shared types and messages for use by devtools/script.
//! The traits are here instead of in script so that the devtools crate can be
//! modified independently of the rest of Servo.
#![crate_name = "style_traits"]
#![crate_type = "rlib"]
#![deny(unsafe_code, missing_docs)]
#![cfg_attr(feature = "servo", feature(plugin))]
extern crate app_units;
#[macro_use]
extern crate cssparser;
extern crate euclid;
#[cfg(feature = "servo")] extern crate heapsize;
#[cfg(feature = "servo")] #[macro_use] extern crate heapsize_derive;
extern crate rustc_serialize;
#[cfg(feature = "servo")] #[macro_use] extern crate serde_derive;
/// Opaque type stored in type-unsafe work queues for parallel layout.
/// Must be transmutable to and from `TNode`.
pub type UnsafeNode = (usize, usize);
/// One CSS "px" in the coordinate system of the "initial viewport":
/// http://www.w3.org/TR/css-device-adapt/#initial-viewport
///
/// `ViewportPx` is equal to `DeviceIndependentPixel` times a "page zoom" factor controlled by the user. This is
/// the desktop-style "full page" zoom that enlarges content but then reflows the layout viewport
/// so it still exactly fits the visible area.
///
/// At the default zoom level of 100%, one `PagePx` is equal to one `DeviceIndependentPixel`. However, if the
/// document is zoomed in or out then this scale may be larger or smaller.
#[derive(Clone, Copy, Debug)]
pub enum ViewportPx {}
/// One CSS "px" in the root coordinate system for the content document.
///
/// `PagePx` is equal to `ViewportPx` multiplied by a "viewport zoom" factor controlled by the user.
/// This is the mobile-style "pinch zoom" that enlarges content without reflowing it. When the
/// viewport zoom is not equal to 1.0, then the layout viewport is no longer the same physical size
/// as the viewable area.
#[derive(Clone, Copy, Debug)]
pub enum | {}
// In summary, the hierarchy of pixel units and the factors to convert from one to the next:
//
// DevicePixel
// / hidpi_ratio => DeviceIndependentPixel
// / desktop_zoom => ViewportPx
// / pinch_zoom => PagePx
pub mod cursor;
#[macro_use]
pub mod values;
pub mod viewport;
pub use values::{ToCss, OneOrMoreCommaSeparated};
| PagePx | identifier_name |
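lib.rs likewise appears once per cut strategy; the same source file recurring under several `fim_type` values is a pattern throughout this preview. Per-strategy subsets can be taken by filtering on `fim_type`; for example, with the Hugging Face `datasets` API (the dataset id below is a placeholder, not the real one):

```python
from datasets import load_dataset  # assumes the dataset is hosted on the Hub

# Hypothetical dataset id; substitute the real one.
ds = load_dataset("your-org/your-fim-dataset", split="train")
name_only = ds.filter(lambda r: r["fim_type"] == "identifier_name")
print(len(name_only), name_only[0]["middle"])
```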
script.js | window.mozRequestAnimationFrame ||
window.oRequestAnimationFrame ||
window.msRequestAnimationFrame ||
function(/* function */ callback, /* DOMElement */ element){
window.setTimeout(callback, 1000 / 60);
};
})();
// example code from mr doob : http://mrdoob.com/lab/javascript/requestanimationframe/
//var canvas = document.getElementById("myCanvas");
//var context = canvas.getContext("2d");
var canvas, context, toggle;
var y= 220;
var x= 284;
var y2=-10;
var x2= 10;
var y3=-10;
var x3= 400;
var mid = 128;
var dirX = 1;
var dirY = 1;
var destX ;
var destY ;
var i;
var state ;
var inbounds='true';
var status = -1; // -1: stopped , 0 In play
var imageObj = new Image();
var imageObj2 = new Image();
var imageObj3 = new Image();
var background_obj= new Image();
background_obj.src = "deep-space.jpg";
imageObj.src = "spshipsprite.png";
imageObj2.src = "spacestation.png";
imageObj3.src = "blueship4.png";
var jump = 'rest';
var backg_x = 0;
var backg_y = 0;
var floating =false;
var degrees = 0;
var str;
var name;
//init();
var dir = 1;
var monster = {};
var origin = {};
// Bullet image
var bulletReady = false;
var bulletImage = new Image();
bulletImage.onload = function () {
//bulletReady = true;
};
bulletImage.src = "images/bullet.png";
var bullet = {
speed: 256 // movement in pixels per second
};
//function init() {
canvas = document.createElement( 'canvas' );
canvas.width = 568;
canvas.height = 400;
context = canvas.getContext( '2d' );
//context.font = "40pt Calibri";
//context.fillStyle = "white";
// align text horizontally center
context.textAlign = "center";
// align text vertically center
context.textBaseline = "middle";
//context.font = "12pt Calibri";
//canvas.width = 8248;
context.drawImage(background_obj, backg_x, backg_y);
//imageData = context.getImageData(0,0,8248,400); //fnord
//var x = document;
// canvas.width = 568;
$( "#container" ).append( canvas );
//}
animate();
// shoot addition
var shoot = function(modifier){
if (dir==1){
bullet.y -= bullet.speed * modifier * 4;
}
if (dir==2){
bullet.y += bullet.speed * modifier * 4;
}
if (dir==3){
bullet.x -= bullet.speed * modifier * 4;
}
if (dir==4){
bullet.x += bullet.speed * modifier * 4;
}
// Are they touching2?
if (
bullet.x <= (monster.x + 32)
&& monster.x <= (bullet.x + 32)
&& hero.y <= (bullet.y + 32)
&& monster.y <= (bullet.y + 32)
) {
++monstersShot;
reset();
}
//distance = square root of ( (x2-x1)^2 + (y2-y1)^2)
var distance = Math.sqrt( Math.pow(bullet.x-origin.x, 2) + Math.pow(bullet.y - origin.y,2) );
if (distance > 200)
{
bulletReady = false;
first = true
}
}
if (bulletReady) {
context.drawImage(bulletImage, bullet.x, bullet.y);
}
// shoot addition
function shoot()
{
if (dir==1){
bullet.y -= bullet.speed * 4;
}
if (dir==2){
bullet.y += bullet.speed * 4;
}
if (dir==3){
bullet.x -= bullet.speed * 4;
}
if (dir==4){
bullet.x += bullet.speed * 4;
}
//distance = square root of ( (x2-x1)^2 + (y2-y1)^2)
var distance = Math.sqrt( Math.pow(bullet.x - x, 2) + Math.pow(bullet.y - y,2) );
if (distance > 200)
{
bulletReady = false;
first = true
}
}
function animate() {
update();
requestAnimFrame( animate );
shoot();
draw();
}
function update() {
y2++;
x2++;
y3++;
x3--;
if (y2==400)
{
y2=0;
}
if (x2==598)
{
x2=0;
}
if (y3==400)
{
y3=0;
}
if (x3==0)
{
x3=598;
}
}
function draw() {
context.fillText( state + ":" , canvas.width / 2 , canvas.height / 2 );
$(document).keyup(function(e)
{
if (e.keyCode == 37)
{
state= "stop";
dirX=1;
dir=3;
}
if (e.keyCode == 39)
{
state= "stop";
dirX=1;
dir=4;
}
if (e.keyCode == 38)
{
jump = 'descend';
}
});
$(document).keydown(function(e) {
//alert (e.keyCode);
//if space start/stop gameloop
//var time = new Date().getTime() * 0.002;
if(e.keyCode == 32)
{
status = 0 - status;
bulletReady = true;
bullet.x = x;
bullet.y = y;
}
// if (jump != 'descend') | {
jump = 'ascend';
}
// }
if (e.keyCode == 40){
// down
}
if (e.keyCode == 37){
state = 'left';
}
if (e.keyCode == 39){
state = 'right';
}
});
///////////////////////////////////////////////////////////////////////////////
if (state == 'left')
{
//x = x-(1 * dirX);
// backg_x = backg_x + 1 ;
degrees = degrees - 1;
// context.setTransform(1,0.5,-0.5,10,10);
}
if (state == 'right')
{
//x = x + (1 * dirX);
// backg_x = backg_x - 1 ;
degrees = degrees +1 ;
// context.setTransform(1,0.5,-0.5,1,10,10);
}
if (jump == 'ascend')
{
}
if (jump == 'descend')
{
y = y - 1;
if (y == 0)
{
jump = 'rest';
}
}
if (jump == 'rest')
{
y = 0;
dirY = -1;
}
if (inbounds=='true')
{
// destX = (canvas.width / 2 ) + x;
// destY = canvas.height - 30 - y ;// 60 pixels offset from centre
}//end if inbounds
if (destX > canvas.width || destX < 0)
{
// dirX =-dirX;
}
if (destY > canvas.width || destY < 0)
{
// dirY =-dirY;
}
//canvas.width = 8248;
context.clearRect(0,0 , canvas.width, canvas.height);
context.drawImage(background_obj, backg_x, backg_y);
context.save();
context.beginPath();
context.translate( 290,210 );
// rotate the rect
context.rotate(degrees*Math.PI/180);
context.drawImage(imageObj, -37, -50);
context.restore();
context.drawImage(imageObj2, x2, y2);
context.drawImage(imageObj3, x3, y3);
str = "width=" + imageData.width + " height=" + imageData.height
+ " red :" + red + " green :" + green + " blue :" + blue
+ " destX :" + parseInt(0-backg_x) + " destY :" +destY
+ | // {
if (e.keyCode == 38 ) | random_line_split |
script.js |
window.mozRequestAnimationFrame ||
window.oRequestAnimationFrame ||
window.msRequestAnimationFrame ||
function(/* function */ callback, /* DOMElement */ element){
window.setTimeout(callback, 1000 / 60);
};
})();
// example code from mr doob : http://mrdoob.com/lab/javascript/requestanimationframe/
//var canvas = document.getElementById("myCanvas");
//var context = canvas.getContext("2d");
var canvas, context, toggle;
var y= 220;
var x= 284;
var y2=-10;
var x2= 10;
var y3=-10;
var x3= 400;
var mid = 128;
var dirX = 1;
var dirY = 1;
var destX ;
var destY ;
var i;
var state ;
var inbounds='true';
var status = -1; // -1: stopped , 0 In play
var imageObj = new Image();
var imageObj2 = new Image();
var imageObj3 = new Image();
var background_obj= new Image();
background_obj.src = "deep-space.jpg";
imageObj.src = "spshipsprite.png";
imageObj2.src = "spacestation.png";
imageObj3.src = "blueship4.png";
var jump = 'rest';
var backg_x = 0;
var backg_y = 0;
var floating =false;
var degrees = 0;
var str;
var name;
//init();
var dir = 1;
var monster = {};
var origin = {};
// Bullet image
var bulletReady = false;
var bulletImage = new Image();
bulletImage.onload = function () {
//bulletReady = true;
};
bulletImage.src = "images/bullet.png";
var bullet = {
speed: 256 // movement in pixels per second
};
//function init() {
canvas = document.createElement( 'canvas' );
canvas.width = 568;
canvas.height = 400;
context = canvas.getContext( '2d' );
//context.font = "40pt Calibri";
//context.fillStyle = "white";
// align text horizontally center
context.textAlign = "center";
// align text vertically center
context.textBaseline = "middle";
//context.font = "12pt Calibri";
//canvas.width = 8248;
context.drawImage(background_obj, backg_x, backg_y);
//imageData = context.getImageData(0,0,8248,400); //fnord
//var x = document;
// canvas.width = 568;
$( "#container" ).append( canvas );
//}
animate();
// shoot addition
var shoot = function(modifier){
if (dir==1){
bullet.y -= bullet.speed * modifier * 4;
}
if (dir==2){
bullet.y += bullet.speed * modifier * 4;
}
if (dir==3){
bullet.x -= bullet.speed * modifier * 4;
}
if (dir==4){
bullet.x += bullet.speed * modifier * 4;
}
// Are they touching2?
if (
bullet.x <= (monster.x + 32)
&& monster.x <= (bullet.x + 32)
&& hero.y <= (bullet.y + 32)
&& monster.y <= (bullet.y + 32)
) {
++monstersShot;
reset();
}
//distance = square root of ( (x2-x1)^2 + (y2-y1)^2)
var distance = Math.sqrt( Math.pow(bullet.x-origin.x, 2) + Math.pow(bullet.y - origin.y,2) );
if (distance > 200)
{
bulletReady = false;
first = true
}
}
if (bulletReady) {
context.drawImage(bulletImage, bullet.x, bullet.y);
}
// shoot addition
function shoot()
{
if (dir==1){
bullet.y -= bullet.speed * 4;
}
if (dir==2){
bullet.y += bullet.speed * 4;
}
if (dir==3){
bullet.x -= bullet.speed * 4;
}
if (dir==4){
bullet.x += bullet.speed * 4;
}
//distance = square root of ( (x2-x1)^2 + (y2-y1)^2)
var distance = Math.sqrt( Math.pow(bullet.x - x, 2) + Math.pow(bullet.y - y,2) );
if (distance > 200)
{
bulletReady = false;
first = true
}
}
function animate() {
update();
requestAnimFrame( animate );
shoot();
draw();
}
function update() | y3=0;
}
if (x3==0)
{
x3=598;
}
}
function draw() {
context.fillText( state + ":" , canvas.width / 2 , canvas.height / 2 );
$(document).keyup(function(e)
{
if (e.keyCode == 37)
{
state= "stop";
dirX=1;
dir=3;
}
if (e.keyCode == 39)
{
state= "stop";
dirX=1;
dir=4;
}
if (e.keyCode == 38)
{
jump = 'descend';
}
});
$(document).keydown(function(e) {
//alert (e.keyCode);
//if space start/stop gameloop
//var time = new Date().getTime() * 0.002;
if(e.keyCode == 32)
{
status = 0 - status;
bulletReady = true;
bullet.x = x;
bullet.y = y;
}
// if (jump != 'descend')
// {
if (e.keyCode == 38 )
{
jump = 'ascend';
}
// }
if (e.keyCode == 40){
// down
}
if (e.keyCode == 37){
state = 'left';
}
if (e.keyCode == 39){
state = 'right';
}
});
///////////////////////////////////////////////////////////////////////////////
if (state == 'left')
{
//x = x-(1 * dirX);
// backg_x = backg_x + 1 ;
degrees = degrees - 1;
// context.setTransform(1,0.5,-0.5,10,10);
}
if (state == 'right')
{
//x = x + (1 * dirX);
// backg_x = backg_x - 1 ;
degrees = degrees +1 ;
// context.setTransform(1,0.5,-0.5,1,10,10);
}
if (jump == 'ascend')
{
}
if (jump == 'descend')
{
y = y - 1;
if (y == 0)
{
jump = 'rest';
}
}
if (jump == 'rest')
{
y = 0;
dirY = -1;
}
if (inbounds=='true')
{
// destX = (canvas.width / 2 ) + x;
// destY = canvas.height - 30 - y ;// 60 pixels offset from centre
}//end if inbounds
if (destX > canvas.width || destX < 0)
{
// dirX =-dirX;
}
if (destY > canvas.width || destY < 0)
{
// dirY =-dirY;
}
//canvas.width = 8248;
context.clearRect(0,0 , canvas.width, canvas.height);
context.drawImage(background_obj, backg_x, backg_y);
context.save();
context.beginPath();
context.translate( 290,210 );
// rotate the rect
context.rotate(degrees*Math.PI/180);
context.drawImage(imageObj, -37, -50);
context.restore();
context.drawImage(imageObj2, x2, y2);
context.drawImage(imageObj3, x3, y3);
str = "width=" + imageData.width + " height=" + imageData.height
+ " red :" + red + " green :" + green + " blue :" + blue
+ " destX :" + parseInt(0-backg_x) + " destY :" +destY
| {
y2++;
x2++;
y3++;
x3--;
if (y2==400)
{
y2=0;
}
if (x2==598)
{
x2=0;
}
if (y3==400)
{ | identifier_body |
script.js |
window.mozRequestAnimationFrame ||
window.oRequestAnimationFrame ||
window.msRequestAnimationFrame ||
function(/* function */ callback, /* DOMElement */ element){
window.setTimeout(callback, 1000 / 60);
};
})();
// example code from mr doob : http://mrdoob.com/lab/javascript/requestanimationframe/
//var canvas = document.getElementById("myCanvas");
//var context = canvas.getContext("2d");
var canvas, context, toggle;
var y= 220;
var x= 284;
var y2=-10;
var x2= 10;
var y3=-10;
var x3= 400;
var mid = 128;
var dirX = 1;
var dirY = 1;
var destX ;
var destY ;
var i;
var state ;
var inbounds='true';
var status = -1; // -1: stopped , 0 In play
var imageObj = new Image();
var imageObj2 = new Image();
var imageObj3 = new Image();
var background_obj= new Image();
background_obj.src = "deep-space.jpg";
imageObj.src = "spshipsprite.png";
imageObj2.src = "spacestation.png";
imageObj3.src = "blueship4.png";
var jump = 'rest';
var backg_x = 0;
var backg_y = 0;
var floating =false;
var degrees = 0;
var str;
var name;
//init();
var dir = 1;
var monster = {};
var origin = {};
// Bullet image
var bulletReady = false;
var bulletImage = new Image();
bulletImage.onload = function () {
//bulletReady = true;
};
bulletImage.src = "images/bullet.png";
var bullet = {
speed: 256 // movement in pixels per second
};
//function init() {
canvas = document.createElement( 'canvas' );
canvas.width = 568;
canvas.height = 400;
context = canvas.getContext( '2d' );
//context.font = "40pt Calibri";
//context.fillStyle = "white";
// align text horizontally center
context.textAlign = "center";
// align text vertically center
context.textBaseline = "middle";
//context.font = "12pt Calibri";
//canvas.width = 8248;
context.drawImage(background_obj, backg_x, backg_y);
//imageData = context.getImageData(0,0,8248,400); //fnord
//var x = document;
// canvas.width = 568;
$( "#container" ).append( canvas );
//}
animate();
// shoot addition
var shoot = function(modifier){
if (dir==1){
bullet.y -= bullet.speed * modifier * 4;
}
if (dir==2){
bullet.y += bullet.speed * modifier * 4;
}
if (dir==3){
bullet.x -= bullet.speed * modifier * 4;
}
if (dir==4){
bullet.x += bullet.speed * modifier * 4;
}
// Are they touching2?
if (
bullet.x <= (monster.x + 32)
&& monster.x <= (bullet.x + 32)
&& hero.y <= (bullet.y + 32)
&& monster.y <= (bullet.y + 32)
) {
++monstersShot;
reset();
}
//distance = square root of ( (x2-x1)^2 + (y2-y1)^2)
var distance = Math.sqrt( Math.pow(bullet.x-origin.x, 2) + Math.pow(bullet.y - origin.y,2) );
if (distance > 200)
{
bulletReady = false;
first = true
}
}
if (bulletReady) {
context.drawImage(bulletImage, bullet.x, bullet.y);
}
// shoot addition
function shoot()
{
if (dir==1){
bullet.y -= bullet.speed * 4;
}
if (dir==2){
bullet.y += bullet.speed * 4;
}
if (dir==3){
bullet.x -= bullet.speed * 4;
}
if (dir==4){
bullet.x += bullet.speed * 4;
}
//distance = square root of ( (x2-x1)^2 + (y2-y1)^2)
var distance = Math.sqrt( Math.pow(bullet.x - x, 2) + Math.pow(bullet.y - y,2) );
if (distance > 200)
{
bulletReady = false;
first = true
}
}
function animate() {
update();
requestAnimFrame( animate );
shoot();
draw();
}
function update() {
y2++;
x2++;
y3++;
x3--;
if (y2==400)
{
y2=0;
}
if (x2==598)
{
x2=0;
}
if (y3==400)
{
y3=0;
}
if (x3==0)
{
x3=598;
}
}
function | () {
context.fillText( state + ":" , canvas.width / 2 , canvas.height / 2 );
$(document).keyup(function(e)
{
if (e.keyCode == 37)
{
state= "stop";
dirX=1;
dir=3;
}
if (e.keyCode == 39)
{
state= "stop";
dirX=1;
dir=4;
}
if (e.keyCode == 38)
{
jump = 'descend';
}
});
$(document).keydown(function(e) {
//alert (e.keyCode);
//if space start/stop gameloop
//var time = new Date().getTime() * 0.002;
if(e.keyCode == 32)
{
status = 0 - status;
bulletReady = true;
bullet.x = x;
bullet.y = y;
}
// if (jump != 'descend')
// {
if (e.keyCode == 38 )
{
jump = 'ascend';
}
// }
if (e.keyCode == 40){
// down
}
if (e.keyCode == 37){
state = 'left';
}
if (e.keyCode == 39){
state = 'right';
}
});
///////////////////////////////////////////////////////////////////////////////
if (state == 'left')
{
//x = x-(1 * dirX);
// backg_x = backg_x + 1 ;
degrees = degrees - 1;
// context.setTransform(1,0.5,-0.5,10,10);
}
if (state == 'right')
{
//x = x + (1 * dirX);
// backg_x = backg_x - 1 ;
degrees = degrees +1 ;
// context.setTransform(1,0.5,-0.5,1,10,10);
}
if (jump == 'ascend')
{
}
if (jump == 'descend')
{
y = y - 1;
if (y == 0)
{
jump = 'rest';
}
}
if (jump == 'rest')
{
y = 0;
dirY = -1;
}
if (inbounds=='true')
{
// destX = (canvas.width / 2 ) + x;
// destY = canvas.height - 30 - y ;// 60 pixels offset from centre
}//end if inbounds
if (destX > canvas.width || destX < 0)
{
// dirX =-dirX;
}
if (destY > canvas.width || destY < 0)
{
// dirY =-dirY;
}
//canvas.width = 8248;
context.clearRect(0,0 , canvas.width, canvas.height);
context.drawImage(background_obj, backg_x, backg_y);
context.save();
context.beginPath();
context.translate( 290,210 );
// rotate the rect
context.rotate(degrees*Math.PI/180);
context.drawImage(imageObj, -37, -50);
context.restore();
context.drawImage(imageObj2, x2, y2);
context.drawImage(imageObj3, x3, y3);
str = "width=" + imageData.width + " height=" + imageData.height
+ " red :" + red + " green :" + green + " blue :" + blue
+ " destX :" + parseInt(0-backg_x) + " destY :" +destY
| draw | identifier_name |
script.js |
window.mozRequestAnimationFrame ||
window.oRequestAnimationFrame ||
window.msRequestAnimationFrame ||
function(/* function */ callback, /* DOMElement */ element){
window.setTimeout(callback, 1000 / 60);
};
})();
// example code from mr doob : http://mrdoob.com/lab/javascript/requestanimationframe/
//var canvas = document.getElementById("myCanvas");
//var context = canvas.getContext("2d");
var canvas, context, toggle;
var y= 220;
var x= 284;
var y2=-10;
var x2= 10;
var y3=-10;
var x3= 400;
var mid = 128;
var dirX = 1;
var dirY = 1;
var destX ;
var destY ;
var i;
var state ;
var inbounds='true';
var status = -1; // -1: stopped , 0 In play
var imageObj = new Image();
var imageObj2 = new Image();
var imageObj3 = new Image();
var background_obj= new Image();
background_obj.src = "deep-space.jpg";
imageObj.src = "spshipsprite.png";
imageObj2.src = "spacestation.png";
imageObj3.src = "blueship4.png";
var jump = 'rest';
var backg_x = 0;
var backg_y = 0;
var floating =false;
var degrees = 0;
var str;
var name;
//init();
var dir = 1;
var monster = {};
var origin = {};
// Bullet image
var bulletReady = false;
var bulletImage = new Image();
bulletImage.onload = function () {
//bulletReady = true;
};
bulletImage.src = "images/bullet.png";
var bullet = {
speed: 256 // movement in pixels per second
};
//function init() {
canvas = document.createElement( 'canvas' );
canvas.width = 568;
canvas.height = 400;
context = canvas.getContext( '2d' );
//context.font = "40pt Calibri";
//context.fillStyle = "white";
// align text horizontally center
context.textAlign = "center";
// align text vertically center
context.textBaseline = "middle";
//context.font = "12pt Calibri";
//canvas.width = 8248;
context.drawImage(background_obj, backg_x, backg_y);
//imageData = context.getImageData(0,0,8248,400); //fnord
//var x = document;
// canvas.width = 568;
$( "#container" ).append( canvas );
//}
animate();
// shoot addition
var shoot = function(modifier){
if (dir==1){
bullet.y -= bullet.speed * modifier * 4;
}
if (dir==2){
bullet.y += bullet.speed * modifier * 4;
}
if (dir==3){
bullet.x -= bullet.speed * modifier * 4;
}
if (dir==4){
bullet.x += bullet.speed * modifier * 4;
}
// Are they touching2?
if (
bullet.x <= (monster.x + 32)
&& monster.x <= (bullet.x + 32)
&& hero.y <= (bullet.y + 32)
&& monster.y <= (bullet.y + 32)
) {
++monstersShot;
reset();
}
//distance = square root of ( (x2-x1)^2 + (y2-y1)^2)
var distance = Math.sqrt( Math.pow(bullet.x-origin.x, 2) + Math.pow(bullet.y - origin.y,2) );
if (distance > 200)
{
bulletReady = false;
first = true
}
}
if (bulletReady) {
context.drawImage(bulletImage, bullet.x, bullet.y);
}
// shoot addition
function shoot()
{
if (dir==1){
bullet.y -= bullet.speed * 4;
}
if (dir==2){
bullet.y += bullet.speed * 4;
}
if (dir==3){
bullet.x -= bullet.speed * 4;
}
if (dir==4){
bullet.x += bullet.speed * 4;
}
//distance = square root of ( (x2-x1)^2 + (y2-y1)^2)
var distance = Math.sqrt( Math.pow(bullet.x - x, 2) + Math.pow(bullet.y - y,2) );
if (distance > 200)
{
bulletReady = false;
first = true
}
}
function animate() {
update();
requestAnimFrame( animate );
shoot();
draw();
}
function update() {
y2++;
x2++;
y3++;
x3--;
if (y2==400)
{
y2=0;
}
if (x2==598)
{
x2=0;
}
if (y3==400)
{
y3=0;
}
if (x3==0)
{
x3=598;
}
}
function draw() {
context.fillText( state + ":" , canvas.width / 2 , canvas.height / 2 );
$(document).keyup(function(e)
{
if (e.keyCode == 37)
{
state= "stop";
dirX=1;
dir=3;
}
if (e.keyCode == 39)
{
state= "stop";
dirX=1;
dir=4;
}
if (e.keyCode == 38)
{
jump = 'descend';
}
});
$(document).keydown(function(e) {
//alert (e.keyCode);
//if space start/stop gameloop
//var time = new Date().getTime() * 0.002;
if(e.keyCode == 32)
{
status = 0 - status;
bulletReady = true;
bullet.x = x;
bullet.y = y;
}
// if (jump != 'descend')
// {
if (e.keyCode == 38 )
{
jump = 'ascend';
}
// }
if (e.keyCode == 40){
// down
}
if (e.keyCode == 37){
state = 'left';
}
if (e.keyCode == 39){
state = 'right';
}
});
///////////////////////////////////////////////////////////////////////////////
if (state == 'left')
{
//x = x-(1 * dirX);
// backg_x = backg_x + 1 ;
degrees = degrees - 1;
// context.setTransform(1,0.5,-0.5,10,10);
}
if (state == 'right')
{
//x = x + (1 * dirX);
// backg_x = backg_x - 1 ;
degrees = degrees +1 ;
// context.setTransform(1,0.5,-0.5,1,10,10);
}
if (jump == 'ascend')
{
}
if (jump == 'descend')
{
y = y - 1;
if (y == 0)
{
jump = 'rest';
}
}
if (jump == 'rest')
{
y = 0;
dirY = -1;
}
if (inbounds=='true')
{
// destX = (canvas.width / 2 ) + x;
// destY = canvas.height - 30 - y ;// 60 pixels offset from centre
}//end if inbounds
if (destX > canvas.width || destX < 0)
|
if (destY > canvas.width || destY < 0)
{
// dirY =-dirY;
}
//canvas.width = 8248;
context.clearRect(0,0 , canvas.width, canvas.height);
context.drawImage(background_obj, backg_x, backg_y);
context.save();
context.beginPath();
context.translate( 290,210 );
// rotate the rect
context.rotate(degrees*Math.PI/180);
context.drawImage(imageObj, -37, -50);
context.restore();
context.drawImage(imageObj2, x2, y2);
context.drawImage(imageObj3, x3, y3);
str = "width=" + imageData.width + " height=" + imageData.height
+ " red :" + red + " green :" + green + " blue :" + blue
+ " destX :" + parseInt(0-backg_x) + " destY :" +destY
| {
// dirX =-dirX;
} | conditional_block |
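The fourth script.js row is a `conditional_block` sample: the middle is the braced block that follows `if (destX > canvas.width || destX < 0)`. A hypothetical brace-matching sketch of that flavor (naive: braces inside strings or comments would confuse it):

```python
# Hypothetical sketch of conditional_block: given the index of the '{' that
# opens an if-body, scan for the matching '}' and mask the whole block.
def mask_block(source: str, open_idx: int):
    assert source[open_idx] == "{"
    depth = 0
    for i in range(open_idx, len(source)):
        if source[i] == "{":
            depth += 1
        elif source[i] == "}":
            depth -= 1
            if depth == 0:
                return {
                    "prefix": source[:open_idx],
                    "middle": source[open_idx:i + 1],
                    "suffix": source[i + 1:],
                    "fim_type": "conditional_block",
                }
    return None  # unbalanced braces

src = "if (destX > canvas.width || destX < 0)\n{\n  // dirX = -dirX;\n}\n"
print(mask_block(src, src.index("{"))["middle"])
```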
hammer_gestures.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Inject, Injectable, OpaqueToken} from '@angular/core';
import {EventManagerPlugin} from './event_manager';
const EVENT_NAMES = {
// pan
'pan': true,
'panstart': true,
'panmove': true,
'panend': true,
'pancancel': true,
'panleft': true,
'panright': true,
'panup': true,
'pandown': true,
// pinch
'pinch': true,
'pinchstart': true,
'pinchmove': true,
'pinchend': true,
'pinchcancel': true,
'pinchin': true,
'pinchout': true,
// press
'press': true,
'pressup': true,
// rotate
'rotate': true,
'rotatestart': true,
'rotatemove': true,
'rotateend': true,
'rotatecancel': true,
// swipe
'swipe': true,
'swipeleft': true,
'swiperight': true,
'swipeup': true,
'swipedown': true,
// tap
'tap': true,
};
/**
* A DI token that you can use to provide {@link HammerGestureConfig} to Angular. Use it to configure
* Hammer gestures.
*
* @experimental
*/
export const HAMMER_GESTURE_CONFIG: OpaqueToken = new OpaqueToken('HammerGestureConfig');
export interface HammerInstance {
on(eventName: string, callback: Function): void;
off(eventName: string, callback: Function): void;
}
/**
* @experimental
*/
@Injectable()
export class HammerGestureConfig {
events: string[] = [];
overrides: {[key: string]: Object} = {};
buildHammer(element: HTMLElement): HammerInstance |
}
@Injectable()
export class HammerGesturesPlugin extends EventManagerPlugin {
constructor(@Inject(HAMMER_GESTURE_CONFIG) private _config: HammerGestureConfig) { super(); }
supports(eventName: string): boolean {
if (!EVENT_NAMES.hasOwnProperty(eventName.toLowerCase()) && !this.isCustomEvent(eventName)) {
return false;
}
if (!(window as any).Hammer) {
throw new Error(`Hammer.js is not loaded, can not bind ${eventName} event`);
}
return true;
}
addEventListener(element: HTMLElement, eventName: string, handler: Function): Function {
const zone = this.manager.getZone();
eventName = eventName.toLowerCase();
return zone.runOutsideAngular(() => {
// Creating the manager bind events, must be done outside of angular
const mc = this._config.buildHammer(element);
const callback = function(eventObj: HammerInput) {
zone.runGuarded(function() { handler(eventObj); });
};
mc.on(eventName, callback);
return () => mc.off(eventName, callback);
});
}
isCustomEvent(eventName: string): boolean { return this._config.events.indexOf(eventName) > -1; }
}
| {
const mc = new Hammer(element);
mc.get('pinch').set({enable: true});
mc.get('rotate').set({enable: true});
for (const eventName in this.overrides) {
mc.get(eventName).set(this.overrides[eventName]);
}
return mc;
} | identifier_body |
hammer_gestures.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Inject, Injectable, OpaqueToken} from '@angular/core';
import {EventManagerPlugin} from './event_manager';
const EVENT_NAMES = {
// pan
'pan': true,
'panstart': true,
'panmove': true,
'panend': true,
'pancancel': true,
'panleft': true,
'panright': true,
'panup': true,
'pandown': true,
// pinch
'pinch': true,
'pinchstart': true,
'pinchmove': true,
'pinchend': true,
'pinchcancel': true,
'pinchin': true,
'pinchout': true,
// press
'press': true,
'pressup': true,
// rotate
'rotate': true,
'rotatestart': true,
'rotatemove': true,
'rotateend': true,
'rotatecancel': true,
// swipe
'swipe': true,
'swipeleft': true,
'swiperight': true,
'swipeup': true,
'swipedown': true,
// tap
'tap': true,
};
/**
* A DI token that you can use to provide {@link HammerGestureConfig} to Angular. Use it to configure
export interface HammerInstance {
on(eventName: string, callback: Function): void;
off(eventName: string, callback: Function): void;
}
/**
* @experimental
*/
@Injectable()
export class HammerGestureConfig {
events: string[] = [];
overrides: {[key: string]: Object} = {};
buildHammer(element: HTMLElement): HammerInstance {
const mc = new Hammer(element);
mc.get('pinch').set({enable: true});
mc.get('rotate').set({enable: true});
for (const eventName in this.overrides) {
mc.get(eventName).set(this.overrides[eventName]);
}
return mc;
}
}
@Injectable()
export class HammerGesturesPlugin extends EventManagerPlugin {
constructor(@Inject(HAMMER_GESTURE_CONFIG) private _config: HammerGestureConfig) { super(); }
supports(eventName: string): boolean {
if (!EVENT_NAMES.hasOwnProperty(eventName.toLowerCase()) && !this.isCustomEvent(eventName)) {
return false;
}
if (!(window as any).Hammer) {
throw new Error(`Hammer.js is not loaded, can not bind ${eventName} event`);
}
return true;
}
addEventListener(element: HTMLElement, eventName: string, handler: Function): Function {
const zone = this.manager.getZone();
eventName = eventName.toLowerCase();
return zone.runOutsideAngular(() => {
// Creating the manager bind events, must be done outside of angular
const mc = this._config.buildHammer(element);
const callback = function(eventObj: HammerInput) {
zone.runGuarded(function() { handler(eventObj); });
};
mc.on(eventName, callback);
return () => mc.off(eventName, callback);
});
}
isCustomEvent(eventName: string): boolean { return this._config.events.indexOf(eventName) > -1; }
} | * Hammer gestures.
*
* @experimental
*/ | random_line_split |
hammer_gestures.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Inject, Injectable, OpaqueToken} from '@angular/core';
import {EventManagerPlugin} from './event_manager';
const EVENT_NAMES = {
// pan
'pan': true,
'panstart': true,
'panmove': true,
'panend': true,
'pancancel': true,
'panleft': true,
'panright': true,
'panup': true,
'pandown': true,
// pinch
'pinch': true,
'pinchstart': true,
'pinchmove': true,
'pinchend': true,
'pinchcancel': true,
'pinchin': true,
'pinchout': true,
// press
'press': true,
'pressup': true,
// rotate
'rotate': true,
'rotatestart': true,
'rotatemove': true,
'rotateend': true,
'rotatecancel': true,
// swipe
'swipe': true,
'swipeleft': true,
'swiperight': true,
'swipeup': true,
'swipedown': true,
// tap
'tap': true,
};
/**
* A DI token that you can use to provide {@link HammerGestureConfig} to Angular. Use it to configure
* Hammer gestures.
*
* @experimental
*/
export const HAMMER_GESTURE_CONFIG: OpaqueToken = new OpaqueToken('HammerGestureConfig');
export interface HammerInstance {
on(eventName: string, callback: Function): void;
off(eventName: string, callback: Function): void;
}
/**
* @experimental
*/
@Injectable()
export class HammerGestureConfig {
events: string[] = [];
overrides: {[key: string]: Object} = {};
buildHammer(element: HTMLElement): HammerInstance {
const mc = new Hammer(element);
mc.get('pinch').set({enable: true});
mc.get('rotate').set({enable: true});
for (const eventName in this.overrides) {
mc.get(eventName).set(this.overrides[eventName]);
}
return mc;
}
}
@Injectable()
export class HammerGesturesPlugin extends EventManagerPlugin {
constructor(@Inject(HAMMER_GESTURE_CONFIG) private _config: HammerGestureConfig) { super(); }
supports(eventName: string): boolean {
if (!EVENT_NAMES.hasOwnProperty(eventName.toLowerCase()) && !this.isCustomEvent(eventName)) {
return false;
}
if (!(window as any).Hammer) {
throw new Error(`Hammer.js is not loaded, can not bind ${eventName} event`);
}
return true;
}
addEventListener(element: HTMLElement, eventName: string, handler: Function): Function {
const zone = this.manager.getZone();
eventName = eventName.toLowerCase();
return zone.runOutsideAngular(() => {
// Creating the manager bind events, must be done outside of angular
const mc = this._config.buildHammer(element);
const callback = function(eventObj: HammerInput) {
zone.runGuarded(function() { handler(eventObj); });
};
mc.on(eventName, callback);
return () => mc.off(eventName, callback);
});
}
| (eventName: string): boolean { return this._config.events.indexOf(eventName) > -1; }
}
| isCustomEvent | identifier_name |
hammer_gestures.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Inject, Injectable, OpaqueToken} from '@angular/core';
import {EventManagerPlugin} from './event_manager';
const EVENT_NAMES = {
// pan
'pan': true,
'panstart': true,
'panmove': true,
'panend': true,
'pancancel': true,
'panleft': true,
'panright': true,
'panup': true,
'pandown': true,
// pinch
'pinch': true,
'pinchstart': true,
'pinchmove': true,
'pinchend': true,
'pinchcancel': true,
'pinchin': true,
'pinchout': true,
// press
'press': true,
'pressup': true,
// rotate
'rotate': true,
'rotatestart': true,
'rotatemove': true,
'rotateend': true,
'rotatecancel': true,
// swipe
'swipe': true,
'swipeleft': true,
'swiperight': true,
'swipeup': true,
'swipedown': true,
// tap
'tap': true,
};
/**
* A DI token that you can use to provide {@link HammerGestureConfig} to Angular. Use it to configure
* Hammer gestures.
*
* @experimental
*/
export const HAMMER_GESTURE_CONFIG: OpaqueToken = new OpaqueToken('HammerGestureConfig');
export interface HammerInstance {
on(eventName: string, callback: Function): void;
off(eventName: string, callback: Function): void;
}
/**
* @experimental
*/
@Injectable()
export class HammerGestureConfig {
events: string[] = [];
overrides: {[key: string]: Object} = {};
buildHammer(element: HTMLElement): HammerInstance {
const mc = new Hammer(element);
mc.get('pinch').set({enable: true});
mc.get('rotate').set({enable: true});
for (const eventName in this.overrides) {
mc.get(eventName).set(this.overrides[eventName]);
}
return mc;
}
}
@Injectable()
export class HammerGesturesPlugin extends EventManagerPlugin {
constructor(@Inject(HAMMER_GESTURE_CONFIG) private _config: HammerGestureConfig) { super(); }
supports(eventName: string): boolean {
if (!EVENT_NAMES.hasOwnProperty(eventName.toLowerCase()) && !this.isCustomEvent(eventName)) {
return false;
}
if (!(window as any).Hammer) |
return true;
}
addEventListener(element: HTMLElement, eventName: string, handler: Function): Function {
const zone = this.manager.getZone();
eventName = eventName.toLowerCase();
return zone.runOutsideAngular(() => {
// Creating the manager bind events, must be done outside of angular
const mc = this._config.buildHammer(element);
const callback = function(eventObj: HammerInput) {
zone.runGuarded(function() { handler(eventObj); });
};
mc.on(eventName, callback);
return () => mc.off(eventName, callback);
});
}
isCustomEvent(eventName: string): boolean { return this._config.events.indexOf(eventName) > -1; }
}
| {
throw new Error(`Hammer.js is not loaded, can not bind ${eventName} event`);
} | conditional_block |
parse_args.py | = logging.getLogger(__name__)
class HelpFormatter(argparse.HelpFormatter):
def _format_usage(self, usage, actions, groups, prefix):
'''Special handling for usage lists
If usage is a list object, its elements will be printed on
separate lines. DEFAULT_USAGE will be replaced by the
default usage string of the parser (but, if ``usage`` is a list,
excluding any --help arguments).
'''
if isinstance(usage, list):
# Omit help argument
actions = [ x for x in actions if not isinstance(x, argparse._HelpAction) ]
res = []
for s in usage:
if not res:
res.append('usage: ')
else:
res.append(' or: ')
if s is DEFAULT_USAGE:
res.append(super()._format_usage(None, actions, groups, '')[:-1])
else:
res.append(s % dict(prog=self._prog))
res.append('\n')
return '%s\n\n' % ''.join(res)
elif usage is DEFAULT_USAGE:
return super()._format_usage(None, actions, groups, prefix)
else:
return super()._format_usage(usage, actions, groups, prefix)
def format_help(self):
help_ = super().format_help()
if help_.count('\n') > 2:
return help_ + '\n'
else:
return help_
class SubParsersAction(argparse._SubParsersAction):
'''A replacement for _SubParsersAction that keeps
track of the parent parser'''
def __init__(self, **kw):
self.parent = kw.pop('parent')
super().__init__(**kw)
def add_parser(self, *a, **kwargs):
'''Pass parent usage and add_help attributes to new parser'''
if 'usage' not in kwargs:
# Inherit, but preserve old progs attribute
usage = self.parent.usage
repl = dict(prog=self.parent.prog)
if isinstance(usage, list):
usage = [ (x % repl if isinstance(x, str) else x)
for x in usage ]
elif usage:
usage = usage % repl
kwargs['usage'] = usage
if 'help' in kwargs:
kwargs.setdefault('description', kwargs['help'].capitalize() + '.')
kwargs.setdefault('add_help', self.parent.add_help)
kwargs.setdefault('formatter_class', self.parent.formatter_class)
if 'parents' in kwargs:
for p in kwargs['parents']:
if p.epilog:
kwargs.setdefault('epilog', p.epilog % dict(prog=self.parent.prog))
return super().add_parser(*a, **kwargs)
class ArgumentParser(argparse.ArgumentParser):
def __init__(self, *a, **kw):
if 'formatter_class' not in kw:
kw['formatter_class'] = HelpFormatter
super().__init__(*a, **kw)
self.register('action', 'parsers', SubParsersAction)
def add_version(self):
self.add_argument('--version', action='version',
help="just print program version and exit",
version='S3QL %s' % RELEASE)
def add_quiet(self):
self.add_argument("--quiet", action="store_true", default=False,
help="be really quiet")
def add_backend_options(self):
self.add_argument("--backend-options", default={}, type=suboptions_type,
metavar='<options>',
help="Backend specific options (separate by commas). See "
"backend documentation for available options.")
def add_debug(self):
destnote = ('Debug messages will be written to the target '
'specified by the ``--log`` option.')
self.add_argument("--debug-modules", metavar='<modules>',
type=lambda s: s.split(','), dest='debug',
help="Activate debugging output from specified modules "
"(use commas to separate multiple modules). "
+ destnote)
self.add_argument("--debug", action='append_const', const='s3ql',
help="Activate debugging output from all S3QL modules. "
+ destnote)
def add_cachedir(self):
self.add_argument("--cachedir", type=str, metavar='<path>',
default=os.path.expanduser("~/.s3ql"),
help='Store cached data in this directory '
'(default: `~/.s3ql)`')
def add_log(self, default=None):
self.add_argument("--log", type=str_or_None_type, metavar='<target>', default=default,
help='Destination for log messages. Specify ``none`` for standard '
'output or ``syslog`` for the system logging daemon. '
'Anything else will be interpreted as a file name. Log files '
'will be rotated when they reach 1 MiB, and at most 5 old log '
'files will be kept. Default: ``%(default)s``')
def add_storage_url(self):
self.add_argument("storage_url", metavar='<storage-url>',
type=storage_url_type,
help='Storage URL of the backend that contains the file system')
self.add_argument("--authfile", type=str, metavar='<path>',
default=os.path.expanduser("~/.s3ql/authinfo2"),
help='Read authentication credentials from this file '
'(default: `~/.s3ql/authinfo2)`')
def add_compress(self):
def compression_type(s):
hit = re.match(r'^([a-z0-9]+)(?:-([0-9]))?$', s)
if not hit:
raise argparse.ArgumentTypeError('%s is not a valid --compress value' % s)
alg = hit.group(1)
lvl = hit.group(2)
if alg not in ('none', 'zlib', 'bzip2', 'lzma'):
raise argparse.ArgumentTypeError('Invalid compression algorithm: %s' % alg)
if lvl is None:
lvl = 6
else:
lvl = int(lvl)
if alg == 'none':
alg = None
return (alg, lvl)
self.add_argument("--compress", action="store", default='lzma-6',
metavar='<algorithm-lvl>', type=compression_type,
help="Compression algorithm and compression level to use when "
"storing new data. *algorithm* may be any of `lzma`, `bzip2`, "
"`zlib`, or none. *lvl* may be any integer from 0 (fastest) "
"to 9 (slowest). Default: `%(default)s`")
def add_subparsers(self, **kw):
'''Pass parent and set prog to default usage message'''
kw.setdefault('parser_class', argparse.ArgumentParser)
kw['parent'] = self
# prog defaults to the usage message of this parser, skipping
# optional arguments and with no "usage:" prefix
if kw.get('prog') is None:
formatter = self._get_formatter()
positionals = self._get_positional_actions()
groups = self._mutually_exclusive_groups
formatter.add_usage(None, positionals, groups, '')
kw['prog'] = formatter.format_help().strip()
return super().add_subparsers(**kw)
def _read_authinfo(self, path, storage_url):
ini_config = configparser.ConfigParser()
if os.path.isfile(path):
m |
merged = dict()
for section in ini_config.sections():
pattern = ini_config[section].get('storage-url', None)
if not pattern or not storage_url.startswith(pattern):
continue
for (key, val) in ini_config[section].items():
if key != 'storage-url':
merged[key] = val
return merged
def parse_args(self, *args, **kwargs):
try:
options = super().parse_args(*args, **kwargs)
except ArgumentError as exc:
self.error(str(exc))
if hasattr(options, 'authfile'):
storage_url = getattr(options, 'storage_url', '')
ini_config = self._read_authinfo(options.authfile, storage_url)
# Validate configuration file
fixed_keys = { 'backend-login', 'backend-password', 'fs-passphrase',
'storage-url' }
unknown_keys = (set(ini_config.keys())
- { x.replace('_', '-') for x in options.__dict__.keys() }
- fixed_keys)
if unknown_keys:
self.exit(2, 'Unknown key(s) in configuration file: ' +
', '.join(unknown_keys))
# Update defaults and re-parse arguments
defaults = { k.replace('-', '_'): v
for (k,v) in ini_config.items()
if k != 'storage_url' }
self.set_defaults(**defaults)
options = super().parse_args(*args, **kwargs)
if hasattr(options, 'storage_url'):
self._init_backend_factory(options)
if hasattr(options, 'cachedir'):
assert options.storage_url
if not os.path.exists(options.cachedir):
try:
os.mkdir(options.cachedir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
except PermissionError:
self.exit(45, 'No permission to create cache directory ' + options.cachedir)
if not os.access(options.cachedir, os.R_OK | os.W_OK | os.X_OK):
self.exit(45, 'No permission to access | ode = os.stat(path).st_mode
if mode & (stat.S_IRGRP | stat.S_IROTH):
self.exit(12, "%s has insecure permissions, aborting." % path)
ini_config.read(path)
| conditional_block |
parse_args.py | = logging.getLogger(__name__)
class HelpFormatter(argparse.HelpFormatter):
def _format_usage(self, usage, actions, groups, prefix):
'''Special handling for usage lists
If usage is a list object, its elements will be printed on
separate lines. DEFAULT_USAGE will be replaced by the
default usage string of the parser (but, if ``usage`` is a list,
excluding any --help arguments).
'''
if isinstance(usage, list):
# Omit help argument
actions = [ x for x in actions if not isinstance(x, argparse._HelpAction) ]
res = []
for s in usage:
if not res:
res.append('usage: ')
else:
res.append(' or: ')
if s is DEFAULT_USAGE:
res.append(super()._format_usage(None, actions, groups, '')[:-1])
else:
res.append(s % dict(prog=self._prog))
res.append('\n')
return '%s\n\n' % ''.join(res)
elif usage is DEFAULT_USAGE:
return super()._format_usage(None, actions, groups, prefix)
else:
return super()._format_usage(usage, actions, groups, prefix)
def format_help(self):
help_ = super().format_help()
if help_.count('\n') > 2:
return help_ + '\n'
else:
return help_
class SubParsersAction(argparse._SubParsersAction):
'''A replacement for _SubParsersAction that keeps
track of the parent parser'''
def __init__(self, **kw):
self.parent = kw.pop('parent')
super().__init__(**kw)
def add_parser(self, *a, **kwargs):
'''Pass parent usage and add_help attributes to new parser'''
if 'usage' not in kwargs:
# Inherit, but preserve old progs attribute
usage = self.parent.usage
repl = dict(prog=self.parent.prog)
if isinstance(usage, list):
usage = [ (x % repl if isinstance(x, str) else x)
for x in usage ]
elif usage:
usage = usage % repl
kwargs['usage'] = usage
if 'help' in kwargs:
kwargs.setdefault('description', kwargs['help'].capitalize() + '.')
kwargs.setdefault('add_help', self.parent.add_help)
kwargs.setdefault('formatter_class', self.parent.formatter_class)
if 'parents' in kwargs:
for p in kwargs['parents']:
if p.epilog:
kwargs.setdefault('epilog', p.epilog % dict(prog=self.parent.prog))
return super().add_parser(*a, **kwargs)
class ArgumentParser(argparse.ArgumentParser):
def __init__(self, *a, **kw):
if 'formatter_class' not in kw:
kw['formatter_class'] = HelpFormatter
super().__init__(*a, **kw)
self.register('action', 'parsers', SubParsersAction)
def add_version(self):
self.add_argument('--version', action='version',
help="just print program version and exit",
version='S3QL %s' % RELEASE)
def add_quiet(self):
self.add_argument("--quiet", action="store_true", default=False,
help="be really quiet")
def add_backend_options(self):
self.add_argument("--backend-options", default={}, type=suboptions_type,
metavar='<options>',
help="Backend specific options (separate by commas). See "
"backend documentation for available options.")
def add_debug(self):
destnote = ('Debug messages will be written to the target '
'specified by the ``--log`` option.')
self.add_argument("--debug-modules", metavar='<modules>',
type=lambda s: s.split(','), dest='debug',
help="Activate debugging output from specified modules "
"(use commas to separate multiple modules). "
+ destnote)
self.add_argument("--debug", action='append_const', const='s3ql',
help="Activate debugging output from all S3QL modules. "
+ destnote)
def add_cachedir(self):
self.add_argument("--cachedir", type=str, metavar='<path>',
default=os.path.expanduser("~/.s3ql"),
help='Store cached data in this directory '
'(default: `~/.s3ql)`')
def add_log(self, default=None):
self.add_argument("--log", type=str_or_None_type, metavar='<target>', default=default,
help='Destination for log messages. Specify ``none`` for standard '
'output or ``syslog`` for the system logging daemon. '
'Anything else will be interpreted as a file name. Log files '
'will be rotated when they reach 1 MiB, and at most 5 old log '
'files will be kept. Default: ``%(default)s``')
def a | self):
self.add_argument("storage_url", metavar='<storage-url>',
type=storage_url_type,
help='Storage URL of the backend that contains the file system')
self.add_argument("--authfile", type=str, metavar='<path>',
default=os.path.expanduser("~/.s3ql/authinfo2"),
help='Read authentication credentials from this file '
'(default: `~/.s3ql/authinfo2)`')
def add_compress(self):
def compression_type(s):
hit = re.match(r'^([a-z0-9]+)(?:-([0-9]))?$', s)
if not hit:
raise argparse.ArgumentTypeError('%s is not a valid --compress value' % s)
alg = hit.group(1)
lvl = hit.group(2)
if alg not in ('none', 'zlib', 'bzip2', 'lzma'):
raise argparse.ArgumentTypeError('Invalid compression algorithm: %s' % alg)
if lvl is None:
lvl = 6
else:
lvl = int(lvl)
if alg == 'none':
alg = None
return (alg, lvl)
self.add_argument("--compress", action="store", default='lzma-6',
metavar='<algorithm-lvl>', type=compression_type,
help="Compression algorithm and compression level to use when "
"storing new data. *algorithm* may be any of `lzma`, `bzip2`, "
"`zlib`, or none. *lvl* may be any integer from 0 (fastest) "
"to 9 (slowest). Default: `%(default)s`")
def add_subparsers(self, **kw):
'''Pass parent and set prog to default usage message'''
kw.setdefault('parser_class', argparse.ArgumentParser)
kw['parent'] = self
# prog defaults to the usage message of this parser, skipping
# optional arguments and with no "usage:" prefix
if kw.get('prog') is None:
formatter = self._get_formatter()
positionals = self._get_positional_actions()
groups = self._mutually_exclusive_groups
formatter.add_usage(None, positionals, groups, '')
kw['prog'] = formatter.format_help().strip()
return super().add_subparsers(**kw)
def _read_authinfo(self, path, storage_url):
ini_config = configparser.ConfigParser()
if os.path.isfile(path):
mode = os.stat(path).st_mode
if mode & (stat.S_IRGRP | stat.S_IROTH):
self.exit(12, "%s has insecure permissions, aborting." % path)
ini_config.read(path)
merged = dict()
for section in ini_config.sections():
pattern = ini_config[section].get('storage-url', None)
if not pattern or not storage_url.startswith(pattern):
continue
for (key, val) in ini_config[section].items():
if key != 'storage-url':
merged[key] = val
return merged
def parse_args(self, *args, **kwargs):
try:
options = super().parse_args(*args, **kwargs)
except ArgumentError as exc:
self.error(str(exc))
if hasattr(options, 'authfile'):
storage_url = getattr(options, 'storage_url', '')
ini_config = self._read_authinfo(options.authfile, storage_url)
# Validate configuration file
fixed_keys = { 'backend-login', 'backend-password', 'fs-passphrase',
'storage-url' }
unknown_keys = (set(ini_config.keys())
- { x.replace('_', '-') for x in options.__dict__.keys() }
- fixed_keys)
if unknown_keys:
self.exit(2, 'Unknown key(s) in configuration file: ' +
', '.join(unknown_keys))
# Update defaults and re-parse arguments
defaults = { k.replace('-', '_'): v
for (k,v) in ini_config.items()
if k != 'storage_url' }
self.set_defaults(**defaults)
options = super().parse_args(*args, **kwargs)
if hasattr(options, 'storage_url'):
self._init_backend_factory(options)
if hasattr(options, 'cachedir'):
assert options.storage_url
if not os.path.exists(options.cachedir):
try:
os.mkdir(options.cachedir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
except PermissionError:
self.exit(45, 'No permission to create cache directory ' + options.cachedir)
if not os.access(options.cachedir, os.R_OK | os.W_OK | os.X_OK):
self.exit(45, 'No permission to access | dd_storage_url( | identifier_name |
parse_args.py | log = logging.getLogger(__name__)
class HelpFormatter(argparse.HelpFormatter):
def _format_usage(self, usage, actions, groups, prefix):
'''Special handling for usage lists
If usage is a list object, its elements will be printed on
separate lines. DEFAULT_USAGE will be replaced by the
default usage string of the parser (but, if `usage`` is a list,
excluding any --help arguments)).
'''
if isinstance(usage, list):
# Omit help argument
actions = [ x for x in actions if not isinstance(x, argparse._HelpAction) ]
res = []
for s in usage:
if not res:
res.append('usage: ')
else:
res.append(' or: ')
if s is DEFAULT_USAGE:
res.append(super()._format_usage(None, actions, groups, '')[:-1])
else:
res.append(s % dict(prog=self._prog))
res.append('\n')
return '%s\n\n' % ''.join(res)
elif usage is DEFAULT_USAGE:
return super()._format_usage(None, actions, groups, prefix)
else:
return super()._format_usage(usage, actions, groups, prefix)
def format_help(self):
help_ = super().format_help()
if help_.count('\n') > 2:
return help_ + '\n'
else:
return help_
class SubParsersAction(argparse._SubParsersAction):
'''A replacement for _SubParsersAction that keeps
track of the parent parser'''
def __init__(self, **kw):
self.parent = kw.pop('parent')
super().__init__(**kw)
def add_parser(self, *a, **kwargs):
'''Pass parent usage and add_help attributes to new parser'''
if 'usage' not in kwargs:
# Inherit, but preserve old progs attribute
usage = self.parent.usage
repl = dict(prog=self.parent.prog)
if isinstance(usage, list):
usage = [ (x % repl if isinstance(x, str) else x)
for x in usage ]
elif usage:
usage = usage % repl
kwargs['usage'] = usage
if 'help' in kwargs:
kwargs.setdefault('description', kwargs['help'].capitalize() + '.')
kwargs.setdefault('add_help', self.parent.add_help)
kwargs.setdefault('formatter_class', self.parent.formatter_class)
if 'parents' in kwargs:
for p in kwargs['parents']:
if p.epilog:
kwargs.setdefault('epilog', p.epilog % dict(prog=self.parent.prog))
return super().add_parser(*a, **kwargs)
class ArgumentParser(argparse.ArgumentParser):
def __init__(self, *a, **kw):
if 'formatter_class' not in kw:
kw['formatter_class'] = HelpFormatter |
super().__init__(*a, **kw)
self.register('action', 'parsers', SubParsersAction)
def add_version(self):
self.add_argument('--version', action='version',
help="just print program version and exit",
version='S3QL %s' % RELEASE)
def add_quiet(self):
self.add_argument("--quiet", action="store_true", default=False,
help="be really quiet")
def add_backend_options(self):
self.add_argument("--backend-options", default={}, type=suboptions_type,
metavar='<options>',
help="Backend specific options (separate by commas). See "
"backend documentation for available options.")
def add_debug(self):
destnote = ('Debug messages will be written to the target '
'specified by the ``--log`` option.')
self.add_argument("--debug-modules", metavar='<modules>',
type=lambda s: s.split(','), dest='debug',
help="Activate debugging output from specified modules "
"(use commas to separate multiple modules). "
+ destnote)
self.add_argument("--debug", action='append_const', const='s3ql',
help="Activate debugging output from all S3QL modules. "
+ destnote)
def add_cachedir(self):
self.add_argument("--cachedir", type=str, metavar='<path>',
default=os.path.expanduser("~/.s3ql"),
help='Store cached data in this directory '
'(default: `~/.s3ql)`')
def add_log(self, default=None):
self.add_argument("--log", type=str_or_None_type, metavar='<target>', default=default,
help='Destination for log messages. Specify ``none`` for standard '
'output or ``syslog`` for the system logging daemon. '
'Anything else will be interpreted as a file name. Log files '
'will be rotated when they reach 1 MiB, and at most 5 old log '
'files will be kept. Default: ``%(default)s``')
def add_storage_url(self):
self.add_argument("storage_url", metavar='<storage-url>',
type=storage_url_type,
help='Storage URL of the backend that contains the file system')
self.add_argument("--authfile", type=str, metavar='<path>',
default=os.path.expanduser("~/.s3ql/authinfo2"),
help='Read authentication credentials from this file '
'(default: `~/.s3ql/authinfo2)`')
def add_compress(self):
def compression_type(s):
hit = re.match(r'^([a-z0-9]+)(?:-([0-9]))?$', s)
if not hit:
raise argparse.ArgumentTypeError('%s is not a valid --compress value' % s)
alg = hit.group(1)
lvl = hit.group(2)
if alg not in ('none', 'zlib', 'bzip2', 'lzma'):
raise argparse.ArgumentTypeError('Invalid compression algorithm: %s' % alg)
if lvl is None:
lvl = 6
else:
lvl = int(lvl)
if alg == 'none':
alg = None
return (alg, lvl)
self.add_argument("--compress", action="store", default='lzma-6',
metavar='<algorithm-lvl>', type=compression_type,
help="Compression algorithm and compression level to use when "
"storing new data. *algorithm* may be any of `lzma`, `bzip2`, "
"`zlib`, or none. *lvl* may be any integer from 0 (fastest) "
"to 9 (slowest). Default: `%(default)s`")
def add_subparsers(self, **kw):
'''Pass parent and set prog to default usage message'''
kw.setdefault('parser_class', argparse.ArgumentParser)
kw['parent'] = self
# prog defaults to the usage message of this parser, skipping
# optional arguments and with no "usage:" prefix
if kw.get('prog') is None:
formatter = self._get_formatter()
positionals = self._get_positional_actions()
groups = self._mutually_exclusive_groups
formatter.add_usage(None, positionals, groups, '')
kw['prog'] = formatter.format_help().strip()
return super().add_subparsers(**kw)
def _read_authinfo(self, path, storage_url):
ini_config = configparser.ConfigParser()
if os.path.isfile(path):
mode = os.stat(path).st_mode
if mode & (stat.S_IRGRP | stat.S_IROTH):
self.exit(12, "%s has insecure permissions, aborting." % path)
ini_config.read(path)
merged = dict()
for section in ini_config.sections():
pattern = ini_config[section].get('storage-url', None)
if not pattern or not storage_url.startswith(pattern):
continue
for (key, val) in ini_config[section].items():
if key != 'storage-url':
merged[key] = val
return merged
def parse_args(self, *args, **kwargs):
try:
options = super().parse_args(*args, **kwargs)
except ArgumentError as exc:
self.error(str(exc))
if hasattr(options, 'authfile'):
storage_url = getattr(options, 'storage_url', '')
ini_config = self._read_authinfo(options.authfile, storage_url)
# Validate configuration file
fixed_keys = { 'backend-login', 'backend-password', 'fs-passphrase',
'storage-url' }
unknown_keys = (set(ini_config.keys())
- { x.replace('_', '-') for x in options.__dict__.keys() }
- fixed_keys)
if unknown_keys:
self.exit(2, 'Unknown keys(s) in configuration file: ' +
', '.join(unknown_keys))
# Update defaults and re-parse arguments
defaults = { k.replace('-', '_'): v
for (k,v) in ini_config.items()
if k != 'storage_url' }
self.set_defaults(**defaults)
options = super().parse_args(*args, **kwargs)
if hasattr(options, 'storage_url'):
self._init_backend_factory(options)
if hasattr(options, 'cachedir'):
assert options.storage_url
if not os.path.exists(options.cachedir):
try:
os.mkdir(options.cachedir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
except PermissionError:
self.exit(45, 'No permission to create cache directory ' + options.cachedir)
if not os.access(options.cachedir, os.R_OK | os.W_OK | os.X_OK):
self.exit(45, 'No permission to access cache | random_line_split |
|
parse_args.py | = logging.getLogger(__name__)
class HelpFormatter(argparse.HelpFormatter):
def _format_usage(self, usage, actions, groups, prefix):
'''Special handling for usage lists
If usage is a list object, its elements will be printed on
separate lines. DEFAULT_USAGE will be replaced by the
default usage string of the parser (but, if `usage`` is a list,
excluding any --help arguments)).
'''
if isinstance(usage, list):
# Omit help argument
actions = [ x for x in actions if not isinstance(x, argparse._HelpAction) ]
res = []
for s in usage:
if not res:
res.append('usage: ')
else:
res.append(' or: ')
if s is DEFAULT_USAGE:
res.append(super()._format_usage(None, actions, groups, '')[:-1])
else:
res.append(s % dict(prog=self._prog))
res.append('\n')
return '%s\n\n' % ''.join(res)
elif usage is DEFAULT_USAGE:
return super()._format_usage(None, actions, groups, prefix)
else:
return super()._format_usage(usage, actions, groups, prefix)
def format_help(self):
help_ = super().format_help()
if help_.count('\n') > 2:
return help_ + '\n'
else:
return help_
class SubParsersAction(argparse._SubParsersAction):
'''A replacement for _SubParsersAction that keeps
track of the parent parser'''
def __init__(self, **kw):
self.parent = kw.pop('parent')
super().__init__(**kw)
def add_parser(self, *a, **kwargs):
'''Pass parent usage and add_help attributes to new parser'''
if 'usage' not in kwargs:
# Inherit, but preserve old progs attribute
usage = self.parent.usage
repl = dict(prog=self.parent.prog)
if isinstance(usage, list):
usage = [ (x % repl if isinstance(x, str) else x)
for x in usage ]
elif usage:
usage = usage % repl
kwargs['usage'] = usage
if 'help' in kwargs:
kwargs.setdefault('description', kwargs['help'].capitalize() + '.')
kwargs.setdefault('add_help', self.parent.add_help)
kwargs.setdefault('formatter_class', self.parent.formatter_class)
if 'parents' in kwargs:
for p in kwargs['parents']:
if p.epilog:
kwargs.setdefault('epilog', p.epilog % dict(prog=self.parent.prog))
return super().add_parser(*a, **kwargs)
class ArgumentParser(argparse.ArgumentParser):
def __init__(self, *a, **kw):
if 'formatter_class' not in kw:
kw['formatter_class'] = HelpFormatter
super().__init__(*a, **kw)
self.register('action', 'parsers', SubParsersAction)
def add_version(self):
self.add_argument('--version', action='version',
help="just print program version and exit",
version='S3QL %s' % RELEASE)
def add_quiet(self):
self.add_argument("--quiet", action="store_true", default=False,
help="be really quiet")
def add_backend_options(self):
self.add_argument("--backend-options", default={}, type=suboptions_type,
metavar='<options>',
help="Backend specific options (separate by commas). See "
"backend documentation for available options.")
def add_debug(self):
destnote = ('Debug messages will be written to the target '
'specified by the ``--log`` option.')
self.add_argument("--debug-modules", metavar='<modules>',
type=lambda s: s.split(','), dest='debug',
help="Activate debugging output from specified modules "
"(use commas to separate multiple modules). "
+ destnote)
self.add_argument("--debug", action='append_const', const='s3ql',
help="Activate debugging output from all S3QL modules. "
+ destnote)
def add_cachedir(self):
self.add_argument("--cachedir", type=str, metavar='<path>',
default=os.path.expanduser("~/.s3ql"),
help='Store cached data in this directory '
'(default: `~/.s3ql)`')
def add_log(self, default=None):
s |
def add_storage_url(self):
self.add_argument("storage_url", metavar='<storage-url>',
type=storage_url_type,
help='Storage URL of the backend that contains the file system')
self.add_argument("--authfile", type=str, metavar='<path>',
default=os.path.expanduser("~/.s3ql/authinfo2"),
help='Read authentication credentials from this file '
'(default: `~/.s3ql/authinfo2)`')
def add_compress(self):
def compression_type(s):
hit = re.match(r'^([a-z0-9]+)(?:-([0-9]))?$', s)
if not hit:
raise argparse.ArgumentTypeError('%s is not a valid --compress value' % s)
alg = hit.group(1)
lvl = hit.group(2)
if alg not in ('none', 'zlib', 'bzip2', 'lzma'):
raise argparse.ArgumentTypeError('Invalid compression algorithm: %s' % alg)
if lvl is None:
lvl = 6
else:
lvl = int(lvl)
if alg == 'none':
alg = None
return (alg, lvl)
self.add_argument("--compress", action="store", default='lzma-6',
metavar='<algorithm-lvl>', type=compression_type,
help="Compression algorithm and compression level to use when "
"storing new data. *algorithm* may be any of `lzma`, `bzip2`, "
"`zlib`, or none. *lvl* may be any integer from 0 (fastest) "
"to 9 (slowest). Default: `%(default)s`")
def add_subparsers(self, **kw):
'''Pass parent and set prog to default usage message'''
kw.setdefault('parser_class', argparse.ArgumentParser)
kw['parent'] = self
# prog defaults to the usage message of this parser, skipping
# optional arguments and with no "usage:" prefix
if kw.get('prog') is None:
formatter = self._get_formatter()
positionals = self._get_positional_actions()
groups = self._mutually_exclusive_groups
formatter.add_usage(None, positionals, groups, '')
kw['prog'] = formatter.format_help().strip()
return super().add_subparsers(**kw)
def _read_authinfo(self, path, storage_url):
ini_config = configparser.ConfigParser()
if os.path.isfile(path):
mode = os.stat(path).st_mode
if mode & (stat.S_IRGRP | stat.S_IROTH):
self.exit(12, "%s has insecure permissions, aborting." % path)
ini_config.read(path)
merged = dict()
for section in ini_config.sections():
pattern = ini_config[section].get('storage-url', None)
if not pattern or not storage_url.startswith(pattern):
continue
for (key, val) in ini_config[section].items():
if key != 'storage-url':
merged[key] = val
return merged
def parse_args(self, *args, **kwargs):
try:
options = super().parse_args(*args, **kwargs)
except ArgumentError as exc:
self.error(str(exc))
if hasattr(options, 'authfile'):
storage_url = getattr(options, 'storage_url', '')
ini_config = self._read_authinfo(options.authfile, storage_url)
# Validate configuration file
fixed_keys = { 'backend-login', 'backend-password', 'fs-passphrase',
'storage-url' }
unknown_keys = (set(ini_config.keys())
- { x.replace('_', '-') for x in options.__dict__.keys() }
- fixed_keys)
if unknown_keys:
self.exit(2, 'Unknown keys(s) in configuration file: ' +
', '.join(unknown_keys))
# Update defaults and re-parse arguments
defaults = { k.replace('-', '_'): v
for (k,v) in ini_config.items()
if k != 'storage_url' }
self.set_defaults(**defaults)
options = super().parse_args(*args, **kwargs)
if hasattr(options, 'storage_url'):
self._init_backend_factory(options)
if hasattr(options, 'cachedir'):
assert options.storage_url
if not os.path.exists(options.cachedir):
try:
os.mkdir(options.cachedir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
except PermissionError:
self.exit(45, 'No permission to create cache directory ' + options.cachedir)
if not os.access(options.cachedir, os.R_OK | os.W_OK | os.X_OK):
self.exit(45, 'No permission to access | elf.add_argument("--log", type=str_or_None_type, metavar='<target>', default=default,
help='Destination for log messages. Specify ``none`` for standard '
'output or ``syslog`` for the system logging daemon. '
'Anything else will be interpreted as a file name. Log files '
'will be rotated when they reach 1 MiB, and at most 5 old log '
'files will be kept. Default: ``%(default)s``')
| identifier_body |
test_iam2_project_vpc_cascade_delete.py | _uuid = None
vni_range_uuid = None
vxlan_pool_uuid = None
l3_vpc_network_uuid = None
dns_text = '223.5.5.5'
allservices = ["VRouterRoute","DHCP","IPsec","LoadBalancer","CentralizedDNS","Eip","DNS","SNAT","VipQos","PortForwarding"]
cond = res_ops.gen_query_conditions("type","=","vrouter")
network_service_provider_uuid = res_ops.query_resource(res_ops.NETWORK_SERVICE_PROVIDER,cond)[0].uuid
def create_l3_vpc(name,l2_uuid,session_uuid = None):
action = api_actions.CreateL3NetworkAction()
action.name = name
action.l2NetworkUuid = l2_uuid
action.timeout = 300000
action.type = inventory.VPC_L3_NETWORK_TYPE
action.sessionUuid = session_uuid
evt = acc_ops.execute_action_with_session(action,session_uuid)
test_util.action_logger('[l3:] %s is created' %name)
return evt.inventory
def AddDnsToL3Network(l3_network_uuid,dns_text,session_uuid = None):
action = api_actions.AddDnsToL3NetworkAction()
action.sessionUuid = session_uuid
action.dns = dns_text
action.l3NetworkUuid = l3_network_uuid
evt = acc_ops.execute_action_with_session(action,session_uuid)
test_util.action_logger('add dns to l3 network: %s' % l3_network_uuid)
return evt
def AttachNetworkServiceToL3Network(l3_network_uuid,allservices,session_uuid = None):
action = api_actions.AttachNetworkServiceToL3NetworkAction()
action.sessionUuid = session_uuid
action.l3NetworkUuid = l3_network_uuid
action.networkServices = {network_service_provider_uuid:allservices}
evt = acc_ops.execute_action_with_session(action,session_uuid)
test_util.action_logger('add network services to l3 network: %s' % l3_network_uuid)
return evt
def test():
global l2_vxlan_network_uuid,project_uuid,project_operator_uuid,vni_range_uuid,vxlan_pool_uuid,l3_vpc_network_uuid
# create vxlan pool and vni range
zone_uuid = res_ops.get_resource(res_ops.ZONE)[0].uuid
cluster_uuid = res_ops.get_resource(res_ops.CLUSTER)[0].uuid
vxlan_pool_name = 'vxlan_pool_name'
vxlan_pool_uuid = vxlan_ops.create_l2_vxlan_network_pool(vxlan_pool_name,zone_uuid).uuid
vxlan_ops.create_vni_range('vni_range',20,40,vxlan_pool_uuid)
systemTags = ["l2NetworkUuid::%s::clusterUuid::%s::cidr::{172.20.0.1/16}"%(vxlan_pool_uuid,cluster_uuid)]
net_ops.attach_l2_vxlan_pool(vxlan_pool_uuid,cluster_uuid,systemTags)
# 1 create project
project_name = 'test_project7'
project = iam2_ops.create_iam2_project(project_name)
project_uuid = project.uuid
#cond = res_ops.gen_query_conditions("name",'=',"test_project7")
#linked_account_uuid = res_ops.query_resource(res_ops.ACCOUNT,cond)[0].uuid
linked_account_uuid = project.linkedAccountUuid
# 2 create project operator
project_operator_name = 'username7'
project_operator_password = 'b109f3bbbc244eb82441917ed06d618b9008dd09b3befd1b5e07394c706a8bb980b1d7785e5976ec049b46df5f1326af5a2ea6d103fd07c95385ffab0cacbc86'
attributes = [{"name": "__ProjectOperator__", "value": project_uuid}]
project_operator_uuid = iam2_ops.create_iam2_virtual_id(project_operator_name,project_operator_password,attributes=attributes).uuid
zone_uuid = res_ops.get_resource(res_ops.ZONE)[0].uuid
attributes = [{"name": "__ProjectRelatedZone__", "value": zone_uuid}]
iam2_ops.add_attributes_to_iam2_project(project_uuid, attributes)
# 3 login in project by project operator
iam2_ops.add_iam2_virtual_ids_to_project([project_operator_uuid],project_uuid)
project_operator_session_uuid = iam2_ops.login_iam2_virtual_id(project_operator_name,project_operator_password)
project_login_uuid = iam2_ops.login_iam2_project(project_name,session_uuid=project_operator_session_uuid).uuid
# 4 share vxlan pool to project
l2vxlan_pools = res_ops.query_resource(res_ops.L2_VXLAN_NETWORK_POOL)
for l2vxlan_pool in l2vxlan_pools:
acc_ops.share_resources([linked_account_uuid],[l2vxlan_pool.uuid])
# 5 create l2 vxlan
l2_vxlan_network_uuid = vxlan_ops.create_l2_vxlan_network('l2_vxlan',vxlan_pool_uuid,zone_uuid,session_uuid=project_login_uuid).uuid
# 6 use l2 vxlan to create l3 vpc
l3_vpc_network = create_l3_vpc('test_vpc',l2_vxlan_network_uuid,project_login_uuid)
l3_vpc_network_uuid = l3_vpc_network.uuid
# add ip range
ir_option = test_util.IpRangeOption()
ir_option.set_name('iprange2')
ir_option.set_description('iprange for vpc')
ir_option.set_netmask('255.255.255.0')
ir_option.set_gateway('192.168.23.1')
ir_option.set_l3_uuid(l3_vpc_network_uuid)
ir_option.set_startIp('192.168.23.2')
ir_option.set_endIp('192.168.23.254')
net_ops.add_ip_range(ir_option)
# add network service
AttachNetworkServiceToL3Network(l3_vpc_network_uuid,allservices,session_uuid = project_login_uuid)
# share the vr_offering to project and do create vpc router and vpc network
cond = res_ops.gen_query_conditions("name",'=',"virtual-router-vm")
vr_offering_uuid = res_ops.query_resource(res_ops.VR_OFFERING,cond)[0].uuid
acc_ops.share_resources([linked_account_uuid],[vr_offering_uuid])
vpc_ops.create_vpc_vrouter(name = 'test_vpc_vr', virtualrouter_offering_uuid = vr_offering_uuid,session_uuid = project_login_uuid)
vpc_vr = test_stub.query_vpc_vrouter('test_vpc_vr')
vpc_vr.add_nic(l3_vpc_network_uuid)
# 7 expunge the project and check the l2 vxlan
iam2_ops.delete_iam2_project(project_uuid)
iam2_ops.expunge_iam2_project(project_uuid)
try:
l2_vxlan_network_test_uuid = res_ops.query_resource(res_ops.L2_VXLAN_NETWORK)[0].uuid
except:
test_util.test_pass(
"l2 vxlan is delete after deleted the project " )
test_util.test_dsc('test l2 l2 cascade delete')
# 8 check the vpc network and vpc_vr
try:
cond = res_ops.gen_query_conditions("name",'=',"test_vpc")
l3_vpc_network_uuid = res_ops.query_resource(res_ops.L3_NETWORK,cond)[0].uuid
except:
test_util.test_pass(
"l3_vpc is delete after deleted the project")
cond = res_ops.gen_query_conditions("name",'=',"test_vpc_vr")
vpc_vr = res_ops.query_resource(res_ops.VIRTUALROUTER_VM,cond)
if not vpc_vr.inv.state is 'Paused':
test_util.test_fail(
"vpc vr [%s] is still exist after delete and expunge the project [%s]" % (vpc_vr.uuid,project_uuid))
# 9 delete
vni_range_uuid = res_ops.get_resource(res_ops.VNI_RANGE)[0].uuid
vxlan_ops.delete_vni_range(vni_range_uuid)
net_ops.delete_l2(vxlan_pool_uuid)
iam2_ops.delete_iam2_virtual_id(project_operator_uuid)
def error_cleanup():
| if project_uuid:
iam2_ops.delete_iam2_project(project_uuid)
iam2_ops.expunge_iam2_project(project_uuid)
if project_operator_uuid:
iam2_ops.delete_iam2_virtual_id(project_operator_uuid)
if l2_vxlan_network_uuid:
net_ops.delete_l2(l2_vxlan_network_uuid)
if vni_range_uuid:
vxlan_ops.delete_vni_range(vni_range_uuid)
if vxlan_pool_uuid:
net_ops.delete_l2(vxlan_pool_uuid)
if l3_vpc_network_uuid:
net_ops.delete_l3(l3_vpc_network_uuid) | identifier_body |
|
test_iam2_project_vpc_cascade_delete.py | lan_ops
import zstackwoodpecker.operations.net_operations as net_ops
import zstackwoodpecker.operations.account_operations as acc_ops
import zstackwoodpecker.operations.tag_operations as tag_ops
import zstackwoodpecker.operations.deploy_operations as dep_ops
import zstackwoodpecker.operations.vpcdns_operations as vpcdns_ops
import apibinding.inventory as inventory
import zstackwoodpecker.operations.vpc_operations as vpc_ops
import os
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
l2_vxlan_network_uuid = None
project_uuid = None
project_operator_uuid = None
vni_range_uuid = None
vxlan_pool_uuid = None
l3_vpc_network_uuid = None
dns_text = '223.5.5.5'
allservices = ["VRouterRoute","DHCP","IPsec","LoadBalancer","CentralizedDNS","Eip","DNS","SNAT","VipQos","PortForwarding"]
cond = res_ops.gen_query_conditions("type","=","vrouter")
network_service_provider_uuid = res_ops.query_resource(res_ops.NETWORK_SERVICE_PROVIDER,cond)[0].uuid
def create_l3_vpc(name,l2_uuid,session_uuid = None):
action = api_actions.CreateL3NetworkAction()
action.name = name
action.l2NetworkUuid = l2_uuid
action.timeout = 300000
action.type = inventory.VPC_L3_NETWORK_TYPE
action.sessionUuid = session_uuid
evt = acc_ops.execute_action_with_session(action,session_uuid)
test_util.action_logger('[l3:] %s is created' %name)
return evt.inventory
def AddDnsToL3Network(l3_network_uuid,dns_text,session_uuid = None):
action = api_actions.AddDnsToL3NetworkAction()
action.sessionUuid = session_uuid
action.dns = dns_text
action.l3NetworkUuid = l3_network_uuid
evt = acc_ops.execute_action_with_session(action,session_uuid)
test_util.action_logger('add dns to l3 network: %s' % l3_network_uuid)
return evt
def AttachNetworkServiceToL3Network(l3_network_uuid,allservices,session_uuid = None):
action = api_actions.AttachNetworkServiceToL3NetworkAction()
action.sessionUuid = session_uuid
action.l3NetworkUuid = l3_network_uuid
action.networkServices = {network_service_provider_uuid:allservices}
evt = acc_ops.execute_action_with_session(action,session_uuid)
test_util.action_logger('add network services to l3 network: %s' % l3_network_uuid)
return evt
def test():
global l2_vxlan_network_uuid,project_uuid,project_operator_uuid,vni_range_uuid,vxlan_pool_uuid,l3_vpc_network_uuid
# create vxlan pool and vni range
zone_uuid = res_ops.get_resource(res_ops.ZONE)[0].uuid
cluster_uuid = res_ops.get_resource(res_ops.CLUSTER)[0].uuid
vxlan_pool_name = 'vxlan_pool_name'
vxlan_pool_uuid = vxlan_ops.create_l2_vxlan_network_pool(vxlan_pool_name,zone_uuid).uuid
vxlan_ops.create_vni_range('vni_range',20,40,vxlan_pool_uuid)
systemTags = ["l2NetworkUuid::%s::clusterUuid::%s::cidr::{172.20.0.1/16}"%(vxlan_pool_uuid,cluster_uuid)]
net_ops.attach_l2_vxlan_pool(vxlan_pool_uuid,cluster_uuid,systemTags)
# 1 create project
project_name = 'test_project7'
project = iam2_ops.create_iam2_project(project_name)
project_uuid = project.uuid
#cond = res_ops.gen_query_conditions("name",'=',"test_project7")
#linked_account_uuid = res_ops.query_resource(res_ops.ACCOUNT,cond)[0].uuid
linked_account_uuid = project.linkedAccountUuid
# 2 create project operator
project_operator_name = 'username7'
project_operator_password = 'b109f3bbbc244eb82441917ed06d618b9008dd09b3befd1b5e07394c706a8bb980b1d7785e5976ec049b46df5f1326af5a2ea6d103fd07c95385ffab0cacbc86'
attributes = [{"name": "__ProjectOperator__", "value": project_uuid}]
project_operator_uuid = iam2_ops.create_iam2_virtual_id(project_operator_name,project_operator_password,attributes=attributes).uuid
zone_uuid = res_ops.get_resource(res_ops.ZONE)[0].uuid
attributes = [{"name": "__ProjectRelatedZone__", "value": zone_uuid}]
iam2_ops.add_attributes_to_iam2_project(project_uuid, attributes)
# 3 login in project by project operator
iam2_ops.add_iam2_virtual_ids_to_project([project_operator_uuid],project_uuid)
project_operator_session_uuid = iam2_ops.login_iam2_virtual_id(project_operator_name,project_operator_password)
project_login_uuid = iam2_ops.login_iam2_project(project_name,session_uuid=project_operator_session_uuid).uuid
# 4 share vxlan pool to project
l2vxlan_pools = res_ops.query_resource(res_ops.L2_VXLAN_NETWORK_POOL)
for l2vxlan_pool in l2vxlan_pools:
acc_ops.share_resources([linked_account_uuid],[l2vxlan_pool.uuid])
# 5 create l2 vxlan
l2_vxlan_network_uuid = vxlan_ops.create_l2_vxlan_network('l2_vxlan',vxlan_pool_uuid,zone_uuid,session_uuid=project_login_uuid).uuid
# 6 use l2 vxlan to create l3 vpc
l3_vpc_network = create_l3_vpc('test_vpc',l2_vxlan_network_uuid,project_login_uuid)
l3_vpc_network_uuid = l3_vpc_network.uuid
# add ip range
ir_option = test_util.IpRangeOption()
ir_option.set_name('iprange2')
ir_option.set_description('iprange for vpc')
ir_option.set_netmask('255.255.255.0')
ir_option.set_gateway('192.168.23.1')
ir_option.set_l3_uuid(l3_vpc_network_uuid)
ir_option.set_startIp('192.168.23.2')
ir_option.set_endIp('192.168.23.254')
net_ops.add_ip_range(ir_option)
# add network service
AttachNetworkServiceToL3Network(l3_vpc_network_uuid,allservices,session_uuid = project_login_uuid)
# share the vr_offering to project and do create vpc router and vpc network
cond = res_ops.gen_query_conditions("name",'=',"virtual-router-vm")
vr_offering_uuid = res_ops.query_resource(res_ops.VR_OFFERING,cond)[0].uuid
acc_ops.share_resources([linked_account_uuid],[vr_offering_uuid])
vpc_ops.create_vpc_vrouter(name = 'test_vpc_vr', virtualrouter_offering_uuid = vr_offering_uuid,session_uuid = project_login_uuid)
vpc_vr = test_stub.query_vpc_vrouter('test_vpc_vr')
vpc_vr.add_nic(l3_vpc_network_uuid)
# 7 expunge the project and check the l2 vxlan
iam2_ops.delete_iam2_project(project_uuid)
iam2_ops.expunge_iam2_project(project_uuid)
try:
l2_vxlan_network_test_uuid = res_ops.query_resource(res_ops.L2_VXLAN_NETWORK)[0].uuid
except:
test_util.test_pass(
"l2 vxlan is delete after deleted the project " )
test_util.test_dsc('test l2 l2 cascade delete')
# 8 check the vpc network and vpc_vr
try:
cond = res_ops.gen_query_conditions("name",'=',"test_vpc")
l3_vpc_network_uuid = res_ops.query_resource(res_ops.L3_NETWORK,cond)[0].uuid
except:
test_util.test_pass(
"l3_vpc is delete after deleted the project")
cond = res_ops.gen_query_conditions("name",'=',"test_vpc_vr")
vpc_vr = res_ops.query_resource(res_ops.VIRTUALROUTER_VM,cond)
if not vpc_vr.inv.state is 'Paused':
test_util.test_fail(
"vpc vr [%s] is still exist after delete and expunge the project [%s]" % (vpc_vr.uuid,project_uuid))
# 9 delete
vni_range_uuid = res_ops.get_resource(res_ops.VNI_RANGE)[0].uuid
vxlan_ops.delete_vni_range(vni_range_uuid)
net_ops.delete_l2(vxlan_pool_uuid)
iam2_ops.delete_iam2_virtual_id(project_operator_uuid)
def | error_cleanup | identifier_name |
|
test_iam2_project_vpc_cascade_delete.py | zstackwoodpecker.operations.affinitygroup_operations as ag_ops
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.operations.vxlan_operations as vxlan_ops
import zstackwoodpecker.operations.net_operations as net_ops
import zstackwoodpecker.operations.account_operations as acc_ops
import zstackwoodpecker.operations.tag_operations as tag_ops
import zstackwoodpecker.operations.deploy_operations as dep_ops
import zstackwoodpecker.operations.vpcdns_operations as vpcdns_ops
import apibinding.inventory as inventory
import zstackwoodpecker.operations.vpc_operations as vpc_ops
import os
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
l2_vxlan_network_uuid = None
project_uuid = None
project_operator_uuid = None
vni_range_uuid = None
vxlan_pool_uuid = None
l3_vpc_network_uuid = None
dns_text = '223.5.5.5'
allservices = ["VRouterRoute","DHCP","IPsec","LoadBalancer","CentralizedDNS","Eip","DNS","SNAT","VipQos","PortForwarding"]
cond = res_ops.gen_query_conditions("type","=","vrouter")
network_service_provider_uuid = res_ops.query_resource(res_ops.NETWORK_SERVICE_PROVIDER,cond)[0].uuid
def create_l3_vpc(name,l2_uuid,session_uuid = None):
action = api_actions.CreateL3NetworkAction()
action.name = name
action.l2NetworkUuid = l2_uuid
action.timeout = 300000
action.type = inventory.VPC_L3_NETWORK_TYPE
action.sessionUuid = session_uuid
evt = acc_ops.execute_action_with_session(action,session_uuid)
test_util.action_logger('[l3:] %s is created' %name)
return evt.inventory
def AddDnsToL3Network(l3_network_uuid,dns_text,session_uuid = None):
action = api_actions.AddDnsToL3NetworkAction()
action.sessionUuid = session_uuid
action.dns = dns_text
action.l3NetworkUuid = l3_network_uuid
evt = acc_ops.execute_action_with_session(action,session_uuid)
test_util.action_logger('add dns to l3 network: %s' % l3_network_uuid)
return evt
def AttachNetworkServiceToL3Network(l3_network_uuid,allservices,session_uuid = None):
action = api_actions.AttachNetworkServiceToL3NetworkAction()
action.sessionUuid = session_uuid
action.l3NetworkUuid = l3_network_uuid
action.networkServices = {network_service_provider_uuid:allservices}
evt = acc_ops.execute_action_with_session(action,session_uuid)
test_util.action_logger('add network services to l3 network: %s' % l3_network_uuid)
return evt
def test():
global l2_vxlan_network_uuid,project_uuid,project_operator_uuid,vni_range_uuid,vxlan_pool_uuid,l3_vpc_network_uuid
# create vxlan pool and vni range
zone_uuid = res_ops.get_resource(res_ops.ZONE)[0].uuid
cluster_uuid = res_ops.get_resource(res_ops.CLUSTER)[0].uuid
vxlan_pool_name = 'vxlan_pool_name'
vxlan_pool_uuid = vxlan_ops.create_l2_vxlan_network_pool(vxlan_pool_name,zone_uuid).uuid
vxlan_ops.create_vni_range('vni_range',20,40,vxlan_pool_uuid)
systemTags = ["l2NetworkUuid::%s::clusterUuid::%s::cidr::{172.20.0.1/16}"%(vxlan_pool_uuid,cluster_uuid)]
net_ops.attach_l2_vxlan_pool(vxlan_pool_uuid,cluster_uuid,systemTags)
# 1 create project
project_name = 'test_project7'
project = iam2_ops.create_iam2_project(project_name)
project_uuid = project.uuid
#cond = res_ops.gen_query_conditions("name",'=',"test_project7")
#linked_account_uuid = res_ops.query_resource(res_ops.ACCOUNT,cond)[0].uuid
linked_account_uuid = project.linkedAccountUuid
# 2 create project operator
project_operator_name = 'username7'
project_operator_password = 'b109f3bbbc244eb82441917ed06d618b9008dd09b3befd1b5e07394c706a8bb980b1d7785e5976ec049b46df5f1326af5a2ea6d103fd07c95385ffab0cacbc86'
attributes = [{"name": "__ProjectOperator__", "value": project_uuid}]
project_operator_uuid = iam2_ops.create_iam2_virtual_id(project_operator_name,project_operator_password,attributes=attributes).uuid
zone_uuid = res_ops.get_resource(res_ops.ZONE)[0].uuid
attributes = [{"name": "__ProjectRelatedZone__", "value": zone_uuid}]
iam2_ops.add_attributes_to_iam2_project(project_uuid, attributes)
# 3 login in project by project operator
iam2_ops.add_iam2_virtual_ids_to_project([project_operator_uuid],project_uuid)
project_operator_session_uuid = iam2_ops.login_iam2_virtual_id(project_operator_name,project_operator_password)
project_login_uuid = iam2_ops.login_iam2_project(project_name,session_uuid=project_operator_session_uuid).uuid
# 4 share vxlan pool to project
l2vxlan_pools = res_ops.query_resource(res_ops.L2_VXLAN_NETWORK_POOL)
for l2vxlan_pool in l2vxlan_pools:
acc_ops.share_resources([linked_account_uuid],[l2vxlan_pool.uuid])
# 5 create l2 vxlan
l2_vxlan_network_uuid = vxlan_ops.create_l2_vxlan_network('l2_vxlan',vxlan_pool_uuid,zone_uuid,session_uuid=project_login_uuid).uuid
# 6 use l2 vxlan to create l3 vpc
l3_vpc_network = create_l3_vpc('test_vpc',l2_vxlan_network_uuid,project_login_uuid)
l3_vpc_network_uuid = l3_vpc_network.uuid
# add ip range
ir_option = test_util.IpRangeOption()
ir_option.set_name('iprange2')
ir_option.set_description('iprange for vpc')
ir_option.set_netmask('255.255.255.0')
ir_option.set_gateway('192.168.23.1')
ir_option.set_l3_uuid(l3_vpc_network_uuid)
ir_option.set_startIp('192.168.23.2')
ir_option.set_endIp('192.168.23.254')
net_ops.add_ip_range(ir_option)
# add network service
AttachNetworkServiceToL3Network(l3_vpc_network_uuid,allservices,session_uuid = project_login_uuid)
# share the vr_offering to project and do create vpc router and vpc network
cond = res_ops.gen_query_conditions("name",'=',"virtual-router-vm")
vr_offering_uuid = res_ops.query_resource(res_ops.VR_OFFERING,cond)[0].uuid
acc_ops.share_resources([linked_account_uuid],[vr_offering_uuid])
vpc_ops.create_vpc_vrouter(name = 'test_vpc_vr', virtualrouter_offering_uuid = vr_offering_uuid,session_uuid = project_login_uuid)
vpc_vr = test_stub.query_vpc_vrouter('test_vpc_vr')
vpc_vr.add_nic(l3_vpc_network_uuid)
# 7 expunge the project and check the l2 vxlan
iam2_ops.delete_iam2_project(project_uuid)
iam2_ops.expunge_iam2_project(project_uuid)
try:
l2_vxlan_network_test_uuid = res_ops.query_resource(res_ops.L2_VXLAN_NETWORK)[0].uuid
except:
test_util.test_pass(
"l2 vxlan is delete after deleted the project " )
test_util.test_dsc('test l2 l2 cascade delete')
# 8 check the vpc network and vpc_vr
try:
cond = res_ops.gen_query_conditions("name",'=',"test_vpc")
l3_vpc_network_uuid = res_ops.query_resource(res_ops.L3_NETWORK,cond)[0].uuid
except:
test_util.test_pass(
"l3_vpc is delete after deleted the project")
cond = res_ops.gen_query_conditions("name",'=',"test_vpc_vr")
vpc_vr = res_ops.query_resource(res_ops.VIRTUALROUTER_VM,cond)
if not vpc_vr.inv.state is 'Paused':
|
# 9 delete
vni_range_uuid = res_ops.get_resource(res_ops.VNI_RANGE)[0].uuid
vxlan | test_util.test_fail(
"vpc vr [%s] is still exist after delete and expunge the project [%s]" % (vpc_vr.uuid,project_uuid)) | conditional_block |
test_iam2_project_vpc_cascade_delete.py | stackwoodpecker.operations.affinitygroup_operations as ag_ops
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.operations.vxlan_operations as vxlan_ops
import zstackwoodpecker.operations.net_operations as net_ops
import zstackwoodpecker.operations.account_operations as acc_ops
import zstackwoodpecker.operations.tag_operations as tag_ops
import zstackwoodpecker.operations.deploy_operations as dep_ops
import zstackwoodpecker.operations.vpcdns_operations as vpcdns_ops
import apibinding.inventory as inventory
import zstackwoodpecker.operations.vpc_operations as vpc_ops
import os
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
l2_vxlan_network_uuid = None
project_uuid = None
project_operator_uuid = None
vni_range_uuid = None
vxlan_pool_uuid = None
l3_vpc_network_uuid = None
dns_text = '223.5.5.5'
allservices = ["VRouterRoute","DHCP","IPsec","LoadBalancer","CentralizedDNS","Eip","DNS","SNAT","VipQos","PortForwarding"]
cond = res_ops.gen_query_conditions("type","=","vrouter")
network_service_provider_uuid = res_ops.query_resource(res_ops.NETWORK_SERVICE_PROVIDER,cond)[0].uuid
def create_l3_vpc(name,l2_uuid,session_uuid = None):
action = api_actions.CreateL3NetworkAction()
action.name = name | test_util.action_logger('[l3:] %s is created' %name)
return evt.inventory
def AddDnsToL3Network(l3_network_uuid,dns_text,session_uuid = None):
action = api_actions.AddDnsToL3NetworkAction()
action.sessionUuid = session_uuid
action.dns = dns_text
action.l3NetworkUuid = l3_network_uuid
evt = acc_ops.execute_action_with_session(action,session_uuid)
test_util.action_logger('add dns to l3 network: %s' % l3_network_uuid)
return evt
def AttachNetworkServiceToL3Network(l3_network_uuid,allservices,session_uuid = None):
action = api_actions.AttachNetworkServiceToL3NetworkAction()
action.sessionUuid = session_uuid
action.l3NetworkUuid = l3_network_uuid
action.networkServices = {network_service_provider_uuid:allservices}
evt = acc_ops.execute_action_with_session(action,session_uuid)
test_util.action_logger('add network services to l3 network: %s' % l3_network_uuid)
return evt
def test():
global l2_vxlan_network_uuid,project_uuid,project_operator_uuid,vni_range_uuid,vxlan_pool_uuid,l3_vpc_network_uuid
# create vxlan pool and vni range
zone_uuid = res_ops.get_resource(res_ops.ZONE)[0].uuid
cluster_uuid = res_ops.get_resource(res_ops.CLUSTER)[0].uuid
vxlan_pool_name = 'vxlan_pool_name'
vxlan_pool_uuid = vxlan_ops.create_l2_vxlan_network_pool(vxlan_pool_name,zone_uuid).uuid
vxlan_ops.create_vni_range('vni_range',20,40,vxlan_pool_uuid)
systemTags = ["l2NetworkUuid::%s::clusterUuid::%s::cidr::{172.20.0.1/16}"%(vxlan_pool_uuid,cluster_uuid)]
net_ops.attach_l2_vxlan_pool(vxlan_pool_uuid,cluster_uuid,systemTags)
# 1 create project
project_name = 'test_project7'
project = iam2_ops.create_iam2_project(project_name)
project_uuid = project.uuid
#cond = res_ops.gen_query_conditions("name",'=',"test_project7")
#linked_account_uuid = res_ops.query_resource(res_ops.ACCOUNT,cond)[0].uuid
linked_account_uuid = project.linkedAccountUuid
# 2 create project operator
project_operator_name = 'username7'
project_operator_password = 'b109f3bbbc244eb82441917ed06d618b9008dd09b3befd1b5e07394c706a8bb980b1d7785e5976ec049b46df5f1326af5a2ea6d103fd07c95385ffab0cacbc86'
attributes = [{"name": "__ProjectOperator__", "value": project_uuid}]
project_operator_uuid = iam2_ops.create_iam2_virtual_id(project_operator_name,project_operator_password,attributes=attributes).uuid
zone_uuid = res_ops.get_resource(res_ops.ZONE)[0].uuid
attributes = [{"name": "__ProjectRelatedZone__", "value": zone_uuid}]
iam2_ops.add_attributes_to_iam2_project(project_uuid, attributes)
# 3 login in project by project operator
iam2_ops.add_iam2_virtual_ids_to_project([project_operator_uuid],project_uuid)
project_operator_session_uuid = iam2_ops.login_iam2_virtual_id(project_operator_name,project_operator_password)
project_login_uuid = iam2_ops.login_iam2_project(project_name,session_uuid=project_operator_session_uuid).uuid
# 4 share vxlan pool to project
l2vxlan_pools = res_ops.query_resource(res_ops.L2_VXLAN_NETWORK_POOL)
for l2vxlan_pool in l2vxlan_pools:
acc_ops.share_resources([linked_account_uuid],[l2vxlan_pool.uuid])
# 5 create l2 vxlan
l2_vxlan_network_uuid = vxlan_ops.create_l2_vxlan_network('l2_vxlan',vxlan_pool_uuid,zone_uuid,session_uuid=project_login_uuid).uuid
# 6 use l2 vxlan to create l3 vpc
l3_vpc_network = create_l3_vpc('test_vpc',l2_vxlan_network_uuid,project_login_uuid)
l3_vpc_network_uuid = l3_vpc_network.uuid
# add ip range
ir_option = test_util.IpRangeOption()
ir_option.set_name('iprange2')
ir_option.set_description('iprange for vpc')
ir_option.set_netmask('255.255.255.0')
ir_option.set_gateway('192.168.23.1')
ir_option.set_l3_uuid(l3_vpc_network_uuid)
ir_option.set_startIp('192.168.23.2')
ir_option.set_endIp('192.168.23.254')
net_ops.add_ip_range(ir_option)
# add network service
AttachNetworkServiceToL3Network(l3_vpc_network_uuid,allservices,session_uuid = project_login_uuid)
# share the vr_offering to project and do create vpc router and vpc network
cond = res_ops.gen_query_conditions("name",'=',"virtual-router-vm")
vr_offering_uuid = res_ops.query_resource(res_ops.VR_OFFERING,cond)[0].uuid
acc_ops.share_resources([linked_account_uuid],[vr_offering_uuid])
vpc_ops.create_vpc_vrouter(name = 'test_vpc_vr', virtualrouter_offering_uuid = vr_offering_uuid,session_uuid = project_login_uuid)
vpc_vr = test_stub.query_vpc_vrouter('test_vpc_vr')
vpc_vr.add_nic(l3_vpc_network_uuid)
# 7 expunge the project and check the l2 vxlan
iam2_ops.delete_iam2_project(project_uuid)
iam2_ops.expunge_iam2_project(project_uuid)
try:
l2_vxlan_network_test_uuid = res_ops.query_resource(res_ops.L2_VXLAN_NETWORK)[0].uuid
except:
test_util.test_pass(
"l2 vxlan is delete after deleted the project " )
test_util.test_dsc('test l2 l2 cascade delete')
# 8 check the vpc network and vpc_vr
try:
cond = res_ops.gen_query_conditions("name",'=',"test_vpc")
l3_vpc_network_uuid = res_ops.query_resource(res_ops.L3_NETWORK,cond)[0].uuid
except:
test_util.test_pass(
"l3_vpc is delete after deleted the project")
cond = res_ops.gen_query_conditions("name",'=',"test_vpc_vr")
vpc_vr = res_ops.query_resource(res_ops.VIRTUALROUTER_VM,cond)
if not vpc_vr.inv.state is 'Paused':
test_util.test_fail(
"vpc vr [%s] is still exist after delete and expunge the project [%s]" % (vpc_vr.uuid,project_uuid))
# 9 delete
vni_range_uuid = res_ops.get_resource(res_ops.VNI_RANGE)[0].uuid
vxlan_ops.delete | action.l2NetworkUuid = l2_uuid
action.timeout = 300000
action.type = inventory.VPC_L3_NETWORK_TYPE
action.sessionUuid = session_uuid
evt = acc_ops.execute_action_with_session(action,session_uuid) | random_line_split |
bigdigits.py | def bigdigits(line_splitted):
line_splitted = list(line_splitted)
string_row_one = '-**----*--***--***---*---****--**--****--**---**--'
list_row_one = list(string_row_one)
string_row_two = '*--*--**-----*----*-*--*-*----*-------*-*--*-*--*-'
list_row_two = list(string_row_two)
string_row_three = '*--*---*---**---**--****-***--***----*---**---***-'
list_row_three = list(string_row_three)
string_row_four = '*--*---*--*-------*----*----*-*--*--*---*--*----*-'
list_row_four = list(string_row_four)
string_row_five = '-**---***-****-***-----*-***---**---*----**---**--'
list_row_five = list(string_row_five)
string_row_six = '--------------------------------------------------'
list_row_six = list(string_row_six)
rows_formatted = [list_row_one,list_row_two,list_row_three,list_row_four,list_row_five,list_row_six]
line_splitted = [ord(c.lower()) for c in line_splitted]
for x in range(0,len(line_splitted)):
if(x>=len(line_splitted)):
|
elif(line_splitted[x] >= 48 and line_splitted[x]<=57):
pass
else:
line_splitted.pop(x)
x = x-1
line_splitted = [chr(i) for i in line_splitted]
for x in range(0,6):
current_row = ''
for i in range(0,len(line_splitted)):
current_number = int(line_splitted[i])
current_row = current_row + ''.join(rows_formatted[x][current_number*5 : current_number*5+5])
print(current_row)
test_cases = open('test_cases','r')
for line in test_cases:
line_splitted = line.strip()
bigdigits(line_splitted) | pass | conditional_block |
bigdigits.py | def bigdigits(line_splitted):
line_splitted = list(line_splitted)
string_row_one = '-**----*--***--***---*---****--**--****--**---**--'
list_row_one = list(string_row_one)
string_row_two = '*--*--**-----*----*-*--*-*----*-------*-*--*-*--*-'
list_row_two = list(string_row_two)
string_row_three = '*--*---*---**---**--****-***--***----*---**---***-'
list_row_three = list(string_row_three)
string_row_four = '*--*---*--*-------*----*----*-*--*--*---*--*----*-'
list_row_four = list(string_row_four)
string_row_five = '-**---***-****-***-----*-***---**---*----**---**--'
list_row_five = list(string_row_five)
string_row_six = '--------------------------------------------------'
list_row_six = list(string_row_six)
rows_formatted = [list_row_one,list_row_two,list_row_three,list_row_four,list_row_five,list_row_six]
line_splitted = [ord(c.lower()) for c in line_splitted]
for x in range(0,len(line_splitted)):
if(x>=len(line_splitted)):
pass
elif(line_splitted[x] >= 48 and line_splitted[x]<=57):
pass
else:
line_splitted.pop(x)
x = x-1
line_splitted = [chr(i) for i in line_splitted]
for x in range(0,6):
current_row = ''
for i in range(0,len(line_splitted)): |
current_row = current_row + ''.join(rows_formatted[x][current_number*5 : current_number*5+5])
print(current_row)
test_cases = open('test_cases','r')
for line in test_cases:
line_splitted = line.strip()
bigdigits(line_splitted) |
current_number = int(line_splitted[i]) | random_line_split |
bigdigits.py | def bigdigits(line_splitted):
| rows_formatted = [list_row_one,list_row_two,list_row_three,list_row_four,list_row_five,list_row_six]
line_splitted = [ord(c.lower()) for c in line_splitted]
for x in range(0,len(line_splitted)):
if(x>=len(line_splitted)):
pass
elif(line_splitted[x] >= 48 and line_splitted[x]<=57):
pass
else:
line_splitted.pop(x)
x = x-1
line_splitted = [chr(i) for i in line_splitted]
for x in range(0,6):
current_row = ''
for i in range(0,len(line_splitted)):
current_number = int(line_splitted[i])
current_row = current_row + ''.join(rows_formatted[x][current_number*5 : current_number*5+5])
print(current_row)
test_cases = open('test_cases','r')
for line in test_cases:
line_splitted = line.strip()
bigdigits(line_splitted) | line_splitted = list(line_splitted)
string_row_one = '-**----*--***--***---*---****--**--****--**---**--'
list_row_one = list(string_row_one)
string_row_two = '*--*--**-----*----*-*--*-*----*-------*-*--*-*--*-'
list_row_two = list(string_row_two)
string_row_three = '*--*---*---**---**--****-***--***----*---**---***-'
list_row_three = list(string_row_three)
string_row_four = '*--*---*--*-------*----*----*-*--*--*---*--*----*-'
list_row_four = list(string_row_four)
string_row_five = '-**---***-****-***-----*-***---**---*----**---**--'
list_row_five = list(string_row_five)
string_row_six = '--------------------------------------------------'
list_row_six = list(string_row_six)
| identifier_body |
bigdigits.py | def | (line_splitted):
line_splitted = list(line_splitted)
string_row_one = '-**----*--***--***---*---****--**--****--**---**--'
list_row_one = list(string_row_one)
string_row_two = '*--*--**-----*----*-*--*-*----*-------*-*--*-*--*-'
list_row_two = list(string_row_two)
string_row_three = '*--*---*---**---**--****-***--***----*---**---***-'
list_row_three = list(string_row_three)
string_row_four = '*--*---*--*-------*----*----*-*--*--*---*--*----*-'
list_row_four = list(string_row_four)
string_row_five = '-**---***-****-***-----*-***---**---*----**---**--'
list_row_five = list(string_row_five)
string_row_six = '--------------------------------------------------'
list_row_six = list(string_row_six)
rows_formatted = [list_row_one,list_row_two,list_row_three,list_row_four,list_row_five,list_row_six]
line_splitted = [ord(c.lower()) for c in line_splitted]
for x in range(0,len(line_splitted)):
if(x>=len(line_splitted)):
pass
elif(line_splitted[x] >= 48 and line_splitted[x]<=57):
pass
else:
line_splitted.pop(x)
x = x-1
line_splitted = [chr(i) for i in line_splitted]
for x in range(0,6):
current_row = ''
for i in range(0,len(line_splitted)):
current_number = int(line_splitted[i])
current_row = current_row + ''.join(rows_formatted[x][current_number*5 : current_number*5+5])
print(current_row)
test_cases = open('test_cases','r')
for line in test_cases:
line_splitted = line.strip()
bigdigits(line_splitted) | bigdigits | identifier_name |
index.js | 'use strict';
Object.defineProperty(exports, "__esModule", { | });
var _oneLineCommaListsOr = require('./oneLineCommaListsOr');
var _oneLineCommaListsOr2 = _interopRequireDefault(_oneLineCommaListsOr);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.default = _oneLineCommaListsOr2.default;
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9vbmVMaW5lQ29tbWFMaXN0c09yL2luZGV4LmpzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBOzs7Ozs7QUFFQSIsImZpbGUiOiJpbmRleC5qcyIsInNvdXJjZXNDb250ZW50IjpbIid1c2Ugc3RyaWN0J1xuXG5pbXBvcnQgb25lTGluZUNvbW1hTGlzdHNPciBmcm9tICcuL29uZUxpbmVDb21tYUxpc3RzT3InXG5cbmV4cG9ydCBkZWZhdWx0IG9uZUxpbmVDb21tYUxpc3RzT3JcbiJdfQ== | value: true | random_line_split |
index.js | 'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _oneLineCommaListsOr = require('./oneLineCommaListsOr');
var _oneLineCommaListsOr2 = _interopRequireDefault(_oneLineCommaListsOr);
function _interopRequireDefault(obj) |
exports.default = _oneLineCommaListsOr2.default;
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9vbmVMaW5lQ29tbWFMaXN0c09yL2luZGV4LmpzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBOzs7Ozs7QUFFQSIsImZpbGUiOiJpbmRleC5qcyIsInNvdXJjZXNDb250ZW50IjpbIid1c2Ugc3RyaWN0J1xuXG5pbXBvcnQgb25lTGluZUNvbW1hTGlzdHNPciBmcm9tICcuL29uZUxpbmVDb21tYUxpc3RzT3InXG5cbmV4cG9ydCBkZWZhdWx0IG9uZUxpbmVDb21tYUxpc3RzT3JcbiJdfQ== | { return obj && obj.__esModule ? obj : { default: obj }; } | identifier_body |
index.js | 'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _oneLineCommaListsOr = require('./oneLineCommaListsOr');
var _oneLineCommaListsOr2 = _interopRequireDefault(_oneLineCommaListsOr);
function | (obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.default = _oneLineCommaListsOr2.default;
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9vbmVMaW5lQ29tbWFMaXN0c09yL2luZGV4LmpzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBOzs7Ozs7QUFFQSIsImZpbGUiOiJpbmRleC5qcyIsInNvdXJjZXNDb250ZW50IjpbIid1c2Ugc3RyaWN0J1xuXG5pbXBvcnQgb25lTGluZUNvbW1hTGlzdHNPciBmcm9tICcuL29uZUxpbmVDb21tYUxpc3RzT3InXG5cbmV4cG9ydCBkZWZhdWx0IG9uZUxpbmVDb21tYUxpc3RzT3JcbiJdfQ== | _interopRequireDefault | identifier_name |
test-dgram-address.js | // Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE. |
{
// IPv4 Test
const socket = dgram.createSocket('udp4');
socket.on('listening', common.mustCall(() => {
const address = socket.address();
assert.strictEqual(address.address, common.localhostIPv4);
assert.strictEqual(typeof address.port, 'number');
assert.ok(isFinite(address.port));
assert.ok(address.port > 0);
assert.strictEqual(address.family, 'IPv4');
socket.close();
}));
socket.on('error', (err) => {
socket.close();
assert.fail(`Unexpected error on udp4 socket. ${err.toString()}`);
});
socket.bind(0, common.localhostIPv4);
}
if (common.hasIPv6) {
// IPv6 Test
const socket = dgram.createSocket('udp6');
const localhost = '::1';
socket.on('listening', common.mustCall(() => {
const address = socket.address();
assert.strictEqual(address.address, localhost);
assert.strictEqual(typeof address.port, 'number');
assert.ok(isFinite(address.port));
assert.ok(address.port > 0);
assert.strictEqual(address.family, 'IPv6');
socket.close();
}));
socket.on('error', (err) => {
socket.close();
assert.fail(`Unexpected error on udp6 socket. ${err.toString()}`);
});
socket.bind(0, localhost);
}
{
// Verify that address() throws if the socket is not bound.
const socket = dgram.createSocket('udp4');
assert.throws(() => {
socket.address();
}, /^Error: getsockname EINVAL$/);
} |
'use strict';
const common = require('../common');
const assert = require('assert');
const dgram = require('dgram'); | random_line_split |
test-dgram-address.js | // Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict';
const common = require('../common');
const assert = require('assert');
const dgram = require('dgram');
{
// IPv4 Test
const socket = dgram.createSocket('udp4');
socket.on('listening', common.mustCall(() => {
const address = socket.address();
assert.strictEqual(address.address, common.localhostIPv4);
assert.strictEqual(typeof address.port, 'number');
assert.ok(isFinite(address.port));
assert.ok(address.port > 0);
assert.strictEqual(address.family, 'IPv4');
socket.close();
}));
socket.on('error', (err) => {
socket.close();
assert.fail(`Unexpected error on udp4 socket. ${err.toString()}`);
});
socket.bind(0, common.localhostIPv4);
}
if (common.hasIPv6) |
socket.bind(0, localhost);
}
{
// Verify that address() throws if the socket is not bound.
const socket = dgram.createSocket('udp4');
assert.throws(() => {
socket.address();
}, /^Error: getsockname EINVAL$/);
}
| {
// IPv6 Test
const socket = dgram.createSocket('udp6');
const localhost = '::1';
socket.on('listening', common.mustCall(() => {
const address = socket.address();
assert.strictEqual(address.address, localhost);
assert.strictEqual(typeof address.port, 'number');
assert.ok(isFinite(address.port));
assert.ok(address.port > 0);
assert.strictEqual(address.family, 'IPv6');
socket.close();
}));
socket.on('error', (err) => {
socket.close();
assert.fail(`Unexpected error on udp6 socket. ${err.toString()}`);
}); | conditional_block |
list1.py | 資料 = [1, 2, 3, 4, 5]
'''
program: list1.py
'''
print(資料[:3])
print(資料[2:])
print(資料[1:2])
a = [3, 5, 7, 11, 13]
for x in a:
if x == 7:
print('list contains 7')
break
print(list(range(10)))
for 索引 in range(-5, 6, 2):
print(索引)
squares = | in range(0, 11) ]
print(squares)
a = [10, 'sage', 3.14159]
b = a[:]
#list.pop([i]) 取出 list 中索引值為 i 的元素,預設是最後一個
print(b.pop())
print(a)
數列 = [0]*10
print(數列)
'''
delete 用法
'''
a = [1, 2, 3, 4]
print("刪除之前:", a)
del a[:2]
print("刪除之後:", a) | [ x*x for x | conditional_block |
list1.py | 資料 = [1, 2, 3, 4, 5]
'''
program: list1.py
'''
print(資料[:3])
print(資料[2:])
print(資料[1:2])
a = [3, 5, 7, 11, 13]
for x in a:
if x == 7:
print('list contains 7')
break
print(list(range(10)))
for 索引 in range(-5, 6, 2):
print(索引)
squares = [ x*x for x in range(0, 11) ]
print(squares)
a = [10, 'sage', 3.14159]
b = a[:]
#list.pop([i]) 取出 list 中索引值為 i 的元素,預設是最後一個
print(b.pop())
print(a)
數列 = [0]*10
print(數列)
'''
delete 用法
'''
| a = [1, 2, 3, 4]
print("刪除之前:", a)
del a[:2]
print("刪除之後:", a) | random_line_split |
|
main.rs | // Copyright 2015-2017 Intecture Developers.
//
// Licensed under the Mozilla Public License 2.0 <LICENSE or
// https://www.tldrlegal.com/l/mpl-2.0>. This file may not be copied,
// modified, or distributed except according to those terms.
extern crate clap;
extern crate env_logger;
#[macro_use] extern crate error_chain;
extern crate futures;
extern crate intecture_api;
#[macro_use] extern crate serde_derive;
extern crate serde_json;
extern crate tokio_core;
extern crate tokio_proto;
extern crate tokio_service;
extern crate toml;
mod errors;
use error_chain::ChainedError;
use errors::*;
use futures::{future, Future};
use intecture_api::host::local::Local;
use intecture_api::host::remote::JsonLineProto;
use intecture_api::{FromMessage, InMessage, Request};
use std::fs::File;
use std::io::{self, Read};
use std::net::SocketAddr;
use std::result;
use std::sync::Arc;
use tokio_core::reactor::Remote;
use tokio_proto::streaming::Message;
use tokio_proto::TcpServer;
use tokio_service::{NewService, Service};
pub struct Api {
host: Local,
}
pub struct | {
remote: Remote,
}
impl Service for Api {
type Request = InMessage;
type Response = InMessage;
type Error = Error;
type Future = Box<Future<Item = Self::Response, Error = Self::Error>>;
fn call(&self, req: Self::Request) -> Self::Future {
let request = match Request::from_msg(req)
.chain_err(|| "Malformed Request")
{
Ok(r) => r,
Err(e) => return Box::new(future::ok(error_to_msg(e))),
};
Box::new(request.exec(&self.host)
.chain_err(|| "Failed to execute Request")
.then(|mut result| match result {
Ok(mut msg) => {
// Wrap the reply as `{"Ok": <reply>}`, matching a serialized Rust `Result`.
{
let reply = msg.get_mut();
let mut wrapped = serde_json::Map::new();
wrapped.insert("Ok".to_owned(), reply.clone());
*reply = serde_json::Value::Object(wrapped);
}
future::ok(msg)
},
Err(e) => future::ok(error_to_msg(e))
}))
}
}
impl NewService for NewApi {
type Request = InMessage;
type Response = InMessage;
type Error = Error;
type Instance = Api;
fn new_service(&self) -> io::Result<Self::Instance> {
// XXX Danger zone! If we're running multiple threads, this `unwrap()`
// will explode. The API requires a `Handle`, but we can only send a
// `Remote` to this Service. Currently we force the `Handle`, which is
// only safe for the current thread.
// See https://github.com/alexcrichton/tokio-process/issues/23
let handle = self.remote.handle().unwrap();
Ok(Api {
host: Local::new(&handle).wait().unwrap(),
})
}
}
#[derive(Deserialize)]
struct Config {
address: SocketAddr,
}
quick_main!(|| -> Result<()> {
env_logger::init().chain_err(|| "Could not start logging")?;
let matches = clap::App::new("Intecture Agent")
.version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS"))
.about(env!("CARGO_PKG_DESCRIPTION"))
.arg(clap::Arg::with_name("config")
.short("c")
.long("config")
.value_name("FILE")
.help("Path to the agent configuration file")
.takes_value(true))
.arg(clap::Arg::with_name("addr")
.short("a")
.long("address")
.value_name("ADDR")
.help("Set the socket address this server will listen on (e.g. 0.0.0.0:7101)")
.takes_value(true))
.group(clap::ArgGroup::with_name("config_or_else")
.args(&["config", "addr"])
.required(true))
.get_matches();
let config = if let Some(c) = matches.value_of("config") {
let mut fh = File::open(c).chain_err(|| "Could not open config file")?;
let mut buf = Vec::new();
fh.read_to_end(&mut buf).chain_err(|| "Could not read config file")?;
toml::from_slice(&buf).chain_err(|| "Config file contained invalid TOML")?
} else {
let address = matches.value_of("addr").unwrap().parse().chain_err(|| "Invalid server address")?;
Config { address }
};
// XXX We can only run a single thread here, or big boom!!
// The API requires a `Handle`, but we can only send a `Remote`.
// Currently we force the issue (`unwrap()`), which is only safe
// for the current thread.
// See https://github.com/alexcrichton/tokio-process/issues/23
let server = TcpServer::new(JsonLineProto, config.address);
server.with_handle(move |handle| {
Arc::new(NewApi {
remote: handle.remote().clone(),
})
});
Ok(())
});
fn error_to_msg(e: Error) -> InMessage {
let response: result::Result<(), String> = Err(format!("{}", e.display_chain()));
// If we can't serialize this, we can't serialize anything, so
// panicking is appropriate.
let value = serde_json::to_value(response)
.expect("Cannot serialize ResponseResult::Err. This is bad...");
Message::WithoutBody(value)
}
| NewApi | identifier_name |
main.rs | // Copyright 2015-2017 Intecture Developers.
//
// Licensed under the Mozilla Public License 2.0 <LICENSE or
// https://www.tldrlegal.com/l/mpl-2.0>. This file may not be copied,
// modified, or distributed except according to those terms.
extern crate clap;
extern crate env_logger;
#[macro_use] extern crate error_chain;
extern crate futures;
extern crate intecture_api;
#[macro_use] extern crate serde_derive;
extern crate serde_json;
extern crate tokio_core;
extern crate tokio_proto;
extern crate tokio_service;
extern crate toml;
mod errors;
use error_chain::ChainedError;
use errors::*;
use futures::{future, Future};
use intecture_api::host::local::Local;
use intecture_api::host::remote::JsonLineProto;
use intecture_api::{FromMessage, InMessage, Request};
use std::fs::File;
use std::io::{self, Read};
use std::net::SocketAddr;
use std::result;
use std::sync::Arc;
use tokio_core::reactor::Remote;
use tokio_proto::streaming::Message;
use tokio_proto::TcpServer;
use tokio_service::{NewService, Service};
pub struct Api {
host: Local,
}
pub struct NewApi {
remote: Remote,
}
impl Service for Api {
type Request = InMessage;
type Response = InMessage;
type Error = Error;
type Future = Box<Future<Item = Self::Response, Error = Self::Error>>;
fn call(&self, req: Self::Request) -> Self::Future |
}
impl NewService for NewApi {
type Request = InMessage;
type Response = InMessage;
type Error = Error;
type Instance = Api;
fn new_service(&self) -> io::Result<Self::Instance> {
// XXX Danger zone! If we're running multiple threads, this `unwrap()`
// will explode. The API requires a `Handle`, but we can only send a
// `Remote` to this Service. Currently we force the `Handle`, which is
// only safe for the current thread.
// See https://github.com/alexcrichton/tokio-process/issues/23
let handle = self.remote.handle().unwrap();
Ok(Api {
host: Local::new(&handle).wait().unwrap(),
})
}
}
#[derive(Deserialize)]
struct Config {
address: SocketAddr,
}
quick_main!(|| -> Result<()> {
env_logger::init().chain_err(|| "Could not start logging")?;
let matches = clap::App::new("Intecture Agent")
.version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS"))
.about(env!("CARGO_PKG_DESCRIPTION"))
.arg(clap::Arg::with_name("config")
.short("c")
.long("config")
.value_name("FILE")
.help("Path to the agent configuration file")
.takes_value(true))
.arg(clap::Arg::with_name("addr")
.short("a")
.long("address")
.value_name("ADDR")
.help("Set the socket address this server will listen on (e.g. 0.0.0.0:7101)")
.takes_value(true))
.group(clap::ArgGroup::with_name("config_or_else")
.args(&["config", "addr"])
.required(true))
.get_matches();
let config = if let Some(c) = matches.value_of("config") {
let mut fh = File::open(c).chain_err(|| "Could not open config file")?;
let mut buf = Vec::new();
fh.read_to_end(&mut buf).chain_err(|| "Could not read config file")?;
toml::from_slice(&buf).chain_err(|| "Config file contained invalid TOML")?
} else {
let address = matches.value_of("addr").unwrap().parse().chain_err(|| "Invalid server address")?;
Config { address }
};
// XXX We can only run a single thread here, or big boom!!
// The API requires a `Handle`, but we can only send a `Remote`.
// Currently we force the issue (`unwrap()`), which is only safe
// for the current thread.
// See https://github.com/alexcrichton/tokio-process/issues/23
let server = TcpServer::new(JsonLineProto, config.address);
server.with_handle(move |handle| {
Arc::new(NewApi {
remote: handle.remote().clone(),
})
});
Ok(())
});
fn error_to_msg(e: Error) -> InMessage {
let response: result::Result<(), String> = Err(format!("{}", e.display_chain()));
// If we can't serialize this, we can't serialize anything, so
// panicking is appropriate.
let value = serde_json::to_value(response)
.expect("Cannot serialize ResponseResult::Err. This is bad...");
Message::WithoutBody(value)
}
| {
let request = match Request::from_msg(req)
.chain_err(|| "Malformed Request")
{
Ok(r) => r,
Err(e) => return Box::new(future::ok(error_to_msg(e))),
};
Box::new(request.exec(&self.host)
.chain_err(|| "Failed to execute Request")
.then(|mut result| match result {
Ok(mut msg) => {
// Wrap the reply as `{"Ok": <reply>}`, matching a serialized Rust `Result`.
{
let reply = msg.get_mut();
let mut wrapped = serde_json::Map::new();
wrapped.insert("Ok".to_owned(), reply.clone());
*reply = serde_json::Value::Object(wrapped);
}
future::ok(msg)
},
Err(e) => future::ok(error_to_msg(e))
}))
} | identifier_body |
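// Illustrative sketch (added; not part of the original file): the `{"Ok": ...}`
// wrapper above mirrors serde's externally tagged encoding of `Result`, which
// is also what `error_to_msg` produces for the `Err` side.
fn _wire_format_sketch() {
let ok: Result<u32, String> = Ok(7);
assert_eq!(serde_json::to_string(&ok).unwrap(), r#"{"Ok":7}"#);
let err: Result<u32, String> = Err("boom".to_owned());
assert_eq!(serde_json::to_string(&err).unwrap(), r#"{"Err":"boom"}"#);
}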
main.rs | // Copyright 2015-2017 Intecture Developers.
//
// Licensed under the Mozilla Public License 2.0 <LICENSE or
// https://www.tldrlegal.com/l/mpl-2.0>. This file may not be copied,
// modified, or distributed except according to those terms.
extern crate clap;
extern crate env_logger;
#[macro_use] extern crate error_chain;
extern crate futures;
extern crate intecture_api;
#[macro_use] extern crate serde_derive;
extern crate serde_json;
extern crate tokio_core;
extern crate tokio_proto;
extern crate tokio_service;
extern crate toml;
mod errors;
use error_chain::ChainedError;
use errors::*;
use futures::{future, Future};
use intecture_api::host::local::Local;
use intecture_api::host::remote::JsonLineProto;
use intecture_api::{FromMessage, InMessage, Request};
use std::fs::File;
use std::io::{self, Read};
use std::net::SocketAddr;
use std::result;
use std::sync::Arc;
use tokio_core::reactor::Remote;
use tokio_proto::streaming::Message;
use tokio_proto::TcpServer;
use tokio_service::{NewService, Service};
pub struct Api {
host: Local,
}
pub struct NewApi {
remote: Remote,
}
impl Service for Api {
type Request = InMessage;
type Response = InMessage;
type Error = Error;
type Future = Box<Future<Item = Self::Response, Error = Self::Error>>;
fn call(&self, req: Self::Request) -> Self::Future {
let request = match Request::from_msg(req)
.chain_err(|| "Malformed Request")
{
Ok(r) => r,
Err(e) => return Box::new(future::ok(error_to_msg(e))),
};
Box::new(request.exec(&self.host)
.chain_err(|| "Failed to execute Request")
.then(|mut result| match result {
Ok(mut msg) => {
// Wrap the reply as `{"Ok": <reply>}`, matching a serialized Rust `Result`.
{
let reply = msg.get_mut();
let mut wrapped = serde_json::Map::new();
wrapped.insert("Ok".to_owned(), reply.clone());
*reply = serde_json::Value::Object(wrapped);
}
future::ok(msg)
},
Err(e) => future::ok(error_to_msg(e))
}))
}
}
impl NewService for NewApi {
type Request = InMessage;
type Response = InMessage;
type Error = Error;
type Instance = Api;
fn new_service(&self) -> io::Result<Self::Instance> {
// XXX Danger zone! If we're running multiple threads, this `unwrap()`
// will explode. The API requires a `Handle`, but we can only send a
// `Remote` to this Service. Currently we force the `Handle`, which is
// only safe for the current thread.
// See https://github.com/alexcrichton/tokio-process/issues/23
let handle = self.remote.handle().unwrap();
Ok(Api {
host: Local::new(&handle).wait().unwrap(),
})
}
}
#[derive(Deserialize)]
struct Config {
address: SocketAddr,
}
| quick_main!(|| -> Result<()> {
env_logger::init().chain_err(|| "Could not start logging")?;
let matches = clap::App::new("Intecture Agent")
.version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS"))
.about(env!("CARGO_PKG_DESCRIPTION"))
.arg(clap::Arg::with_name("config")
.short("c")
.long("config")
.value_name("FILE")
.help("Path to the agent configuration file")
.takes_value(true))
.arg(clap::Arg::with_name("addr")
.short("a")
.long("address")
.value_name("ADDR")
.help("Set the socket address this server will listen on (e.g. 0.0.0.0:7101)")
.takes_value(true))
.group(clap::ArgGroup::with_name("config_or_else")
.args(&["config", "addr"])
.required(true))
.get_matches();
let config = if let Some(c) = matches.value_of("config") {
let mut fh = File::open(c).chain_err(|| "Could not open config file")?;
let mut buf = Vec::new();
fh.read_to_end(&mut buf).chain_err(|| "Could not read config file")?;
toml::from_slice(&buf).chain_err(|| "Config file contained invalid TOML")?
} else {
let address = matches.value_of("addr").unwrap().parse().chain_err(|| "Invalid server address")?;
Config { address }
};
// XXX We can only run a single thread here, or big boom!!
// The API requires a `Handle`, but we can only send a `Remote`.
// Currently we force the issue (`unwrap()`), which is only safe
// for the current thread.
// See https://github.com/alexcrichton/tokio-process/issues/23
let server = TcpServer::new(JsonLineProto, config.address);
server.with_handle(move |handle| {
Arc::new(NewApi {
remote: handle.remote().clone(),
})
});
Ok(())
});
fn error_to_msg(e: Error) -> InMessage {
let response: result::Result<(), String> = Err(format!("{}", e.display_chain()));
// If we can't serialize this, we can't serialize anything, so
// panicking is appropriate.
let value = serde_json::to_value(response)
.expect("Cannot serialize ResponseResult::Err. This is bad...");
Message::WithoutBody(value)
} | random_line_split |
|
UnsavedChangesPrompt.tsx | import React, { useState, useEffect, SetStateAction } from 'react';
import { Location } from 'history';
import { Prompt, Redirect } from 'react-router';
import { Modal } from './Modal';
import { ModalConfirmation } from "./ModalConfirmation";
export interface UnsavedChangesPromptProps {
showModal: boolean;
setShowModal: React.Dispatch<SetStateAction<boolean>>;
hasUnsavedChanges: boolean;
unSavedChangesDeps?: any[] | undefined;
cancelButtonLabel?: string;
confirmButtonLabel?: string;
handleConfirm?: () => void;
handleCancel?: () => void;
whiteList?: (location: Location<any>) => boolean;
delayConfirmation?: boolean;
}
const CONFIRM_DELAY_MS = 3000;
export const UnsavedChangesPrompt: React.FC<UnsavedChangesPromptProps> = ({
showModal,
setShowModal,
hasUnsavedChanges,
unSavedChangesDeps,
cancelButtonLabel,
confirmButtonLabel,
handleConfirm,
handleCancel,
whiteList,
delayConfirmation,
children
}) => {
const [nextLocation, setNextLocation] = useState<Location>();
const [okConfirmed, setOkConfirmed] = useState(false);
const [confirmDisabled, setConfirmDisabled] = useState(true);
const navAwayHandler = (location: Location<any>) => {
if (whiteList && whiteList(location)) {
return true;
}
if (!okConfirmed) {
setShowModal(true);
}
setNextLocation(location);
return okConfirmed;
};
const modalConfirmHandler = () => {
setShowModal(false);
setOkConfirmed(true);
}
const modalCancelHandler = () => {
setShowModal(false);
handleCancel && handleCancel();
}
useEffect(windowUnloadEffect(hasUnsavedChanges), unSavedChangesDeps);
useEffect(() => {
console.debug('UnsavedChangesPrompt#useEffect(showModal)');
if (showModal) {
setConfirmDisabled(true);
setTimeout(() => setConfirmDisabled(false), CONFIRM_DELAY_MS);
} else {
setConfirmDisabled(false);
}
}, [showModal]);
useEffect(() => {
console.debug('UnsavedChangesPrompt#useEffect(okConfirmed)'); | if (okConfirmed) {
handleConfirm && handleConfirm();
}
}, [okConfirmed]);
return (
<>
<Prompt when={hasUnsavedChanges} message={(nextLocation) => navAwayHandler(nextLocation)} />
<Modal isActive={showModal}>
<ModalConfirmation
cancelButtonLabel={cancelButtonLabel || 'No, Cancel'}
cancelHandler={modalCancelHandler}
confirmButtonLabel={confirmButtonLabel || "Yes, I'm sure"}
confirmHandler={modalConfirmHandler}
isActive={showModal}
isConfirmDisabled={delayConfirmation !== false && confirmDisabled}
>
{children}
</ModalConfirmation>
</Modal>
{okConfirmed && nextLocation && <Redirect to={nextLocation} />}
</>
);
}
export function windowUnloadEffect(hasUnsavedChanges: boolean): React.EffectCallback {
return () => {
console.debug('UnsavedChangesPrompt#useEffect()');
const listener = function (ev: BeforeUnloadEvent) {
if (hasUnsavedChanges) {
ev.preventDefault();
ev.returnValue = false;
}
else {
delete ev['returnValue'];
}
};
window.addEventListener('beforeunload', listener);
return () => {
window.removeEventListener('beforeunload', listener);
};
};
} | random_line_split |
|
UnsavedChangesPrompt.tsx | import React, { useState, useEffect, SetStateAction } from 'react';
import { Location } from 'history';
import { Prompt, Redirect } from 'react-router';
import { Modal } from './Modal';
import { ModalConfirmation } from "./ModalConfirmation";
export interface UnsavedChangesPromptProps {
showModal: boolean;
setShowModal: React.Dispatch<SetStateAction<boolean>>;
hasUnsavedChanges: boolean;
unSavedChangesDeps?: any[] | undefined;
cancelButtonLabel?: string;
confirmButtonLabel?: string;
handleConfirm?: () => void;
handleCancel?: () => void;
whiteList?: (location: Location<any>) => boolean;
delayConfirmation?: boolean;
}
const CONFIRM_DELAY_MS = 3000;
export const UnsavedChangesPrompt: React.FC<UnsavedChangesPromptProps> = ({
showModal,
setShowModal,
hasUnsavedChanges,
unSavedChangesDeps,
cancelButtonLabel,
confirmButtonLabel,
handleConfirm,
handleCancel,
whiteList,
delayConfirmation,
children
}) => {
const [nextLocation, setNextLocation] = useState<Location>();
const [okConfirmed, setOkConfirmed] = useState(false);
const [confirmDisabled, setConfirmDisabled] = useState(true);
const navAwayHandler = (location: Location<any>) => {
if (whiteList && whiteList(location)) |
if (!okConfirmed) {
setShowModal(true);
}
setNextLocation(location);
return okConfirmed;
};
const modalConfirmHandler = () => {
setShowModal(false);
setOkConfirmed(true);
}
const modalCancelHandler = () => {
setShowModal(false);
handleCancel && handleCancel();
}
useEffect(windowUnloadEffect(hasUnsavedChanges), unSavedChangesDeps);
useEffect(() => {
console.debug('UnsavedChangesPrompt#useEffect(showModal)');
if (showModal) {
setConfirmDisabled(true);
setTimeout(() => setConfirmDisabled(false), CONFIRM_DELAY_MS);
} else {
setConfirmDisabled(false);
}
}, [showModal]);
useEffect(() => {
console.debug('UnsavedChangesPrompt#useEffect(okConfirmed)');
if (okConfirmed) {
handleConfirm && handleConfirm();
}
}, [okConfirmed]);
return (
<>
<Prompt when={hasUnsavedChanges} message={(nextLocation) => navAwayHandler(nextLocation)} />
<Modal isActive={showModal}>
<ModalConfirmation
cancelButtonLabel={cancelButtonLabel || 'No, Cancel'}
cancelHandler={modalCancelHandler}
confirmButtonLabel={confirmButtonLabel || "Yes, I'm sure"}
confirmHandler={modalConfirmHandler}
isActive={showModal}
isConfirmDisabled={delayConfirmation !== false && confirmDisabled}
>
{children}
</ModalConfirmation>
</Modal>
{okConfirmed && nextLocation && <Redirect to={nextLocation} />}
</>
);
}
export function windowUnloadEffect(hasUnsavedChanges: boolean): React.EffectCallback {
return () => {
console.debug('UnsavedChangesPrompt#useEffect()');
const listener = function (ev: BeforeUnloadEvent) {
if (hasUnsavedChanges) {
ev.preventDefault();
ev.returnValue = false;
}
else {
delete ev['returnValue'];
}
};
window.addEventListener('beforeunload', listener);
return () => {
window.removeEventListener('beforeunload', listener);
};
};
}
| {
return true;
} | conditional_block |
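// Usage sketch (added for illustration; the surrounding component and the
// isDirty flag are hypothetical): wrap a dirty-form flag to block navigation.
// <UnsavedChangesPrompt
//   showModal={showModal}
//   setShowModal={setShowModal}
//   hasUnsavedChanges={isDirty}
//   unSavedChangesDeps={[isDirty]}
// >
//   You have unsaved changes. Are you sure you want to leave?
// </UnsavedChangesPrompt>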
array.rs | //! Array with SIMD alignment
use crate::types::*;
use ffi;
use num_traits::Zero;
use std::ops::{Deref, DerefMut};
use std::os::raw::c_void;
use std::slice::{from_raw_parts, from_raw_parts_mut};
/// A RAII-wrapper of `fftw_alloc` and `fftw_free` with the [SIMD alignment].
///
/// [SIMD alignment]: http://www.fftw.org/fftw3_doc/SIMD-alignment-and-fftw_005fmalloc.html
#[derive(Debug)]
pub struct AlignedVec<T> {
n: usize,
data: *mut T,
}
/// Allocate SIMD-aligned memory of Real/Complex type
pub trait AlignedAllocable: Zero + Clone + Copy + Sized {
/// Allocate SIMD-aligned memory
unsafe fn alloc(n: usize) -> *mut Self;
}
impl AlignedAllocable for f64 {
unsafe fn alloc(n: usize) -> *mut Self {
ffi::fftw_alloc_real(n)
}
}
impl AlignedAllocable for f32 {
unsafe fn alloc(n: usize) -> *mut Self {
ffi::fftwf_alloc_real(n)
}
}
impl AlignedAllocable for c64 {
unsafe fn alloc(n: usize) -> *mut Self {
ffi::fftw_alloc_complex(n)
}
}
impl AlignedAllocable for c32 {
unsafe fn alloc(n: usize) -> *mut Self {
ffi::fftwf_alloc_complex(n)
}
}
impl<T> AlignedVec<T> {
pub fn as_slice(&self) -> &[T] {
unsafe { from_raw_parts(self.data, self.n) }
}
pub fn as_slice_mut(&mut self) -> &mut [T] {
unsafe { from_raw_parts_mut(self.data, self.n) }
}
}
impl<T> Deref for AlignedVec<T> {
type Target = [T];
fn deref(&self) -> &[T] {
self.as_slice()
}
}
impl<T> DerefMut for AlignedVec<T> {
fn deref_mut(&mut self) -> &mut [T] {
self.as_slice_mut()
}
}
impl<T> AlignedVec<T>
where
T: AlignedAllocable,
{
/// Create array with `fftw_malloc` (`fftw_free` will be automatically called by `Drop` trait)
pub fn | (n: usize) -> Self {
let ptr = excall! { T::alloc(n) };
let mut vec = AlignedVec { n: n, data: ptr };
for v in vec.iter_mut() {
*v = T::zero();
}
vec
}
}
impl<T> Drop for AlignedVec<T> {
fn drop(&mut self) {
excall! { ffi::fftw_free(self.data as *mut c_void) };
}
}
impl<T> Clone for AlignedVec<T>
where
T: AlignedAllocable,
{
fn clone(&self) -> Self {
let mut new_vec = Self::new(self.n);
new_vec.copy_from_slice(self);
new_vec
}
}
pub type Alignment = i32;
/// Check the alignment of slice
///
/// ```
/// # use fftw::array::*;
/// let a = AlignedVec::<f32>::new(123);
/// assert_eq!(alignment_of(&a), 0); // aligned
/// ```
pub fn alignment_of<T>(a: &[T]) -> Alignment {
unsafe { ffi::fftw_alignment_of(a.as_ptr() as *mut _) }
}
| new | identifier_name |
array.rs | //! Array with SIMD alignment
use crate::types::*;
use ffi;
use num_traits::Zero;
use std::ops::{Deref, DerefMut};
use std::os::raw::c_void;
use std::slice::{from_raw_parts, from_raw_parts_mut};
/// A RAII-wrapper of `fftw_alloc` and `fftw_free` with the [SIMD alignment].
///
/// [SIMD alignment]: http://www.fftw.org/fftw3_doc/SIMD-alignment-and-fftw_005fmalloc.html
#[derive(Debug)]
pub struct AlignedVec<T> {
n: usize,
data: *mut T,
}
/// Allocate SIMD-aligned memory of Real/Complex type
pub trait AlignedAllocable: Zero + Clone + Copy + Sized {
/// Allocate SIMD-aligned memory
unsafe fn alloc(n: usize) -> *mut Self;
}
impl AlignedAllocable for f64 {
unsafe fn alloc(n: usize) -> *mut Self |
}
impl AlignedAllocable for f32 {
unsafe fn alloc(n: usize) -> *mut Self {
ffi::fftwf_alloc_real(n)
}
}
impl AlignedAllocable for c64 {
unsafe fn alloc(n: usize) -> *mut Self {
ffi::fftw_alloc_complex(n)
}
}
impl AlignedAllocable for c32 {
unsafe fn alloc(n: usize) -> *mut Self {
ffi::fftwf_alloc_complex(n)
}
}
impl<T> AlignedVec<T> {
pub fn as_slice(&self) -> &[T] {
unsafe { from_raw_parts(self.data, self.n) }
}
pub fn as_slice_mut(&mut self) -> &mut [T] {
unsafe { from_raw_parts_mut(self.data, self.n) }
}
}
impl<T> Deref for AlignedVec<T> {
type Target = [T];
fn deref(&self) -> &[T] {
self.as_slice()
}
}
impl<T> DerefMut for AlignedVec<T> {
fn deref_mut(&mut self) -> &mut [T] {
self.as_slice_mut()
}
}
impl<T> AlignedVec<T>
where
T: AlignedAllocable,
{
/// Create array with `fftw_malloc` (`fftw_free` will be automatically called by `Drop` trait)
pub fn new(n: usize) -> Self {
let ptr = excall! { T::alloc(n) };
let mut vec = AlignedVec { n: n, data: ptr };
for v in vec.iter_mut() {
*v = T::zero();
}
vec
}
}
impl<T> Drop for AlignedVec<T> {
fn drop(&mut self) {
excall! { ffi::fftw_free(self.data as *mut c_void) };
}
}
impl<T> Clone for AlignedVec<T>
where
T: AlignedAllocable,
{
fn clone(&self) -> Self {
let mut new_vec = Self::new(self.n);
new_vec.copy_from_slice(self);
new_vec
}
}
pub type Alignment = i32;
/// Check the alignment of slice
///
/// ```
/// # use fftw::array::*;
/// let a = AlignedVec::<f32>::new(123);
/// assert_eq!(alignment_of(&a), 0); // aligned
/// ```
pub fn alignment_of<T>(a: &[T]) -> Alignment {
unsafe { ffi::fftw_alignment_of(a.as_ptr() as *mut _) }
}
| {
ffi::fftw_alloc_real(n)
} | identifier_body |
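// Usage sketch (added for illustration): AlignedVec allocates with FFTW's
// aligned allocator, zero-fills, and frees on Drop, so it suits plans that
// require SIMD-aligned buffers.
fn _aligned_vec_sketch() {
let mut buf = AlignedVec::<f64>::new(1024);
buf[0] = 1.0; // DerefMut exposes the buffer as &mut [f64]
assert_eq!(alignment_of(&buf), 0); // 0 means SIMD-aligned
}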
array.rs | //! Array with SIMD alignment
use crate::types::*;
use ffi;
use num_traits::Zero;
use std::ops::{Deref, DerefMut};
use std::os::raw::c_void;
use std::slice::{from_raw_parts, from_raw_parts_mut};
/// A RAII-wrapper of `fftw_alloc` and `fftw_free` with the [SIMD alignment].
///
/// [SIMD alignment]: http://www.fftw.org/fftw3_doc/SIMD-alignment-and-fftw_005fmalloc.html
#[derive(Debug)]
pub struct AlignedVec<T> {
n: usize,
data: *mut T,
}
/// Allocate SIMD-aligned memory of Real/Complex type
pub trait AlignedAllocable: Zero + Clone + Copy + Sized {
/// Allocate SIMD-aligned memory
unsafe fn alloc(n: usize) -> *mut Self;
}
impl AlignedAllocable for f64 {
unsafe fn alloc(n: usize) -> *mut Self {
ffi::fftw_alloc_real(n)
}
}
impl AlignedAllocable for f32 {
unsafe fn alloc(n: usize) -> *mut Self {
ffi::fftwf_alloc_real(n)
}
} | ffi::fftw_alloc_complex(n)
}
}
impl AlignedAllocable for c32 {
unsafe fn alloc(n: usize) -> *mut Self {
ffi::fftwf_alloc_complex(n)
}
}
impl<T> AlignedVec<T> {
pub fn as_slice(&self) -> &[T] {
unsafe { from_raw_parts(self.data, self.n) }
}
pub fn as_slice_mut(&mut self) -> &mut [T] {
unsafe { from_raw_parts_mut(self.data, self.n) }
}
}
impl<T> Deref for AlignedVec<T> {
type Target = [T];
fn deref(&self) -> &[T] {
self.as_slice()
}
}
impl<T> DerefMut for AlignedVec<T> {
fn deref_mut(&mut self) -> &mut [T] {
self.as_slice_mut()
}
}
impl<T> AlignedVec<T>
where
T: AlignedAllocable,
{
/// Create array with `fftw_malloc` (`fftw_free` will be automatically called by `Drop` trait)
pub fn new(n: usize) -> Self {
let ptr = excall! { T::alloc(n) };
let mut vec = AlignedVec { n: n, data: ptr };
for v in vec.iter_mut() {
*v = T::zero();
}
vec
}
}
impl<T> Drop for AlignedVec<T> {
fn drop(&mut self) {
excall! { ffi::fftw_free(self.data as *mut c_void) };
}
}
impl<T> Clone for AlignedVec<T>
where
T: AlignedAllocable,
{
fn clone(&self) -> Self {
let mut new_vec = Self::new(self.n);
new_vec.copy_from_slice(self);
new_vec
}
}
pub type Alignment = i32;
/// Check the alignment of slice
///
/// ```
/// # use fftw::array::*;
/// let a = AlignedVec::<f32>::new(123);
/// assert_eq!(alignment_of(&a), 0); // aligned
/// ```
pub fn alignment_of<T>(a: &[T]) -> Alignment {
unsafe { ffi::fftw_alignment_of(a.as_ptr() as *mut _) }
} |
impl AlignedAllocable for c64 {
unsafe fn alloc(n: usize) -> *mut Self { | random_line_split |
fuzz_one.py | #!/usr/bin/env python
# Copyright 2020 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Helper script to execute a single-processed fuzzing session.
Creates fuzz tests in workdir/output/dir-<dir number>/fuzz-XXX.js.
Expects the <dir number> as single parameter.
"""
import os
import subprocess
import sys
import time
BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
APP_DIR = os.path.join(BASE_PATH, 'workdir', 'app_dir')
FUZZ_EXE = os.path.join(BASE_PATH, 'workdir', 'fuzzer', 'ochang_js_fuzzer')
INPUT_DIR = os.path.join(BASE_PATH, 'workdir', 'input')
TEST_CASES = os.path.join(BASE_PATH, 'workdir', 'output')
COUNT = 64
FUZZ = ('FUZZ_MODE=foozzie APP_NAME=d8 APP_DIR=%s %s -o %%s -n %s -i %s > %%s'
% (APP_DIR, FUZZ_EXE, COUNT, INPUT_DIR))
assert(len(sys.argv) > 1)
dir_number = int(sys.argv[1])
assert(dir_number >= 0)
path = os.path.join(TEST_CASES, 'dir-%d' % dir_number)
assert not os.path.exists(path), 'Need fresh workdir for fuzzing'
os.makedirs(path)
start = time.time()
subprocess.check_call(
FUZZ % (path, os.path.join(path, 'out.log')), shell=True)
duration = int(time.time() - start) | with open(os.path.join(path, 'duration.log'), 'w') as f:
f.write(str(duration)) | random_line_split |
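# Invocation sketch (added for illustration; the directory number is arbitrary):
#   python fuzz_one.py 3
# creates workdir/output/dir-3/, fills it with fuzz-XXX.js cases, and writes
# out.log plus duration.log alongside them.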
|
RenderableBehavior.ts | import {Node} from '../core/Node.js'
/**
* Base class for behaviors relating to rendering. This is for any behavior that renders with CSS or WebGL rendering.
*/
export abstract class RenderableBehavior extends Behavior {
requiredElementType() {
return [Node]
}
connectedCallback() {
super.connectedCallback()
this.loadGL()
this.element.on(Events.BEHAVIOR_GL_LOAD, this.loadGL, this)
this.element.on(Events.BEHAVIOR_GL_UNLOAD, this.unloadGL, this)
}
disconnectedCallback() {
super.disconnectedCallback()
this.unloadGL()
this.element.off(Events.BEHAVIOR_GL_LOAD, this.loadGL, this)
this.element.off(Events.BEHAVIOR_GL_UNLOAD, this.unloadGL, this)
}
get glLoaded() {
return this._glLoaded
}
_glLoaded = false
get cssLoaded() {
return this._cssLoaded
}
_cssLoaded = false
loadGL(): boolean {
if (!this.element.three) return false
if (this._glLoaded) return false
this._glLoaded = true
return true
}
unloadGL(): boolean {
if (!this._glLoaded) return false
this._glLoaded = false
return true
}
} | import {Behavior} from './Behavior.js'
import {Events} from '../core/Events.js' | random_line_split |
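// Usage sketch (added for illustration; the subclass is hypothetical): a
// concrete behavior opts into the GL lifecycle by extending the hooks above.
// class SpinBehavior extends RenderableBehavior {
// 	loadGL(): boolean {
// 		if (!super.loadGL()) return false // honor the loaded/three guards
// 		// create WebGL (three.js) resources here
// 		return true
// 	}
// 	unloadGL(): boolean {
// 		if (!super.unloadGL()) return false
// 		// dispose WebGL resources here
// 		return true
// 	}
// }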
|
RenderableBehavior.ts | import {Behavior} from './Behavior.js'
import {Events} from '../core/Events.js'
import {Node} from '../core/Node.js'
/**
* Base class for behaviors relating to rendering. This is for any behavior that renders with CSS or WebGL rendering.
*/
export abstract class RenderableBehavior extends Behavior {
requiredElementType() {
return [Node]
}
connectedCallback() {
super.connectedCallback()
this.loadGL()
this.element.on(Events.BEHAVIOR_GL_LOAD, this.loadGL, this)
this.element.on(Events.BEHAVIOR_GL_UNLOAD, this.unloadGL, this)
}
disconnectedCallback() {
super.disconnectedCallback()
this.unloadGL()
this.element.off(Events.BEHAVIOR_GL_LOAD, this.loadGL, this)
this.element.off(Events.BEHAVIOR_GL_UNLOAD, this.unloadGL, this)
}
get glLoaded() {
return this._glLoaded
}
_glLoaded = false
get cssLoaded() |
_cssLoaded = false
loadGL(): boolean {
if (!this.element.three) return false
if (this._glLoaded) return false
this._glLoaded = true
return true
}
unloadGL(): boolean {
if (!this._glLoaded) return false
this._glLoaded = false
return true
}
}
| {
return this._cssLoaded
} | identifier_body |
RenderableBehavior.ts | import {Behavior} from './Behavior.js'
import {Events} from '../core/Events.js'
import {Node} from '../core/Node.js'
/**
* Base class for behaviors relating to rendering. This is for any behavior that renders with CSS or WebGL rendering.
*/
export abstract class RenderableBehavior extends Behavior {
requiredElementType() {
return [Node]
}
connectedCallback() {
super.connectedCallback()
this.loadGL()
this.element.on(Events.BEHAVIOR_GL_LOAD, this.loadGL, this)
this.element.on(Events.BEHAVIOR_GL_UNLOAD, this.unloadGL, this)
}
disconnectedCallback() {
super.disconnectedCallback()
this.unloadGL()
this.element.off(Events.BEHAVIOR_GL_LOAD, this.loadGL, this)
this.element.off(Events.BEHAVIOR_GL_UNLOAD, this.unloadGL, this)
}
get glLoaded() {
return this._glLoaded
}
_glLoaded = false
get | () {
return this._cssLoaded
}
_cssLoaded = false
loadGL(): boolean {
if (!this.element.three) return false
if (this._glLoaded) return false
this._glLoaded = true
return true
}
unloadGL(): boolean {
if (!this._glLoaded) return false
this._glLoaded = false
return true
}
}
| cssLoaded | identifier_name |
classes.js | var _ = require("lodash");
var Q = require("q");
var utils = require("./utils.js");
_.indexBy = utils.getIndexBy(_);
// Craql 4.2
// Each vertex has an ID, a set of
// properties, and a set of labelled edges. Vertices are stored in a hash table (dictionary) structure
// indexed by the ID for quick lookup. The set of labelled edges is maintained as a hash table
// indexed by label. Each edge is a 3-tuple of a start ID, end ID, and a hash of properties. Edges
// only refer to their start and end vertices by an ID rather than a reference, because the data may
// not be present yet in the graph. This allows for lazily loading vertices as required, but means
// that vertex IDs are duplicated by edges.
function | (vertices, edges) {
this.vertices = _.indexBy(vertices, 'id');
this.edges = _.indexBy(edges, 'label');
}
Graph.prototype = {
addVertex: function(vertex) {this.vertices[vertex.id] = vertex},
addEdge: function(edge) {this.edges[edge.label] = edge}
};
function Vertex(id, properties) {
this.id = id;
this.properties = properties;
this.edges = [];
}
Vertex.prototype = {
addEdge: function(edge) {
// TODO?: check if this edge is already present before pushing it
this.edges.push(edge);
}
};
function Edge(label, startId, endId, properties) {
return {
label: label,
startId: startId,
endId: endId,
properties: properties
};
}
Edge.prototype = {};
module.exports = {
Graph: Graph,
Vertex: Vertex,
Edge: Edge
}
| Graph | identifier_name |
classes.js | var _ = require("lodash");
var Q = require("q");
var utils = require("./utils.js");
_.indexBy = utils.getIndexBy(_);
// Craql 4.2
// Each vertex has an ID, a set of
// properties, and a set of labelled edges. Vertices are stored in a hash table (dictionary) structure
// indexed by the ID for quick lookup. The set of labelled edges is maintained as a hash table
// indexed by label. Each edge is a 3-tuple of a start ID, end ID, and a hash of properties. Edges
// only refer to their start and end vertices by an ID rather than a reference, because the data may
// not be present yet in the graph. This allows for lazily loading vertices as required, but means
// that vertex IDs are duplicated by edges.
function Graph(vertices, edges) {
this.vertices = _.indexBy(vertices, 'id');
this.edges = _.indexBy(edges, 'label');
}
Graph.prototype = {
addVertex: function(vertex) {this.vertices[vertex.id] = vertex},
addEdge: function(edge) {this.edges[edge.label] = edge}
};
function Vertex(id, properties) {
this.id = id;
this.properties = properties;
this.edges = [];
}
Vertex.prototype = {
addEdge: function(edge) {
// TODO?: check if this edge is already present before pushing it
this.edges.push(edge);
}
};
function Edge(label, startId, endId, properties) |
Edge.prototype = {};
module.exports = {
Graph: Graph,
Vertex: Vertex,
Edge: Edge
}
| {
return {
label: label,
startId: startId,
endId: endId,
properties: properties
};
} | identifier_body |
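// Usage sketch (added for illustration; the sample data is hypothetical).
// Edges reference vertices by ID only, so an endpoint can be added lazily
// after the edge that mentions it.
var v1 = new Vertex(1, {name: "alice"});
var g = new Graph([v1], [new Edge("knows", 1, 2, {since: 2015})]);
g.addVertex(new Vertex(2, {name: "bob"})); // endpoint supplied lazily
v1.addEdge(g.edges["knows"]);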
classes.js | var _ = require("lodash");
var Q = require("q");
var utils = require("./utils.js");
_.indexBy = utils.getIndexBy(_);
// Craql 4.2
// Each vertex has an ID, a set of
// properties, and a set of labelled edges. Vertices are stored in a hash table (dictionary) structure
// indexed by the ID for quick lookup. The set of labelled edges is maintained as a hash table
// indexed by label. Each edge is a 3-tuple of a start ID, end ID, and a hash of properties. Edges
// only refer to their start and end vertices by an ID rather than a reference, because the data may
// not be present yet in the graph. This allows for lazily loading vertices as required, but means
// that vertex IDs are duplicated by edges.
function Graph(vertices, edges) {
this.vertices = _.indexBy(vertices, 'id');
this.edges = _.indexBy(edges, 'label');
}
Graph.prototype = {
addVertex: function(vertex) {this.vertices[vertex.id] = vertex},
addEdge: function(edge) {this.edges[edge.label] = edge}
};
function Vertex(id, properties) {
this.id = id;
this.properties = properties;
this.edges = [];
}
Vertex.prototype = {
addEdge: function(edge) {
// TODO?: check if this edge is already present before pushing it
this.edges.push(edge);
}
};
function Edge(label, startId, endId, properties) {
return {
label: label,
startId: startId,
endId: endId,
properties: properties
};
}
Edge.prototype = {};
| Vertex: Vertex,
Edge: Edge
} | module.exports = {
Graph: Graph, | random_line_split |
mod.rs | mod api;
use gcrypt;
use hyper;
use rustc_serialize::base64;
use rustc_serialize::hex;
use rustc_serialize::json;
use crypto;
use std::fmt;
use std::io;
use rustc_serialize::base64::FromBase64;
use rustc_serialize::hex::FromHex;
use rustc_serialize::hex::ToHex;
#[derive(Debug)]
pub enum KeybaseError {
Http(String),
Api(api::Status),
FromBase64(base64::FromBase64Error),
FromHex(hex::FromHexError),
Gcrypt(gcrypt::error::Error),
Hyper(hyper::Error),
Io(io::Error),
Json(json::DecoderError),
}
impl From<api::Status> for KeybaseError {
fn from(err: api::Status) -> KeybaseError {
KeybaseError::Api(err)
}
}
impl From<base64::FromBase64Error> for KeybaseError {
fn from(err: base64::FromBase64Error) -> KeybaseError {
KeybaseError::FromBase64(err)
}
}
impl From<hex::FromHexError> for KeybaseError {
fn from(err: hex::FromHexError) -> KeybaseError {
KeybaseError::FromHex(err)
}
}
impl From<gcrypt::error::Error> for KeybaseError {
fn | (err: gcrypt::error::Error) -> KeybaseError {
KeybaseError::Gcrypt(err)
}
}
impl From<hyper::Error> for KeybaseError {
fn from(err: hyper::Error) -> KeybaseError {
KeybaseError::Hyper(err)
}
}
impl From<io::Error> for KeybaseError {
fn from(err: io::Error) -> KeybaseError {
KeybaseError::Io(err)
}
}
impl From<json::DecoderError> for KeybaseError {
fn from(err: json::DecoderError) -> KeybaseError {
KeybaseError::Json(err)
}
}
impl fmt::Display for KeybaseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
KeybaseError::Http(ref msg) => write!(f, "Keybase API Error: {}", msg),
KeybaseError::Api(ref err) => match err.desc.as_ref() {
Some(ref desc) => write!(f, "Keybase API error: {} ({})", desc, err.name),
None => write!(f, "Keybase API error: {}", err.name),
},
KeybaseError::FromBase64(ref err) => err.fmt(f),
KeybaseError::FromHex(ref err) => err.fmt(f),
KeybaseError::Gcrypt(ref err) => err.fmt(f),
KeybaseError::Hyper(ref err) => err.fmt(f),
KeybaseError::Io(ref err) => err.fmt(f),
KeybaseError::Json(ref err) => err.fmt(f),
}
}
}
pub type KeybaseResult<T> = Result<T, KeybaseError>;
#[allow(dead_code)]
pub struct Keybase {
session: String,
csrf_token: String,
}
impl Keybase {
pub fn login(user: &str, password: &str, token: gcrypt::Token) -> KeybaseResult<Keybase> {
let getsalt = try!(api::getsalt(user));
let salt = &getsalt.salt.unwrap();
let login_session = &getsalt.login_session.unwrap();
let salt_bytes = try!(salt.from_hex());
let mut pwh = vec![0; 224];
try!(crypto::scrypt(password, &salt_bytes, &mut pwh, token));
let session = try!(login_session.from_base64());
let hmac_pwh = try!(crypto::hmac_sha512(&session, &pwh[192..224], token));
let key = hmac_pwh.to_hex();
let login = try!(api::login(user, &key, login_session));
Ok(Keybase{session : login.session.unwrap(), csrf_token: login.csrf_token.unwrap()})
}
}
#[cfg(test)]
mod tests {
use gcrypt;
use super::*;
use std::env;
#[test]
#[allow(unused_variables)]
fn can_login() {
let token = gcrypt::init(|mut gcry| {
gcry.enable_secmem(16384).unwrap();
});
let username = &env::var("HEDWIG_TEST_KEYBASE_USERNAME").unwrap();
let password = &env::var("HEDWIG_TEST_KEYBASE_PASSWORD").unwrap();
let keybase_session = Keybase::login(&username, &password, token).unwrap();
}
}
| from | identifier_name |
mod.rs | mod api;
use gcrypt;
use hyper;
use rustc_serialize::base64;
use rustc_serialize::hex;
use rustc_serialize::json;
use crypto;
use std::fmt;
use std::io;
use rustc_serialize::base64::FromBase64;
use rustc_serialize::hex::FromHex;
use rustc_serialize::hex::ToHex;
#[derive(Debug)]
pub enum KeybaseError {
Http(String),
Api(api::Status),
FromBase64(base64::FromBase64Error),
FromHex(hex::FromHexError),
Gcrypt(gcrypt::error::Error),
Hyper(hyper::Error),
Io(io::Error),
Json(json::DecoderError),
}
impl From<api::Status> for KeybaseError {
fn from(err: api::Status) -> KeybaseError {
KeybaseError::Api(err)
}
}
impl From<base64::FromBase64Error> for KeybaseError {
fn from(err: base64::FromBase64Error) -> KeybaseError {
KeybaseError::FromBase64(err)
}
}
impl From<hex::FromHexError> for KeybaseError {
fn from(err: hex::FromHexError) -> KeybaseError {
KeybaseError::FromHex(err)
}
}
impl From<gcrypt::error::Error> for KeybaseError {
fn from(err: gcrypt::error::Error) -> KeybaseError {
KeybaseError::Gcrypt(err)
}
}
impl From<hyper::Error> for KeybaseError {
fn from(err: hyper::Error) -> KeybaseError {
KeybaseError::Hyper(err)
}
} | }
}
impl From<json::DecoderError> for KeybaseError {
fn from(err: json::DecoderError) -> KeybaseError {
KeybaseError::Json(err)
}
}
impl fmt::Display for KeybaseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
KeybaseError::Http(ref msg) => write!(f, "Keybase API Error: {}", msg),
KeybaseError::Api(ref err) => match err.desc.as_ref() {
Some(ref desc) => write!(f, "Keybase API error: {} ({})", desc, err.name),
None => write!(f, "Keybase API error: {}", err.name),
},
KeybaseError::FromBase64(ref err) => err.fmt(f),
KeybaseError::FromHex(ref err) => err.fmt(f),
KeybaseError::Gcrypt(ref err) => err.fmt(f),
KeybaseError::Hyper(ref err) => err.fmt(f),
KeybaseError::Io(ref err) => err.fmt(f),
KeybaseError::Json(ref err) => err.fmt(f),
}
}
}
pub type KeybaseResult<T> = Result<T, KeybaseError>;
#[allow(dead_code)]
pub struct Keybase {
session: String,
csrf_token: String,
}
impl Keybase {
pub fn login(user: &str, password: &str, token: gcrypt::Token) -> KeybaseResult<Keybase> {
let getsalt = try!(api::getsalt(user));
let salt = &getsalt.salt.unwrap();
let login_session = &getsalt.login_session.unwrap();
let salt_bytes = try!(salt.from_hex());
let mut pwh = vec![0; 224];
try!(crypto::scrypt(password, &salt_bytes, &mut pwh, token));
let session = try!(login_session.from_base64());
let hmac_pwh = try!(crypto::hmac_sha512(&session, &pwh[192..224], token));
let key = hmac_pwh.to_hex();
let login = try!(api::login(user, &key, login_session));
Ok(Keybase{session : login.session.unwrap(), csrf_token: login.csrf_token.unwrap()})
}
}
#[cfg(test)]
mod tests {
use gcrypt;
use super::*;
use std::env;
#[test]
#[allow(unused_variables)]
fn can_login() {
let token = gcrypt::init(|mut gcry| {
gcry.enable_secmem(16384).unwrap();
});
let username = &env::var("HEDWIG_TEST_KEYBASE_USERNAME").unwrap();
let password = &env::var("HEDWIG_TEST_KEYBASE_PASSWORD").unwrap();
let keybase_session = Keybase::login(&username, &password, token).unwrap();
}
} |
impl From<io::Error> for KeybaseError {
fn from(err: io::Error) -> KeybaseError {
KeybaseError::Io(err) | random_line_split |
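// Illustrative sketch (added; the credentials are hypothetical): the login
// flow above derives scrypt(password, salt), HMACs the decoded login_session
// with pwh[192..224], hex-encodes the result, and posts it as the login key.
fn _login_sketch(token: gcrypt::Token) -> KeybaseResult<Keybase> {
Keybase::login("alice", "correct horse battery staple", token)
}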
event.js | the default action of the passed event
*/
exports.stopEvent = function(e) {
exports.stopPropagation(e);
exports.preventDefault(e);
return false;
};
exports.stopPropagation = function(e) {
if (e.stopPropagation)
e.stopPropagation();
else
e.cancelBubble = true;
};
exports.preventDefault = function(e) {
if (e.preventDefault)
e.preventDefault();
else
e.returnValue = false;
};
/*
* @return {Number} 0 for left button, 1 for middle button, 2 for right button
*/
exports.getButton = function(e) {
if (e.type == "dblclick")
return 0;
if (e.type == "contextmenu" || (useragent.isMac && (e.ctrlKey && !e.altKey && !e.shiftKey)))
return 2;
// DOM Event
if (e.preventDefault) {
return e.button;
}
// old IE
else {
return {1:0, 2:2, 4:1}[e.button];
}
};
exports.capture = function(el, eventHandler, releaseCaptureHandler) {
function onMouseUp(e) {
eventHandler && eventHandler(e);
releaseCaptureHandler && releaseCaptureHandler(e);
exports.removeListener(document, "mousemove", eventHandler, true);
exports.removeListener(document, "mouseup", onMouseUp, true);
exports.removeListener(document, "dragstart", onMouseUp, true);
}
exports.addListener(document, "mousemove", eventHandler, true);
exports.addListener(document, "mouseup", onMouseUp, true);
exports.addListener(document, "dragstart", onMouseUp, true);
return onMouseUp;
};
exports.addMouseWheelListener = function(el, callback) {
if ("onmousewheel" in el) {
exports.addListener(el, "mousewheel", function(e) {
var factor = 8;
if (e.wheelDeltaX !== undefined) {
e.wheelX = -e.wheelDeltaX / factor;
e.wheelY = -e.wheelDeltaY / factor;
} else {
e.wheelX = 0;
e.wheelY = -e.wheelDelta / factor;
}
callback(e);
});
} else if ("onwheel" in el) {
exports.addListener(el, "wheel", function(e) {
var factor = 0.35;
switch (e.deltaMode) {
case e.DOM_DELTA_PIXEL:
e.wheelX = e.deltaX * factor || 0;
e.wheelY = e.deltaY * factor || 0;
break;
case e.DOM_DELTA_LINE:
case e.DOM_DELTA_PAGE:
e.wheelX = (e.deltaX || 0) * 5;
e.wheelY = (e.deltaY || 0) * 5;
break;
}
callback(e);
});
} else {
exports.addListener(el, "DOMMouseScroll", function(e) {
if (e.axis && e.axis == e.HORIZONTAL_AXIS) {
e.wheelX = (e.detail || 0) * 5;
e.wheelY = 0;
} else {
e.wheelX = 0;
e.wheelY = (e.detail || 0) * 5;
}
callback(e);
});
}
};
exports.addMultiMouseDownListener = function(el, timeouts, eventHandler, callbackName) {
var clicks = 0;
var startX, startY, timer;
var eventNames = {
2: "dblclick",
3: "tripleclick",
4: "quadclick"
};
exports.addListener(el, "mousedown", function(e) {
if (exports.getButton(e) !== 0) {
clicks = 0;
} else if (e.detail > 1) {
clicks++;
if (clicks > 4)
clicks = 1;
} else {
clicks = 1;
}
if (useragent.isIE) {
var isNewClick = Math.abs(e.clientX - startX) > 5 || Math.abs(e.clientY - startY) > 5;
if (!timer || isNewClick)
clicks = 1;
if (timer)
clearTimeout(timer);
timer = setTimeout(function() {timer = null}, timeouts[clicks - 1] || 600);
if (clicks == 1) {
startX = e.clientX;
startY = e.clientY;
}
}
e._clicks = clicks;
eventHandler[callbackName]("mousedown", e);
if (clicks > 4)
clicks = 0;
else if (clicks > 1)
return eventHandler[callbackName](eventNames[clicks], e);
});
if (useragent.isOldIE) {
exports.addListener(el, "dblclick", function(e) {
clicks = 2;
if (timer)
clearTimeout(timer);
timer = setTimeout(function() {timer = null}, timeouts[clicks - 1] || 600);
eventHandler[callbackName]("mousedown", e);
eventHandler[callbackName](eventNames[clicks], e);
});
}
};
var getModifierHash = useragent.isMac && useragent.isOpera && !("KeyboardEvent" in window)
? function(e) {
return 0 | (e.metaKey ? 1 : 0) | (e.altKey ? 2 : 0) | (e.shiftKey ? 4 : 0) | (e.ctrlKey ? 8 : 0);
}
: function(e) {
return 0 | (e.ctrlKey ? 1 : 0) | (e.altKey ? 2 : 0) | (e.shiftKey ? 4 : 0) | (e.metaKey ? 8 : 0);
};
exports.getModifierString = function(e) {
return keys.KEY_MODS[getModifierHash(e)];
};
function normalizeCommandKeys(callback, e, keyCode) {
var hashId = getModifierHash(e);
if (!useragent.isMac && pressedKeys) {
if (pressedKeys[91] || pressedKeys[92])
hashId |= 8;
if (pressedKeys.altGr) {
if ((3 & hashId) != 3)
pressedKeys.altGr = 0;
else
return;
}
if (keyCode === 18 || keyCode === 17) {
var location = "location" in e ? e.location : e.keyLocation;
if (keyCode === 17 && location === 1) {
if (pressedKeys[keyCode] == 1)
ts = e.timeStamp;
} else if (keyCode === 18 && hashId === 3 && location === 2) {
var dt = e.timeStamp - ts;
if (dt < 50)
pressedKeys.altGr = true;
}
}
}
if (keyCode in keys.MODIFIER_KEYS) {
keyCode = -1;
}
if (hashId & 8 && (keyCode === 91 || keyCode === 93)) {
keyCode = -1;
}
if (!hashId && keyCode === 13) {
var location = "location" in e ? e.location : e.keyLocation;
if (location === 3) {
callback(e, hashId, -keyCode);
if (e.defaultPrevented)
return;
}
}
if (useragent.isChromeOS && hashId & 8) {
callback(e, hashId, keyCode);
if (e.defaultPrevented)
return;
else
hashId &= ~8;
}
// If there is no hashId and the keyCode is not a function key, then
// we don't call the callback as we don't handle a command key here
// (it's a normal key/character input).
if (!hashId && !(keyCode in keys.FUNCTION_KEYS) && !(keyCode in keys.PRINTABLE_KEYS)) {
return false;
}
return callback(e, hashId, keyCode);
}
var pressedKeys = null;
var ts = 0;
exports.addCommandKeyListener = function(el, callback) {
var addListener = exports.addListener;
if (useragent.isOldGecko || (useragent.isOpera && !("KeyboardEvent" in window))) {
// Old versions of Gecko aka. Firefox < 4.0 didn't repeat the keydown
// event if the user pressed the key for a longer time. Instead, the
// keydown event was fired once and later on only the keypress event.
// To emulate the 'right' keydown behavior, the keyCode of the initial
// keyDown event is stored and in the following keypress events the
// stores keyCode is used to emulate a keyDown event.
var lastKeyDownKeyCode = null;
addListener(el, "keydown", function(e) {
lastKeyDownKeyCode = e.keyCode;
});
addListener(el, "keypress", function(e) {
return normalizeCommandKeys(callback, e, lastKeyDownKeyCode);
});
} else {
var lastDefaultPrevented = null;
addListener(el, "keydown", function(e) {
pressedKeys[e.keyCode] = (pressedKeys[e.keyCode] || 0) + 1;
var result = normalizeCommandKeys(callback, e, e.keyCode); | lastDefaultPrevented = e.defaultPrevented; | random_line_split |
|
event.js | , OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* ***** END LICENSE BLOCK ***** */
define(function(require, exports, module) {
"use strict";
var keys = require("./keys");
var useragent = require("./useragent");
exports.addListener = function(elem, type, callback) {
if (elem.addEventListener) {
return elem.addEventListener(type, callback, false);
}
if (elem.attachEvent) {
var wrapper = function() {
callback.call(elem, window.event);
};
callback._wrapper = wrapper;
elem.attachEvent("on" + type, wrapper);
}
};
exports.removeListener = function(elem, type, callback) {
if (elem.removeEventListener) {
return elem.removeEventListener(type, callback, false);
}
if (elem.detachEvent) {
elem.detachEvent("on" + type, callback._wrapper || callback);
}
};
/*
* Prevents propagation and clobbers the default action of the passed event
*/
exports.stopEvent = function(e) {
exports.stopPropagation(e);
exports.preventDefault(e);
return false;
};
exports.stopPropagation = function(e) {
if (e.stopPropagation)
e.stopPropagation();
else
e.cancelBubble = true;
};
exports.preventDefault = function(e) {
if (e.preventDefault)
e.preventDefault();
else
e.returnValue = false;
};
/*
* @return {Number} 0 for left button, 1 for middle button, 2 for right button
*/
exports.getButton = function(e) {
if (e.type == "dblclick")
return 0;
if (e.type == "contextmenu" || (useragent.isMac && (e.ctrlKey && !e.altKey && !e.shiftKey)))
return 2;
// DOM Event
if (e.preventDefault) {
return e.button;
}
// old IE
else {
return {1:0, 2:2, 4:1}[e.button];
}
};
exports.capture = function(el, eventHandler, releaseCaptureHandler) {
function | (e) {
eventHandler && eventHandler(e);
releaseCaptureHandler && releaseCaptureHandler(e);
exports.removeListener(document, "mousemove", eventHandler, true);
exports.removeListener(document, "mouseup", onMouseUp, true);
exports.removeListener(document, "dragstart", onMouseUp, true);
}
exports.addListener(document, "mousemove", eventHandler, true);
exports.addListener(document, "mouseup", onMouseUp, true);
exports.addListener(document, "dragstart", onMouseUp, true);
return onMouseUp;
};
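// Usage sketch (added for illustration; the handler names are hypothetical):
// capture() keeps routing mousemove to one handler until mouseup, which is
// how drag interactions receive moves that leave the original element.
// exports.capture(containerEl, function onDrag(e) {
//     // track e.clientX / e.clientY while the button is held
// }, function onRelease(e) {
//     // commit the drag
// });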
exports.addMouseWheelListener = function(el, callback) {
if ("onmousewheel" in el) {
exports.addListener(el, "mousewheel", function(e) {
var factor = 8;
if (e.wheelDeltaX !== undefined) {
e.wheelX = -e.wheelDeltaX / factor;
e.wheelY = -e.wheelDeltaY / factor;
} else {
e.wheelX = 0;
e.wheelY = -e.wheelDelta / factor;
}
callback(e);
});
} else if ("onwheel" in el) {
exports.addListener(el, "wheel", function(e) {
var factor = 0.35;
switch (e.deltaMode) {
case e.DOM_DELTA_PIXEL:
e.wheelX = e.deltaX * factor || 0;
e.wheelY = e.deltaY * factor || 0;
break;
case e.DOM_DELTA_LINE:
case e.DOM_DELTA_PAGE:
e.wheelX = (e.deltaX || 0) * 5;
e.wheelY = (e.deltaY || 0) * 5;
break;
}
callback(e);
});
} else {
exports.addListener(el, "DOMMouseScroll", function(e) {
if (e.axis && e.axis == e.HORIZONTAL_AXIS) {
e.wheelX = (e.detail || 0) * 5;
e.wheelY = 0;
} else {
e.wheelX = 0;
e.wheelY = (e.detail || 0) * 5;
}
callback(e);
});
}
};
exports.addMultiMouseDownListener = function(el, timeouts, eventHandler, callbackName) {
var clicks = 0;
var startX, startY, timer;
var eventNames = {
2: "dblclick",
3: "tripleclick",
4: "quadclick"
};
exports.addListener(el, "mousedown", function(e) {
if (exports.getButton(e) !== 0) {
clicks = 0;
} else if (e.detail > 1) {
clicks++;
if (clicks > 4)
clicks = 1;
} else {
clicks = 1;
}
if (useragent.isIE) {
var isNewClick = Math.abs(e.clientX - startX) > 5 || Math.abs(e.clientY - startY) > 5;
if (!timer || isNewClick)
clicks = 1;
if (timer)
clearTimeout(timer);
timer = setTimeout(function() {timer = null}, timeouts[clicks - 1] || 600);
if (clicks == 1) {
startX = e.clientX;
startY = e.clientY;
}
}
e._clicks = clicks;
eventHandler[callbackName]("mousedown", e);
if (clicks > 4)
clicks = 0;
else if (clicks > 1)
return eventHandler[callbackName](eventNames[clicks], e);
});
if (useragent.isOldIE) {
exports.addListener(el, "dblclick", function(e) {
clicks = 2;
if (timer)
clearTimeout(timer);
timer = setTimeout(function() {timer = null}, timeouts[clicks - 1] || 600);
eventHandler[callbackName]("mousedown", e);
eventHandler[callbackName](eventNames[clicks], e);
});
}
};
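// Usage sketch (added for illustration; the handler object is hypothetical):
// synthesized double/triple/quad clicks arrive through a single named method.
// exports.addMultiMouseDownListener(el, [400, 300, 250], {
//     onMouseEvent: function(name, e) {
//         // name is "mousedown", "dblclick", "tripleclick" or "quadclick"
//     }
// }, "onMouseEvent");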
var getModifierHash = useragent.isMac && useragent.isOpera && !("KeyboardEvent" in window)
? function(e) {
return 0 | (e.metaKey ? 1 : 0) | (e.altKey ? 2 : 0) | (e.shiftKey ? 4 : 0) | (e.ctrlKey ? 8 : 0);
}
: function(e) {
return 0 | (e.ctrlKey ? 1 : 0) | (e.altKey ? 2 : 0) | (e.shiftKey ? 4 : 0) | (e.metaKey ? 8 : 0);
};
exports.getModifierString = function(e) {
return keys.KEY_MODS[getModifierHash(e)];
};
function normalizeCommandKeys(callback, e, keyCode) {
var hashId = getModifierHash(e);
if (!useragent.isMac && pressedKeys) {
if (pressedKeys[91] || pressedKeys[92])
hashId |= 8;
if (pressedKeys.altGr) {
if ((3 & hashId) != 3)
pressedKeys.altGr = 0;
else
return;
}
if (keyCode === 18 || keyCode === 17) {
var location = "location" in e ? e.location : e.keyLocation;
if (keyCode === 17 && location === 1) {
if (pressedKeys[keyCode] == 1)
ts = e.timeStamp;
} else if (keyCode === 18 && hashId === 3 && location === 2) {
var dt = e.timeStamp - ts;
if (dt < 50)
pressedKeys.altGr = true;
}
}
}
if (keyCode in keys.MODIFIER_KEYS) {
keyCode = -1;
}
if (hashId & 8 && (keyCode === 91 || keyCode === 93)) {
keyCode = -1;
}
if (!hashId && keyCode === 13) {
var location = "location" in e ? e.location : e.keyLocation;
if (location === 3) {
callback(e, hashId, -keyCode);
if (e.defaultPrevented)
return;
}
}
if (useragent.isChromeOS && hashId & 8) {
callback(e, hashId, keyCode);
if (e.defaultPrevented)
return;
else
hashId &= ~8;
}
// If there is no hashId and the keyCode is not a function key, then
// we don't call the callback as we don't handle a command key here
// (it's a normal key/character input).
if (!hashId && !(keyCode in keys.FUNCTION_KEYS) && !(keyCode in keys.PRINTABLE_KEYS)) {
return false;
}
return callback(e, hashId, keyCode);
}
var pressedKeys = null;
var ts = 0;
exports.addCommandKeyListener = function(el | onMouseUp | identifier_name |
event.js | }
};
exports.removeListener = function(elem, type, callback) {
if (elem.removeEventListener) {
return elem.removeEventListener(type, callback, false);
}
if (elem.detachEvent) {
elem.detachEvent("on" + type, callback._wrapper || callback);
}
};
/*
* Prevents propagation and clobbers the default action of the passed event
*/
exports.stopEvent = function(e) {
exports.stopPropagation(e);
exports.preventDefault(e);
return false;
};
exports.stopPropagation = function(e) {
if (e.stopPropagation)
e.stopPropagation();
else
e.cancelBubble = true;
};
exports.preventDefault = function(e) {
if (e.preventDefault)
e.preventDefault();
else
e.returnValue = false;
};
/*
* @return {Number} 0 for left button, 1 for middle button, 2 for right button
*/
exports.getButton = function(e) {
if (e.type == "dblclick")
return 0;
if (e.type == "contextmenu" || (useragent.isMac && (e.ctrlKey && !e.altKey && !e.shiftKey)))
return 2;
// DOM Event
if (e.preventDefault) {
return e.button;
}
// old IE
else {
return {1:0, 2:2, 4:1}[e.button];
}
};
exports.capture = function(el, eventHandler, releaseCaptureHandler) {
function onMouseUp(e) {
eventHandler && eventHandler(e);
releaseCaptureHandler && releaseCaptureHandler(e);
exports.removeListener(document, "mousemove", eventHandler, true);
exports.removeListener(document, "mouseup", onMouseUp, true);
exports.removeListener(document, "dragstart", onMouseUp, true);
}
exports.addListener(document, "mousemove", eventHandler, true);
exports.addListener(document, "mouseup", onMouseUp, true);
exports.addListener(document, "dragstart", onMouseUp, true);
return onMouseUp;
};
exports.addMouseWheelListener = function(el, callback) {
if ("onmousewheel" in el) {
exports.addListener(el, "mousewheel", function(e) {
var factor = 8;
if (e.wheelDeltaX !== undefined) {
e.wheelX = -e.wheelDeltaX / factor;
e.wheelY = -e.wheelDeltaY / factor;
} else {
e.wheelX = 0;
e.wheelY = -e.wheelDelta / factor;
}
callback(e);
});
} else if ("onwheel" in el) {
exports.addListener(el, "wheel", function(e) {
var factor = 0.35;
switch (e.deltaMode) {
case e.DOM_DELTA_PIXEL:
e.wheelX = e.deltaX * factor || 0;
e.wheelY = e.deltaY * factor || 0;
break;
case e.DOM_DELTA_LINE:
case e.DOM_DELTA_PAGE:
e.wheelX = (e.deltaX || 0) * 5;
e.wheelY = (e.deltaY || 0) * 5;
break;
}
callback(e);
});
} else {
exports.addListener(el, "DOMMouseScroll", function(e) {
if (e.axis && e.axis == e.HORIZONTAL_AXIS) {
e.wheelX = (e.detail || 0) * 5;
e.wheelY = 0;
} else {
e.wheelX = 0;
e.wheelY = (e.detail || 0) * 5;
}
callback(e);
});
}
};
exports.addMultiMouseDownListener = function(el, timeouts, eventHandler, callbackName) {
var clicks = 0;
var startX, startY, timer;
var eventNames = {
2: "dblclick",
3: "tripleclick",
4: "quadclick"
};
exports.addListener(el, "mousedown", function(e) {
if (exports.getButton(e) !== 0) {
clicks = 0;
} else if (e.detail > 1) {
clicks++;
if (clicks > 4)
clicks = 1;
} else {
clicks = 1;
}
if (useragent.isIE) {
var isNewClick = Math.abs(e.clientX - startX) > 5 || Math.abs(e.clientY - startY) > 5;
if (!timer || isNewClick)
clicks = 1;
if (timer)
clearTimeout(timer);
timer = setTimeout(function() {timer = null}, timeouts[clicks - 1] || 600);
if (clicks == 1) {
startX = e.clientX;
startY = e.clientY;
}
}
e._clicks = clicks;
eventHandler[callbackName]("mousedown", e);
if (clicks > 4)
clicks = 0;
else if (clicks > 1)
return eventHandler[callbackName](eventNames[clicks], e);
});
if (useragent.isOldIE) {
exports.addListener(el, "dblclick", function(e) {
clicks = 2;
if (timer)
clearTimeout(timer);
timer = setTimeout(function() {timer = null}, timeouts[clicks - 1] || 600);
eventHandler[callbackName]("mousedown", e);
eventHandler[callbackName](eventNames[clicks], e);
});
}
};
var getModifierHash = useragent.isMac && useragent.isOpera && !("KeyboardEvent" in window)
? function(e) {
return 0 | (e.metaKey ? 1 : 0) | (e.altKey ? 2 : 0) | (e.shiftKey ? 4 : 0) | (e.ctrlKey ? 8 : 0);
}
: function(e) {
return 0 | (e.ctrlKey ? 1 : 0) | (e.altKey ? 2 : 0) | (e.shiftKey ? 4 : 0) | (e.metaKey ? 8 : 0);
};
exports.getModifierString = function(e) {
return keys.KEY_MODS[getModifierHash(e)];
};
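// Editor's sketch (hypothetical event object, not in the original source): on the
// common non-Mac-Opera path the hash packs ctrl=1, alt=2, shift=4, meta=8, so
// Ctrl+Shift yields 1 | 4 = 5 and keys.KEY_MODS[5] names that combination:
//   getModifierHash({ctrlKey: true, altKey: false, shiftKey: true, metaKey: false}) === 5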
function normalizeCommandKeys(callback, e, keyCode) {
var hashId = getModifierHash(e);
if (!useragent.isMac && pressedKeys) {
if (pressedKeys[91] || pressedKeys[92])
hashId |= 8;
if (pressedKeys.altGr) {
if ((3 & hashId) != 3)
pressedKeys.altGr = 0;
else
return;
}
if (keyCode === 18 || keyCode === 17) {
var location = "location" in e ? e.location : e.keyLocation;
if (keyCode === 17 && location === 1) {
if (pressedKeys[keyCode] == 1)
ts = e.timeStamp;
} else if (keyCode === 18 && hashId === 3 && location === 2) {
var dt = e.timeStamp - ts;
if (dt < 50)
pressedKeys.altGr = true;
}
}
}
if (keyCode in keys.MODIFIER_KEYS) {
keyCode = -1;
}
if (hashId & 8 && (keyCode === 91 || keyCode === 93)) {
keyCode = -1;
}
if (!hashId && keyCode === 13) {
var location = "location" in e ? e.location : e.keyLocation;
if (location === 3) {
callback(e, hashId, -keyCode);
if (e.defaultPrevented)
return;
}
}
if (useragent.isChromeOS && hashId & 8) {
callback(e, hashId, keyCode);
if (e.defaultPrevented)
return;
else
hashId &= ~8;
}
// If there is no hashId and the keyCode is not a function key, then
// we don't call the callback as we don't handle a command key here
// (it's a normal key/character input).
if (!hashId && !(keyCode in keys.FUNCTION_KEYS) && !(keyCode in keys.PRINTABLE_KEYS)) {
return false;
}
return callback(e, hashId, keyCode);
}
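// Editor's note (illustrative): the AltGr detection above relies on Windows
// reporting AltGr as a left-Ctrl keydown (location 1) followed by a right-Alt
// keydown (location 2) within ~50ms; once pressedKeys.altGr is set, the Ctrl+Alt
// combination is swallowed instead of being dispatched as a command shortcut.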
var pressedKeys = null;
var ts = 0;
exports.addCommandKeyListener = function(el, callback) {
var addListener = exports.addListener;
if (useragent.isOldGecko || (useragent.isOpera && !("KeyboardEvent" in window))) | {
// Old versions of Gecko aka. Firefox < 4.0 didn't repeat the keydown
// event if the user pressed the key for a longer time. Instead, the
// keydown event was fired once and later on only the keypress event.
// To emulate the 'right' keydown behavior, the keyCode of the initial
// keyDown event is stored and in the following keypress events the
// stored keyCode is used to emulate a keyDown event.
var lastKeyDownKeyCode = null;
addListener(el, "keydown", function(e) {
lastKeyDownKeyCode = e.keyCode;
});
addListener(el, "keypress", function(e) {
return normalizeCommandKeys(callback, e, lastKeyDownKeyCode);
});
} | conditional_block |
|
event.js | , OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* ***** END LICENSE BLOCK ***** */
define(function(require, exports, module) {
"use strict";
var keys = require("./keys");
var useragent = require("./useragent");
exports.addListener = function(elem, type, callback) {
if (elem.addEventListener) {
return elem.addEventListener(type, callback, false);
}
if (elem.attachEvent) {
var wrapper = function() {
callback.call(elem, window.event);
};
callback._wrapper = wrapper;
elem.attachEvent("on" + type, wrapper);
}
};
exports.removeListener = function(elem, type, callback) {
if (elem.removeEventListener) {
return elem.removeEventListener(type, callback, false);
}
if (elem.detachEvent) {
elem.detachEvent("on" + type, callback._wrapper || callback);
}
};
/*
* Prevents propagation and clobbers the default action of the passed event
*/
exports.stopEvent = function(e) {
exports.stopPropagation(e);
exports.preventDefault(e);
return false;
};
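// Editor's usage sketch (hypothetical node and handler, not in the original
// source): returning stopEvent(e) both cancels the event and hands `false` back
// to old-IE style `on<type>` wrappers:
//   exports.addListener(node, "contextmenu", function(e) { return exports.stopEvent(e); });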
exports.stopPropagation = function(e) {
if (e.stopPropagation)
e.stopPropagation();
else
e.cancelBubble = true;
};
exports.preventDefault = function(e) {
if (e.preventDefault)
e.preventDefault();
else
e.returnValue = false;
};
/*
* @return {Number} 0 for left button, 1 for middle button, 2 for right button
*/
exports.getButton = function(e) {
if (e.type == "dblclick")
return 0;
if (e.type == "contextmenu" || (useragent.isMac && (e.ctrlKey && !e.altKey && !e.shiftKey)))
return 2;
// DOM Event
if (e.preventDefault) {
return e.button;
}
// old IE
else {
return {1:0, 2:2, 4:1}[e.button];
}
};
exports.capture = function(el, eventHandler, releaseCaptureHandler) {
function onMouseUp(e) {
eventHandler && eventHandler(e);
releaseCaptureHandler && releaseCaptureHandler(e);
exports.removeListener(document, "mousemove", eventHandler, true);
exports.removeListener(document, "mouseup", onMouseUp, true);
exports.removeListener(document, "dragstart", onMouseUp, true);
}
exports.addListener(document, "mousemove", eventHandler, true);
exports.addListener(document, "mouseup", onMouseUp, true);
exports.addListener(document, "dragstart", onMouseUp, true);
return onMouseUp;
};
exports.addMouseWheelListener = function(el, callback) {
if ("onmousewheel" in el) {
exports.addListener(el, "mousewheel", function(e) {
var factor = 8;
if (e.wheelDeltaX !== undefined) {
e.wheelX = -e.wheelDeltaX / factor;
e.wheelY = -e.wheelDeltaY / factor;
} else {
e.wheelX = 0;
e.wheelY = -e.wheelDelta / factor;
}
callback(e);
});
} else if ("onwheel" in el) {
exports.addListener(el, "wheel", function(e) {
var factor = 0.35;
switch (e.deltaMode) {
case e.DOM_DELTA_PIXEL:
e.wheelX = e.deltaX * factor || 0;
e.wheelY = e.deltaY * factor || 0;
break;
case e.DOM_DELTA_LINE:
case e.DOM_DELTA_PAGE:
e.wheelX = (e.deltaX || 0) * 5;
e.wheelY = (e.deltaY || 0) * 5;
break;
}
callback(e);
});
} else {
exports.addListener(el, "DOMMouseScroll", function(e) {
if (e.axis && e.axis == e.HORIZONTAL_AXIS) {
e.wheelX = (e.detail || 0) * 5;
e.wheelY = 0;
} else {
e.wheelX = 0;
e.wheelY = (e.detail || 0) * 5;
}
callback(e);
});
}
};
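// Editor's usage sketch (hypothetical element and handler): whichever of the
// three wheel APIs the browser exposes, the consumer only ever reads the
// normalized e.wheelX / e.wheelY fields:
//   exports.addMouseWheelListener(scrollerEl, function(e) {
//       content.scrollLeft += e.wheelX;
//       content.scrollTop += e.wheelY;
//   });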
exports.addMultiMouseDownListener = function(el, timeouts, eventHandler, callbackName) {
var clicks = 0;
var startX, startY, timer;
var eventNames = {
2: "dblclick",
3: "tripleclick",
4: "quadclick"
};
exports.addListener(el, "mousedown", function(e) {
if (exports.getButton(e) !== 0) {
clicks = 0;
} else if (e.detail > 1) {
clicks++;
if (clicks > 4)
clicks = 1;
} else {
clicks = 1;
}
if (useragent.isIE) {
var isNewClick = Math.abs(e.clientX - startX) > 5 || Math.abs(e.clientY - startY) > 5;
if (!timer || isNewClick)
clicks = 1;
if (timer)
clearTimeout(timer);
timer = setTimeout(function() {timer = null}, timeouts[clicks - 1] || 600);
if (clicks == 1) {
startX = e.clientX;
startY = e.clientY;
}
}
e._clicks = clicks;
eventHandler[callbackName]("mousedown", e);
if (clicks > 4)
clicks = 0;
else if (clicks > 1)
return eventHandler[callbackName](eventNames[clicks], e);
});
if (useragent.isOldIE) {
exports.addListener(el, "dblclick", function(e) {
clicks = 2;
if (timer)
clearTimeout(timer);
timer = setTimeout(function() {timer = null}, timeouts[clicks - 1] || 600);
eventHandler[callbackName]("mousedown", e);
eventHandler[callbackName](eventNames[clicks], e);
});
}
};
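// Editor's usage sketch (hypothetical names): `timeouts` holds the per-click-count
// windows in ms, and the handler object receives "mousedown" plus "dblclick",
// "tripleclick" or "quadclick" through the named callback:
//   exports.addMultiMouseDownListener(editorEl, [400, 400, 400], mouseHandler, "onMouseEvent");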
var getModifierHash = useragent.isMac && useragent.isOpera && !("KeyboardEvent" in window)
? function(e) {
return 0 | (e.metaKey ? 1 : 0) | (e.altKey ? 2 : 0) | (e.shiftKey ? 4 : 0) | (e.ctrlKey ? 8 : 0);
}
: function(e) {
return 0 | (e.ctrlKey ? 1 : 0) | (e.altKey ? 2 : 0) | (e.shiftKey ? 4 : 0) | (e.metaKey ? 8 : 0);
};
exports.getModifierString = function(e) {
return keys.KEY_MODS[getModifierHash(e)];
};
function normalizeCommandKeys(callback, e, keyCode) | pressedKeys.altGr = true;
}
}
}
if (keyCode in keys.MODIFIER_KEYS) {
keyCode = -1;
}
if (hashId & 8 && (keyCode === 91 || keyCode === 93)) {
keyCode = -1;
}
if (!hashId && keyCode === 13) {
var location = "location" in e ? e.location : e.keyLocation;
if (location === 3) {
callback(e, hashId, -keyCode);
if (e.defaultPrevented)
return;
}
}
if (useragent.isChromeOS && hashId & 8) {
callback(e, hashId, keyCode);
if (e.defaultPrevented)
return;
else
hashId &= ~8;
}
// If there is no hashId and the keyCode is not a function key, then
// we don't call the callback as we don't handle a command key here
// (it's a normal key/character input).
if (!hashId && !(keyCode in keys.FUNCTION_KEYS) && !(keyCode in keys.PRINTABLE_KEYS)) {
return false;
}
return callback(e, hashId, keyCode);
}
var pressedKeys = null;
var ts = 0;
exports.addCommandKeyListener = function(el, | {
var hashId = getModifierHash(e);
if (!useragent.isMac && pressedKeys) {
if (pressedKeys[91] || pressedKeys[92])
hashId |= 8;
if (pressedKeys.altGr) {
if ((3 & hashId) != 3)
pressedKeys.altGr = 0;
else
return;
}
if (keyCode === 18 || keyCode === 17) {
var location = "location" in e ? e.location : e.keyLocation;
if (keyCode === 17 && location === 1) {
if (pressedKeys[keyCode] == 1)
ts = e.timeStamp;
} else if (keyCode === 18 && hashId === 3 && location === 2) {
var dt = e.timeStamp - ts;
if (dt < 50) | identifier_body |
borrowck-lend-flow-if.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Note: the borrowck analysis is currently flow-insensitive.
// Therefore, some of these errors are marked as spurious and could be
// corrected by a simple change to the analysis. The others are
// either genuine or would require more advanced changes. The latter
// cases are noted.
fn borrow(_v: &int) {}
fn borrow_mut(_v: &mut int) {}
fn cond() -> bool { fail!() }
fn for_func(_f: || -> bool) { fail!() }
fn produce<T>() -> T { fail!(); }
fn inc(v: &mut ~int) {
*v = ~(**v + 1);
}
fn pre_freeze_cond() {
// In this instance, the freeze is conditional and starts before
// the mut borrow.
let mut v = ~3;
let _w;
if cond() {
_w = &v;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn pre_freeze_else() {
// In this instance, the freeze and mut borrow are on separate sides
// of the if.
let mut v = ~3; | }
}
fn main() {} | let _w;
if cond() {
_w = &v;
} else {
borrow_mut(v); | random_line_split |
borrowck-lend-flow-if.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Note: the borrowck analysis is currently flow-insensitive.
// Therefore, some of these errors are marked as spurious and could be
// corrected by a simple change to the analysis. The others are
// either genuine or would require more advanced changes. The latter
// cases are noted.
fn borrow(_v: &int) {}
fn borrow_mut(_v: &mut int) {}
fn cond() -> bool { fail!() }
fn for_func(_f: || -> bool) { fail!() }
fn produce<T>() -> T { fail!(); }
fn inc(v: &mut ~int) {
*v = ~(**v + 1);
}
fn pre_freeze_cond() {
// In this instance, the freeze is conditional and starts before
// the mut borrow.
let mut v = ~3;
let _w;
if cond() |
borrow_mut(v); //~ ERROR cannot borrow
}
fn pre_freeze_else() {
// In this instance, the freeze and mut borrow are on separate sides
// of the if.
let mut v = ~3;
let _w;
if cond() {
_w = &v;
} else {
borrow_mut(v);
}
}
fn main() {}
| {
_w = &v;
} | conditional_block |
borrowck-lend-flow-if.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Note: the borrowck analysis is currently flow-insensitive.
// Therefore, some of these errors are marked as spurious and could be
// corrected by a simple change to the analysis. The others are
// either genuine or would require more advanced changes. The latter
// cases are noted.
fn borrow(_v: &int) {}
fn borrow_mut(_v: &mut int) {}
fn cond() -> bool { fail!() }
fn for_func(_f: || -> bool) |
fn produce<T>() -> T { fail!(); }
fn inc(v: &mut ~int) {
*v = ~(**v + 1);
}
fn pre_freeze_cond() {
// In this instance, the freeze is conditional and starts before
// the mut borrow.
let mut v = ~3;
let _w;
if cond() {
_w = &v;
}
borrow_mut(v); //~ ERROR cannot borrow
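// Editor's note (illustrative): under the flow-insensitive analysis described at
// the top of this file, the shared borrow `_w = &v` taken in the `if` arm is
// treated as live for the rest of the function, so the `borrow_mut(v)` above is
// rejected even on executions where `cond()` returned false.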
}
fn pre_freeze_else() {
// In this instance, the freeze and mut borrow are on separate sides
// of the if.
let mut v = ~3;
let _w;
if cond() {
_w = &v;
} else {
borrow_mut(v);
}
}
fn main() {}
| { fail!() } | identifier_body |
borrowck-lend-flow-if.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Note: the borrowck analysis is currently flow-insensitive.
// Therefore, some of these errors are marked as spurious and could be
// corrected by a simple change to the analysis. The others are
// either genuine or would require more advanced changes. The latter
// cases are noted.
fn borrow(_v: &int) {}
fn | (_v: &mut int) {}
fn cond() -> bool { fail!() }
fn for_func(_f: || -> bool) { fail!() }
fn produce<T>() -> T { fail!(); }
fn inc(v: &mut ~int) {
*v = ~(**v + 1);
}
fn pre_freeze_cond() {
// In this instance, the freeze is conditional and starts before
// the mut borrow.
let mut v = ~3;
let _w;
if cond() {
_w = &v;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn pre_freeze_else() {
// In this instance, the freeze and mut borrow are on separate sides
// of the if.
let mut v = ~3;
let _w;
if cond() {
_w = &v;
} else {
borrow_mut(v);
}
}
fn main() {}
| borrow_mut | identifier_name |
cssClient.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { commands, CompletionItem, CompletionItemKind, ExtensionContext, languages, Position, Range, SnippetString, TextEdit, window, TextDocument, CompletionContext, CancellationToken, ProviderResult, CompletionList } from 'vscode';
import { Disposable, LanguageClientOptions, ProvideCompletionItemsSignature, NotificationType, CommonLanguageClient } from 'vscode-languageclient';
import * as nls from 'vscode-nls';
import { getCustomDataSource } from './customData';
import { RequestService, serveFileSystemRequests } from './requests';
namespace CustomDataChangedNotification {
export const type: NotificationType<string[]> = new NotificationType('css/customDataChanged');
}
const localize = nls.loadMessageBundle();
export type LanguageClientConstructor = (name: string, description: string, clientOptions: LanguageClientOptions) => CommonLanguageClient;
export interface Runtime {
TextDecoder: { new(encoding?: string): { decode(buffer: ArrayBuffer): string; } };
fs?: RequestService;
}
export function startClient(context: ExtensionContext, newLanguageClient: LanguageClientConstructor, runtime: Runtime) {
const customDataSource = getCustomDataSource(context.subscriptions);
let documentSelector = ['css', 'scss', 'less'];
// Options to control the language client
let clientOptions: LanguageClientOptions = {
documentSelector,
synchronize: {
configurationSection: ['css', 'scss', 'less']
},
initializationOptions: {
handledSchemas: ['file']
},
middleware: {
provideCompletionItem(document: TextDocument, position: Position, context: CompletionContext, token: CancellationToken, next: ProvideCompletionItemsSignature): ProviderResult<CompletionItem[] | CompletionList> {
// testing the replace / insert mode
function updateRanges(item: CompletionItem) {
const range = item.range;
if (range instanceof Range && range.end.isAfter(position) && range.start.isBeforeOrEqual(position)) {
item.range = { inserting: new Range(range.start, position), replacing: range };
}
}
function updateLabel(item: CompletionItem) {
if (item.kind === CompletionItemKind.Color) {
item.label2 = {
name: item.label,
type: (item.documentation as string)
};
}
}
// testing the new completion
function updateProposals(r: CompletionItem[] | CompletionList | null | undefined): CompletionItem[] | CompletionList | null | undefined {
if (r) {
(Array.isArray(r) ? r : r.items).forEach(updateRanges);
(Array.isArray(r) ? r : r.items).forEach(updateLabel);
}
return r;
}
const isThenable = <T>(obj: ProviderResult<T>): obj is Thenable<T> => obj && (<any>obj)['then'];
const r = next(document, position, context, token);
if (isThenable<CompletionItem[] | CompletionList | null | undefined>(r)) {
return r.then(updateProposals);
}
return updateProposals(r);
}
}
};
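// Editor's note (illustrative): the middleware rewrites each completion's range
// into the proposed { inserting, replacing } pair -- for a cursor inside a word,
// inserting ends at the cursor while replacing spans the whole word. A sketch
// with hypothetical positions:
//   item.range = { inserting: new Range(start, cursor), replacing: new Range(start, wordEnd) };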
// Create the language client and start the client.
let client = newLanguageClient('css', localize('cssserver.name', 'CSS Language Server'), clientOptions);
client.registerProposedFeatures();
client.onReady().then(() => {
client.sendNotification(CustomDataChangedNotification.type, customDataSource.uris);
customDataSource.onDidChange(() => {
client.sendNotification(CustomDataChangedNotification.type, customDataSource.uris);
});
serveFileSystemRequests(client, runtime);
});
let disposable = client.start();
// Push the disposable to the context's subscriptions so that the
// client can be deactivated on extension deactivation
context.subscriptions.push(disposable);
let indentationRules = {
increaseIndentPattern: /(^.*\{[^}]*$)/,
decreaseIndentPattern: /^\s*\}/
};
languages.setLanguageConfiguration('css', {
wordPattern: /(#?-?\d*\.\d\w*%?)|(::?[\w-]*(?=[^,{;]*[,{]))|(([@#.!])?[\w-?]+%?|[@#!.])/g,
indentationRules: indentationRules
});
languages.setLanguageConfiguration('less', {
wordPattern: /(#?-?\d*\.\d\w*%?)|(::?[\w-]+(?=[^,{;]*[,{]))|(([@#.!])?[\w-?]+%?|[@#!.])/g,
indentationRules: indentationRules
});
languages.setLanguageConfiguration('scss', {
wordPattern: /(#?-?\d*\.\d\w*%?)|(::?[\w-]*(?=[^,{;]*[,{]))|(([@$#.!])?[\w-?]+%?|[@#!$.])/g,
indentationRules: indentationRules
});
client.onReady().then(() => {
context.subscriptions.push(initCompletionProvider());
});
function initCompletionProvider(): Disposable {
const regionCompletionRegExpr = /^(\s*)(\/(\*\s*(#\w*)?)?)?$/;
return languages.registerCompletionItemProvider(documentSelector, {
provideCompletionItems(doc: TextDocument, pos: Position) {
let lineUntilPos = doc.getText(new Range(new Position(pos.line, 0), pos));
let match = lineUntilPos.match(regionCompletionRegExpr);
if (match) {
let range = new Range(new Position(pos.line, match[1].length), pos);
let beginProposal = new CompletionItem('#region', CompletionItemKind.Snippet);
beginProposal.range = range;
beginProposal.insertText = new SnippetString('/* #region $1*/');
beginProposal.documentation = localize('folding.start', 'Folding Region Start');
beginProposal.filterText = match[2];
beginProposal.sortText = 'za';
let endProposal = new CompletionItem('#endregion', CompletionItemKind.Snippet);
endProposal.range = range;
endProposal.insertText = '/* #endregion */';
endProposal.documentation = localize('folding.end', 'Folding Region End');
endProposal.sortText = 'zb';
endProposal.filterText = match[2];
return [beginProposal, endProposal]; | }
return null;
}
});
}
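// Editor's note (illustrative): regionCompletionRegExpr accepts a line that is
// only indentation plus the start of a block comment, so typing "  /* #reg"
// matches with match[1] === "  " (the indent, anchoring the replace range) and
// match[2] === "/* #reg" (used as filterText), which is what makes the
// #region / #endregion snippets above show up.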
commands.registerCommand('_css.applyCodeAction', applyCodeAction);
function applyCodeAction(uri: string, documentVersion: number, edits: TextEdit[]) {
let textEditor = window.activeTextEditor;
if (textEditor && textEditor.document.uri.toString() === uri) {
if (textEditor.document.version !== documentVersion) {
window.showInformationMessage(`CSS fix is outdated and can't be applied to the document.`);
}
textEditor.edit(mutator => {
for (let edit of edits) {
mutator.replace(client.protocol2CodeConverter.asRange(edit.range), edit.newText);
}
}).then(success => {
if (!success) {
window.showErrorMessage('Failed to apply CSS fix to the document. Please consider opening an issue with steps to reproduce.');
}
});
}
}
} | random_line_split |
|
cssClient.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { commands, CompletionItem, CompletionItemKind, ExtensionContext, languages, Position, Range, SnippetString, TextEdit, window, TextDocument, CompletionContext, CancellationToken, ProviderResult, CompletionList } from 'vscode';
import { Disposable, LanguageClientOptions, ProvideCompletionItemsSignature, NotificationType, CommonLanguageClient } from 'vscode-languageclient';
import * as nls from 'vscode-nls';
import { getCustomDataSource } from './customData';
import { RequestService, serveFileSystemRequests } from './requests';
namespace CustomDataChangedNotification {
export const type: NotificationType<string[]> = new NotificationType('css/customDataChanged');
}
const localize = nls.loadMessageBundle();
export type LanguageClientConstructor = (name: string, description: string, clientOptions: LanguageClientOptions) => CommonLanguageClient;
export interface Runtime {
TextDecoder: { new(encoding?: string): { decode(buffer: ArrayBuffer): string; } };
fs?: RequestService;
}
export function startClient(context: ExtensionContext, newLanguageClient: LanguageClientConstructor, runtime: Runtime) {
const customDataSource = getCustomDataSource(context.subscriptions);
let documentSelector = ['css', 'scss', 'less'];
// Options to control the language client
let clientOptions: LanguageClientOptions = {
documentSelector,
synchronize: {
configurationSection: ['css', 'scss', 'less']
},
initializationOptions: {
handledSchemas: ['file']
},
middleware: {
provideCompletionItem(document: TextDocument, position: Position, context: CompletionContext, token: CancellationToken, next: ProvideCompletionItemsSignature): ProviderResult<CompletionItem[] | CompletionList> {
// testing the replace / insert mode
function | (item: CompletionItem) {
const range = item.range;
if (range instanceof Range && range.end.isAfter(position) && range.start.isBeforeOrEqual(position)) {
item.range = { inserting: new Range(range.start, position), replacing: range };
}
}
function updateLabel(item: CompletionItem) {
if (item.kind === CompletionItemKind.Color) {
item.label2 = {
name: item.label,
type: (item.documentation as string)
};
}
}
// testing the new completion
function updateProposals(r: CompletionItem[] | CompletionList | null | undefined): CompletionItem[] | CompletionList | null | undefined {
if (r) {
(Array.isArray(r) ? r : r.items).forEach(updateRanges);
(Array.isArray(r) ? r : r.items).forEach(updateLabel);
}
return r;
}
const isThenable = <T>(obj: ProviderResult<T>): obj is Thenable<T> => obj && (<any>obj)['then'];
const r = next(document, position, context, token);
if (isThenable<CompletionItem[] | CompletionList | null | undefined>(r)) {
return r.then(updateProposals);
}
return updateProposals(r);
}
}
};
// Create the language client and start the client.
let client = newLanguageClient('css', localize('cssserver.name', 'CSS Language Server'), clientOptions);
client.registerProposedFeatures();
client.onReady().then(() => {
client.sendNotification(CustomDataChangedNotification.type, customDataSource.uris);
customDataSource.onDidChange(() => {
client.sendNotification(CustomDataChangedNotification.type, customDataSource.uris);
});
serveFileSystemRequests(client, runtime);
});
let disposable = client.start();
// Push the disposable to the context's subscriptions so that the
// client can be deactivated on extension deactivation
context.subscriptions.push(disposable);
let indentationRules = {
increaseIndentPattern: /(^.*\{[^}]*$)/,
decreaseIndentPattern: /^\s*\}/
};
languages.setLanguageConfiguration('css', {
wordPattern: /(#?-?\d*\.\d\w*%?)|(::?[\w-]*(?=[^,{;]*[,{]))|(([@#.!])?[\w-?]+%?|[@#!.])/g,
indentationRules: indentationRules
});
languages.setLanguageConfiguration('less', {
wordPattern: /(#?-?\d*\.\d\w*%?)|(::?[\w-]+(?=[^,{;]*[,{]))|(([@#.!])?[\w-?]+%?|[@#!.])/g,
indentationRules: indentationRules
});
languages.setLanguageConfiguration('scss', {
wordPattern: /(#?-?\d*\.\d\w*%?)|(::?[\w-]*(?=[^,{;]*[,{]))|(([@$#.!])?[\w-?]+%?|[@#!$.])/g,
indentationRules: indentationRules
});
client.onReady().then(() => {
context.subscriptions.push(initCompletionProvider());
});
function initCompletionProvider(): Disposable {
const regionCompletionRegExpr = /^(\s*)(\/(\*\s*(#\w*)?)?)?$/;
return languages.registerCompletionItemProvider(documentSelector, {
provideCompletionItems(doc: TextDocument, pos: Position) {
let lineUntilPos = doc.getText(new Range(new Position(pos.line, 0), pos));
let match = lineUntilPos.match(regionCompletionRegExpr);
if (match) {
let range = new Range(new Position(pos.line, match[1].length), pos);
let beginProposal = new CompletionItem('#region', CompletionItemKind.Snippet);
beginProposal.range = range;
beginProposal.insertText = new SnippetString('/* #region $1*/');
beginProposal.documentation = localize('folding.start', 'Folding Region Start');
beginProposal.filterText = match[2];
beginProposal.sortText = 'za';
let endProposal = new CompletionItem('#endregion', CompletionItemKind.Snippet);
endProposal.range = range;
endProposal.insertText = '/* #endregion */';
endProposal.documentation = localize('folding.end', 'Folding Region End');
endProposal.sortText = 'zb';
endProposal.filterText = match[2];
return [beginProposal, endProposal];
}
return null;
}
});
}
commands.registerCommand('_css.applyCodeAction', applyCodeAction);
function applyCodeAction(uri: string, documentVersion: number, edits: TextEdit[]) {
let textEditor = window.activeTextEditor;
if (textEditor && textEditor.document.uri.toString() === uri) {
if (textEditor.document.version !== documentVersion) {
window.showInformationMessage(`CSS fix is outdated and can't be applied to the document.`);
}
textEditor.edit(mutator => {
for (let edit of edits) {
mutator.replace(client.protocol2CodeConverter.asRange(edit.range), edit.newText);
}
}).then(success => {
if (!success) {
window.showErrorMessage('Failed to apply CSS fix to the document. Please consider opening an issue with steps to reproduce.');
}
});
}
}
}
| updateRanges | identifier_name |
cssClient.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { commands, CompletionItem, CompletionItemKind, ExtensionContext, languages, Position, Range, SnippetString, TextEdit, window, TextDocument, CompletionContext, CancellationToken, ProviderResult, CompletionList } from 'vscode';
import { Disposable, LanguageClientOptions, ProvideCompletionItemsSignature, NotificationType, CommonLanguageClient } from 'vscode-languageclient';
import * as nls from 'vscode-nls';
import { getCustomDataSource } from './customData';
import { RequestService, serveFileSystemRequests } from './requests';
namespace CustomDataChangedNotification {
export const type: NotificationType<string[]> = new NotificationType('css/customDataChanged');
}
const localize = nls.loadMessageBundle();
export type LanguageClientConstructor = (name: string, description: string, clientOptions: LanguageClientOptions) => CommonLanguageClient;
export interface Runtime {
TextDecoder: { new(encoding?: string): { decode(buffer: ArrayBuffer): string; } };
fs?: RequestService;
}
export function startClient(context: ExtensionContext, newLanguageClient: LanguageClientConstructor, runtime: Runtime) {
const customDataSource = getCustomDataSource(context.subscriptions);
let documentSelector = ['css', 'scss', 'less'];
// Options to control the language client
let clientOptions: LanguageClientOptions = {
documentSelector,
synchronize: {
configurationSection: ['css', 'scss', 'less']
},
initializationOptions: {
handledSchemas: ['file']
},
middleware: {
provideCompletionItem(document: TextDocument, position: Position, context: CompletionContext, token: CancellationToken, next: ProvideCompletionItemsSignature): ProviderResult<CompletionItem[] | CompletionList> {
// testing the replace / insert mode
function updateRanges(item: CompletionItem) {
const range = item.range;
if (range instanceof Range && range.end.isAfter(position) && range.start.isBeforeOrEqual(position)) {
item.range = { inserting: new Range(range.start, position), replacing: range };
}
}
function updateLabel(item: CompletionItem) {
if (item.kind === CompletionItemKind.Color) |
}
// testing the new completion
function updateProposals(r: CompletionItem[] | CompletionList | null | undefined): CompletionItem[] | CompletionList | null | undefined {
if (r) {
(Array.isArray(r) ? r : r.items).forEach(updateRanges);
(Array.isArray(r) ? r : r.items).forEach(updateLabel);
}
return r;
}
const isThenable = <T>(obj: ProviderResult<T>): obj is Thenable<T> => obj && (<any>obj)['then'];
const r = next(document, position, context, token);
if (isThenable<CompletionItem[] | CompletionList | null | undefined>(r)) {
return r.then(updateProposals);
}
return updateProposals(r);
}
}
};
// Create the language client and start the client.
let client = newLanguageClient('css', localize('cssserver.name', 'CSS Language Server'), clientOptions);
client.registerProposedFeatures();
client.onReady().then(() => {
client.sendNotification(CustomDataChangedNotification.type, customDataSource.uris);
customDataSource.onDidChange(() => {
client.sendNotification(CustomDataChangedNotification.type, customDataSource.uris);
});
serveFileSystemRequests(client, runtime);
});
let disposable = client.start();
// Push the disposable to the context's subscriptions so that the
// client can be deactivated on extension deactivation
context.subscriptions.push(disposable);
let indentationRules = {
increaseIndentPattern: /(^.*\{[^}]*$)/,
decreaseIndentPattern: /^\s*\}/
};
languages.setLanguageConfiguration('css', {
wordPattern: /(#?-?\d*\.\d\w*%?)|(::?[\w-]*(?=[^,{;]*[,{]))|(([@#.!])?[\w-?]+%?|[@#!.])/g,
indentationRules: indentationRules
});
languages.setLanguageConfiguration('less', {
wordPattern: /(#?-?\d*\.\d\w*%?)|(::?[\w-]+(?=[^,{;]*[,{]))|(([@#.!])?[\w-?]+%?|[@#!.])/g,
indentationRules: indentationRules
});
languages.setLanguageConfiguration('scss', {
wordPattern: /(#?-?\d*\.\d\w*%?)|(::?[\w-]*(?=[^,{;]*[,{]))|(([@$#.!])?[\w-?]+%?|[@#!$.])/g,
indentationRules: indentationRules
});
client.onReady().then(() => {
context.subscriptions.push(initCompletionProvider());
});
function initCompletionProvider(): Disposable {
const regionCompletionRegExpr = /^(\s*)(\/(\*\s*(#\w*)?)?)?$/;
return languages.registerCompletionItemProvider(documentSelector, {
provideCompletionItems(doc: TextDocument, pos: Position) {
let lineUntilPos = doc.getText(new Range(new Position(pos.line, 0), pos));
let match = lineUntilPos.match(regionCompletionRegExpr);
if (match) {
let range = new Range(new Position(pos.line, match[1].length), pos);
let beginProposal = new CompletionItem('#region', CompletionItemKind.Snippet);
beginProposal.range = range;
beginProposal.insertText = new SnippetString('/* #region $1*/');
beginProposal.documentation = localize('folding.start', 'Folding Region Start');
beginProposal.filterText = match[2];
beginProposal.sortText = 'za';
let endProposal = new CompletionItem('#endregion', CompletionItemKind.Snippet);
endProposal.range = range;
endProposal.insertText = '/* #endregion */';
endProposal.documentation = localize('folding.end', 'Folding Region End');
endProposal.sortText = 'zb';
endProposal.filterText = match[2];
return [beginProposal, endProposal];
}
return null;
}
});
}
commands.registerCommand('_css.applyCodeAction', applyCodeAction);
function applyCodeAction(uri: string, documentVersion: number, edits: TextEdit[]) {
let textEditor = window.activeTextEditor;
if (textEditor && textEditor.document.uri.toString() === uri) {
if (textEditor.document.version !== documentVersion) {
window.showInformationMessage(`CSS fix is outdated and can't be applied to the document.`);
}
textEditor.edit(mutator => {
for (let edit of edits) {
mutator.replace(client.protocol2CodeConverter.asRange(edit.range), edit.newText);
}
}).then(success => {
if (!success) {
window.showErrorMessage('Failed to apply CSS fix to the document. Please consider opening an issue with steps to reproduce.');
}
});
}
}
}
| {
item.label2 = {
name: item.label,
type: (item.documentation as string)
};
} | conditional_block |
cssClient.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { commands, CompletionItem, CompletionItemKind, ExtensionContext, languages, Position, Range, SnippetString, TextEdit, window, TextDocument, CompletionContext, CancellationToken, ProviderResult, CompletionList } from 'vscode';
import { Disposable, LanguageClientOptions, ProvideCompletionItemsSignature, NotificationType, CommonLanguageClient } from 'vscode-languageclient';
import * as nls from 'vscode-nls';
import { getCustomDataSource } from './customData';
import { RequestService, serveFileSystemRequests } from './requests';
namespace CustomDataChangedNotification {
export const type: NotificationType<string[]> = new NotificationType('css/customDataChanged');
}
const localize = nls.loadMessageBundle();
export type LanguageClientConstructor = (name: string, description: string, clientOptions: LanguageClientOptions) => CommonLanguageClient;
export interface Runtime {
TextDecoder: { new(encoding?: string): { decode(buffer: ArrayBuffer): string; } };
fs?: RequestService;
}
export function startClient(context: ExtensionContext, newLanguageClient: LanguageClientConstructor, runtime: Runtime) | if (range instanceof Range && range.end.isAfter(position) && range.start.isBeforeOrEqual(position)) {
item.range = { inserting: new Range(range.start, position), replacing: range };
}
}
function updateLabel(item: CompletionItem) {
if (item.kind === CompletionItemKind.Color) {
item.label2 = {
name: item.label,
type: (item.documentation as string)
};
}
}
// testing the new completion
function updateProposals(r: CompletionItem[] | CompletionList | null | undefined): CompletionItem[] | CompletionList | null | undefined {
if (r) {
(Array.isArray(r) ? r : r.items).forEach(updateRanges);
(Array.isArray(r) ? r : r.items).forEach(updateLabel);
}
return r;
}
const isThenable = <T>(obj: ProviderResult<T>): obj is Thenable<T> => obj && (<any>obj)['then'];
const r = next(document, position, context, token);
if (isThenable<CompletionItem[] | CompletionList | null | undefined>(r)) {
return r.then(updateProposals);
}
return updateProposals(r);
}
}
};
// Create the language client and start the client.
let client = newLanguageClient('css', localize('cssserver.name', 'CSS Language Server'), clientOptions);
client.registerProposedFeatures();
client.onReady().then(() => {
client.sendNotification(CustomDataChangedNotification.type, customDataSource.uris);
customDataSource.onDidChange(() => {
client.sendNotification(CustomDataChangedNotification.type, customDataSource.uris);
});
serveFileSystemRequests(client, runtime);
});
let disposable = client.start();
// Push the disposable to the context's subscriptions so that the
// client can be deactivated on extension deactivation
context.subscriptions.push(disposable);
let indentationRules = {
increaseIndentPattern: /(^.*\{[^}]*$)/,
decreaseIndentPattern: /^\s*\}/
};
languages.setLanguageConfiguration('css', {
wordPattern: /(#?-?\d*\.\d\w*%?)|(::?[\w-]*(?=[^,{;]*[,{]))|(([@#.!])?[\w-?]+%?|[@#!.])/g,
indentationRules: indentationRules
});
languages.setLanguageConfiguration('less', {
wordPattern: /(#?-?\d*\.\d\w*%?)|(::?[\w-]+(?=[^,{;]*[,{]))|(([@#.!])?[\w-?]+%?|[@#!.])/g,
indentationRules: indentationRules
});
languages.setLanguageConfiguration('scss', {
wordPattern: /(#?-?\d*\.\d\w*%?)|(::?[\w-]*(?=[^,{;]*[,{]))|(([@$#.!])?[\w-?]+%?|[@#!$.])/g,
indentationRules: indentationRules
});
client.onReady().then(() => {
context.subscriptions.push(initCompletionProvider());
});
function initCompletionProvider(): Disposable {
const regionCompletionRegExpr = /^(\s*)(\/(\*\s*(#\w*)?)?)?$/;
return languages.registerCompletionItemProvider(documentSelector, {
provideCompletionItems(doc: TextDocument, pos: Position) {
let lineUntilPos = doc.getText(new Range(new Position(pos.line, 0), pos));
let match = lineUntilPos.match(regionCompletionRegExpr);
if (match) {
let range = new Range(new Position(pos.line, match[1].length), pos);
let beginProposal = new CompletionItem('#region', CompletionItemKind.Snippet);
beginProposal.range = range;
beginProposal.insertText = new SnippetString('/* #region $1*/');
beginProposal.documentation = localize('folding.start', 'Folding Region Start');
beginProposal.filterText = match[2];
beginProposal.sortText = 'za';
let endProposal = new CompletionItem('#endregion', CompletionItemKind.Snippet);
endProposal.range = range;
endProposal.insertText = '/* #endregion */';
endProposal.documentation = localize('folding.end', 'Folding Region End');
endProposal.sortText = 'zb';
endProposal.filterText = match[2];
return [beginProposal, endProposal];
}
return null;
}
});
}
commands.registerCommand('_css.applyCodeAction', applyCodeAction);
function applyCodeAction(uri: string, documentVersion: number, edits: TextEdit[]) {
let textEditor = window.activeTextEditor;
if (textEditor && textEditor.document.uri.toString() === uri) {
if (textEditor.document.version !== documentVersion) {
window.showInformationMessage(`CSS fix is outdated and can't be applied to the document.`);
}
textEditor.edit(mutator => {
for (let edit of edits) {
mutator.replace(client.protocol2CodeConverter.asRange(edit.range), edit.newText);
}
}).then(success => {
if (!success) {
window.showErrorMessage('Failed to apply CSS fix to the document. Please consider opening an issue with steps to reproduce.');
}
});
}
}
}
| {
const customDataSource = getCustomDataSource(context.subscriptions);
let documentSelector = ['css', 'scss', 'less'];
// Options to control the language client
let clientOptions: LanguageClientOptions = {
documentSelector,
synchronize: {
configurationSection: ['css', 'scss', 'less']
},
initializationOptions: {
handledSchemas: ['file']
},
middleware: {
provideCompletionItem(document: TextDocument, position: Position, context: CompletionContext, token: CancellationToken, next: ProvideCompletionItemsSignature): ProviderResult<CompletionItem[] | CompletionList> {
// testing the replace / insert mode
function updateRanges(item: CompletionItem) {
const range = item.range; | identifier_body |
FilestoreModel.js | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
Ext.define('OPF.console.domain.model.FilestoreModel', {
extend: 'Ext.data.Model',
statics: {
pageSuffixUrl: 'console/domain',
restSuffixUrl: 'registry/filestore',
editorClassName: 'OPF.console.domain.view.system.FilestoreEditor',
constraintName: 'OPF.registry.Filestore'
},
idProperty: 'id',
fields: [
{ name: 'id', type: 'int', useNull: true },
{ name: 'name', type: 'string' },
{ name: 'type', type: 'string' },
{ name: 'path', type: 'string' },
{ name: 'lookup', type: 'string' },
{ name: 'description', type: 'string' },
|
{ name: 'parentId', type: 'int' },
{ name: 'childCount', type: 'int' },
{ name: 'created', type: 'int' },
{ name: 'canUpdate', type: 'boolean' },
{ name: 'canDelete', type: 'boolean' }
]
}); | { name: 'serverName', type: 'string' },
{ name: 'port', type: 'int', defaultValue: 8080 },
{ name: 'urlPath', type: 'string' },
{ name: 'status', type: 'string' },
{ name: 'serverDirectory', type: 'string' }, | random_line_split |
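// Editor's usage sketch (field values are hypothetical):
//   var filestore = Ext.create('OPF.console.domain.model.FilestoreModel', {
//       name: 'main-filestore', serverName: 'localhost', port: 8080
//   });
// The statics block lets generic grid/editor code resolve the REST endpoint
// ('registry/filestore') and the editor class straight from the model.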
pipeline.model.ts | import { AsCodeEvents } from './ascode.model';
import { Parameter } from './parameter.model';
import { Stage } from './stage.model';
import { Usage } from './usage.model';
import { Workflow } from './workflow.model';
export const pipelineNamePattern = new RegExp('^[a-zA-Z0-9._-]+$');
export class PipelineStatus {
static BUILDING = 'Building';
static FAIL = 'Fail';
static SUCCESS = 'Success';
static WAITING = 'Waiting';
static DISABLED = 'Disabled';
static SKIPPED = 'Skipped';
static NEVER_BUILT = 'Never Built';
static STOPPED = 'Stopped';
static PENDING = 'Pending';
static priority = [
PipelineStatus.NEVER_BUILT, PipelineStatus.PENDING, PipelineStatus.WAITING,
PipelineStatus.BUILDING, PipelineStatus.STOPPED,
PipelineStatus.FAIL, PipelineStatus.SUCCESS, PipelineStatus.DISABLED, PipelineStatus.SKIPPED
];
static neverRun(status: string) {
return status === this.SKIPPED || status === this.NEVER_BUILT || status === this.DISABLED;
}
static isActive(status: string) {
return status === this.WAITING || status === this.BUILDING || status === this.PENDING;
}
static isDone(status: string) {
return status === this.SUCCESS || status === this.STOPPED || status === this.FAIL ||
status === this.SKIPPED || status === this.DISABLED;
}
static sum(status: Array<string>): string {
const sum = status.map(s => PipelineStatus.priority.indexOf(s)).reduce((sum, num) => {
if (num > -1 && num < sum) { return num; }
return sum;
});
if (sum === -1) |
return PipelineStatus.priority[sum];
}
}
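// Editor's sketch (hypothetical input): sum() keeps the highest-priority entry,
// i.e. the lowest index in `priority`, so a workflow with one finished and one
// running pipeline still reports as running:
//   PipelineStatus.sum([PipelineStatus.SUCCESS, PipelineStatus.BUILDING]) === PipelineStatus.BUILDING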
export class PipelineAudit {
id: number;
username: string;
versionned: Date;
pipeline: Pipeline;
action: string;
}
export class PipelineAuditDiff {
type: string;
before: any;
after: any;
title: string;
}
export class Pipeline {
id: number;
name: string;
description: string;
icon: string;
stages: Array<Stage>;
parameters: Array<Parameter>;
last_modified: number;
projectKey: string;
usage: Usage;
audits: Array<PipelineAudit>;
preview: Pipeline;
asCode: string;
from_repository: string;
workflow_ascode_holder: Workflow;
ascode_events: Array<AsCodeEvents>;
// true if someone has updated the pipeline (used for warnings)
externalChange: boolean;
// UI Params
forceRefresh: boolean;
previewMode: boolean;
editModeChanged: boolean;
constructor() {
this.usage = new Usage();
}
// Return true if pattern is good
public static checkName(name: string): boolean {
if (!name) {
return false;
}
return pipelineNamePattern.test(name);
}
public static hasParameterWithoutValue(pipeline: Pipeline) {
if (pipeline.parameters) {
let emptyParams = pipeline.parameters.filter(p => !p.value || p.value === '');
return emptyParams.length > 0;
}
return false;
}
public static mergeAndKeepOld(ref: Array<Parameter>, current: Array<Parameter>): Array<Parameter> {
if (!current) {
return ref;
}
if (!ref) {
return current;
}
let mapParam = current.reduce((m, o) => {
m[o.name] = o;
return m;
}, {});
ref.forEach(a => {
if (!mapParam[a.name]) {
current.push(a);
}
});
return current;
}
/**
* Merge parameters, keeping ref's names but preferring values from current.
*
* @param ref reference list of parameters (defines which names survive)
* @param current list carrying the values entered so far
*/
public static mergeParams(ref: Array<Parameter>, current: Array<Parameter>): Array<Parameter> {
if (!ref) {
return [];
}
if (!current || current.length === 0) {
return ref;
}
return ref.map(p => {
let idFound = current.findIndex((c) => c.name === p.name);
return idFound === -1 ? p : current[idFound];
});
}
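// Editor's sketch (hypothetical parameters): mergeParams keeps ref's shape but
// prefers values already entered, so with
//   ref = [{name: 'env', value: 'dev'}], current = [{name: 'env', value: 'prod'}]
// the result carries value 'prod'; current entries whose names left ref are dropped.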
static InitRef(editPipeline: Pipeline) {
if (editPipeline && editPipeline.stages) {
editPipeline.stages.forEach(s => {
let nextRef;
do {
nextRef = Math.random();
} while (editPipeline.stages.findIndex(stg => stg.ref === nextRef) !== -1);
s.ref = nextRef;
if (s.jobs) {
s.jobs.forEach(j => {
let nextJobRef;
let loopAgain = true;
do {
nextJobRef = Math.random();
loopAgain = editPipeline.stages.findIndex(st => st.jobs.findIndex(jb => jb.ref === nextJobRef) !== -1) !== -1;
} while (loopAgain);
j.ref = nextJobRef;
});
}
});
}
}
}
export class SpawnInfo {
api_time: Date;
remote_time: Date;
type: string;
message: SpawnInfoMessage;
user_message: string;
}
export class SpawnInfoMessage {
args: Array<string>;
id: string;
}
export class CDNLogLink {
item_type: string;
api_ref: string;
}
export class CDNLogsLines {
api_ref: string
lines_count: number
}
export class CDNLogLinks {
item_type: string;
datas: Array<CDNLogLinkData>;
}
export class CDNLogLinkData {
api_ref: string
step_order: number
requirement_id: number
}
export class CDNLinesResponse {
totalCount: number;
lines: Array<CDNLine>;
}
export class CDNLine {
number: number;
value: string;
api_ref_hash: string;
since: number; // the count of milliseconds since job start
// properties used by ui only
extra: Array<string>;
}
export class CDNStreamFilter {
item_type: string;
job_run_id: number;
}
export class Tests {
pipeline_build_id: number;
total: number;
ok: number;
ko: number;
skipped: number;
test_suites: Array<TestSuite>;
static getColor(t: string): string {
switch (t) {
case 'ok':
return '#21BA45';
case 'ko':
return '#FF4F60';
case 'skip':
return '#808080';
}
}
}
export class TestSuite {
disabled: number;
errors: number;
failures: number;
id: string;
name: string;
package: string;
skipped: number;
total: number;
time: string;
timestamp: string;
tests: Array<TestCase>;
}
export class TestCase {
classname: string;
fullname: string;
name: string;
time: string;
errors: Array<Failure>;
failures: Array<Failure>;
status: string;
skipped: Array<Skipped>;
systemout: InnerResult;
systemerr: InnerResult;
// UI param
displayed: boolean;
messages: string;
}
// Failure contains data related to a failed test.
export class Failure {
value: string;
type: string;
message: string;
}
// Skipped contains data related to a skipped test.
export class Skipped {
value: string;
message: string;
}
// InnerResult is used by TestCase
export interface InnerResult {
value: string;
}
| {
return null;
} | conditional_block |
pipeline.model.ts | import { AsCodeEvents } from './ascode.model';
import { Parameter } from './parameter.model';
import { Stage } from './stage.model';
import { Usage } from './usage.model';
import { Workflow } from './workflow.model';
export const pipelineNamePattern = new RegExp('^[a-zA-Z0-9._-]+$');
export class PipelineStatus {
static BUILDING = 'Building';
static FAIL = 'Fail';
static SUCCESS = 'Success';
static WAITING = 'Waiting';
static DISABLED = 'Disabled';
static SKIPPED = 'Skipped';
static NEVER_BUILT = 'Never Built';
static STOPPED = 'Stopped';
static PENDING = 'Pending';
static priority = [
PipelineStatus.NEVER_BUILT, PipelineStatus.PENDING, PipelineStatus.WAITING,
PipelineStatus.BUILDING, PipelineStatus.STOPPED,
PipelineStatus.FAIL, PipelineStatus.SUCCESS, PipelineStatus.DISABLED, PipelineStatus.SKIPPED
];
static neverRun(status: string) {
return status === this.SKIPPED || status === this.NEVER_BUILT || status === this.DISABLED;
}
static isActive(status: string) {
return status === this.WAITING || status === this.BUILDING || status === this.PENDING;
}
static isDone(status: string) {
return status === this.SUCCESS || status === this.STOPPED || status === this.FAIL ||
status === this.SKIPPED || status === this.DISABLED;
}
static sum(status: Array<string>): string {
const sum = status.map(s => PipelineStatus.priority.indexOf(s)).reduce((sum, num) => {
if (num > -1 && num < sum) { return num; }
return sum;
});
if (sum === -1) {
return null;
}
return PipelineStatus.priority[sum];
}
}
export class PipelineAudit {
id: number;
username: string;
versionned: Date;
pipeline: Pipeline;
action: string;
}
export class PipelineAuditDiff {
type: string; | before: any;
after: any;
title: string;
}
export class Pipeline {
id: number;
name: string;
description: string;
icon: string;
stages: Array<Stage>;
parameters: Array<Parameter>;
last_modified: number;
projectKey: string;
usage: Usage;
audits: Array<PipelineAudit>;
preview: Pipeline;
asCode: string;
from_repository: string;
workflow_ascode_holder: Workflow;
ascode_events: Array<AsCodeEvents>;
// true if someone has updated the pipeline (used for warnings)
externalChange: boolean;
// UI Params
forceRefresh: boolean;
previewMode: boolean;
editModeChanged: boolean;
constructor() {
this.usage = new Usage();
}
// Return true if pattern is good
public static checkName(name: string): boolean {
if (!name) {
return false;
}
return pipelineNamePattern.test(name);
}
public static hasParameterWithoutValue(pipeline: Pipeline) {
if (pipeline.parameters) {
let emptyParams = pipeline.parameters.filter(p => !p.value || p.value === '');
return emptyParams.length > 0;
}
return false;
}
public static mergeAndKeepOld(ref: Array<Parameter>, current: Array<Parameter>): Array<Parameter> {
if (!current) {
return ref;
}
if (!ref) {
return current;
}
let mapParam = current.reduce((m, o) => {
m[o.name] = o;
return m;
}, {});
ref.forEach(a => {
if (!mapParam[a.name]) {
current.push(a);
}
});
return current;
}
/**
* Merge parameters, keeping ref's names but preferring values from current.
*
* @param ref reference list of parameters (defines which names survive)
* @param current list carrying the values entered so far
*/
public static mergeParams(ref: Array<Parameter>, current: Array<Parameter>): Array<Parameter> {
if (!ref) {
return [];
}
if (!current || current.length === 0) {
return ref;
}
return ref.map(p => {
let idFound = current.findIndex((c) => c.name === p.name);
return idFound === -1 ? p : current[idFound];
});
}
static InitRef(editPipeline: Pipeline) {
if (editPipeline && editPipeline.stages) {
editPipeline.stages.forEach(s => {
let nextRef;
do {
nextRef = Math.random();
} while (editPipeline.stages.findIndex(stg => stg.ref === nextRef) !== -1);
s.ref = nextRef;
if (s.jobs) {
s.jobs.forEach(j => {
let nextJobRef;
let loopAgain = true;
do {
nextJobRef = Math.random();
loopAgain = editPipeline.stages.findIndex(st => st.jobs.findIndex(jb => jb.ref === nextJobRef) !== -1) !== -1;
} while (loopAgain);
j.ref = nextJobRef;
});
}
});
}
}
}
export class SpawnInfo {
api_time: Date;
remote_time: Date;
type: string;
message: SpawnInfoMessage;
user_message: string;
}
export class SpawnInfoMessage {
args: Array<string>;
id: string;
}
export class CDNLogLink {
item_type: string;
api_ref: string;
}
export class CDNLogsLines {
api_ref: string
lines_count: number
}
export class CDNLogLinks {
item_type: string;
datas: Array<CDNLogLinkData>;
}
export class CDNLogLinkData {
api_ref: string
step_order: number
requirement_id: number
}
export class CDNLinesResponse {
totalCount: number;
lines: Array<CDNLine>;
}
export class CDNLine {
number: number;
value: string;
api_ref_hash: string;
since: number; // the count of milliseconds since job start
// properties used by ui only
extra: Array<string>;
}
export class CDNStreamFilter {
item_type: string;
job_run_id: number;
}
export class Tests {
pipeline_build_id: number;
total: number;
ok: number;
ko: number;
skipped: number;
test_suites: Array<TestSuite>;
static getColor(t: string): string {
switch (t) {
case 'ok':
return '#21BA45';
case 'ko':
return '#FF4F60';
case 'skip':
return '#808080';
}
}
}
export class TestSuite {
disabled: number;
errors: number;
failures: number;
id: string;
name: string;
package: string;
skipped: number;
total: number;
time: string;
timestamp: string;
tests: Array<TestCase>;
}
export class TestCase {
classname: string;
fullname: string;
name: string;
time: string;
errors: Array<Failure>;
failures: Array<Failure>;
status: string;
skipped: Array<Skipped>;
systemout: InnerResult;
systemerr: InnerResult;
// UI param
displayed: boolean;
messages: string;
}
// Failure contains data related to a failed test.
export class Failure {
value: string;
type: string;
message: string;
}
// Skipped contains data related to a skipped test.
export class Skipped {
value: string;
message: string;
}
// InnerResult is used by TestCase
export interface InnerResult {
value: string;
} | random_line_split |
|
pipeline.model.ts | import { AsCodeEvents } from './ascode.model';
import { Parameter } from './parameter.model';
import { Stage } from './stage.model';
import { Usage } from './usage.model';
import { Workflow } from './workflow.model';
export const pipelineNamePattern = new RegExp('^[a-zA-Z0-9._-]+$');
export class PipelineStatus {
static BUILDING = 'Building';
static FAIL = 'Fail';
static SUCCESS = 'Success';
static WAITING = 'Waiting';
static DISABLED = 'Disabled';
static SKIPPED = 'Skipped';
static NEVER_BUILT = 'Never Built';
static STOPPED = 'Stopped';
static PENDING = 'Pending';
static priority = [
PipelineStatus.NEVER_BUILT, PipelineStatus.PENDING, PipelineStatus.WAITING,
PipelineStatus.BUILDING, PipelineStatus.STOPPED,
PipelineStatus.FAIL, PipelineStatus.SUCCESS, PipelineStatus.DISABLED, PipelineStatus.SKIPPED
];
static neverRun(status: string) {
return status === this.SKIPPED || status === this.NEVER_BUILT || status === this.DISABLED;
}
static isActive(status: string) {
return status === this.WAITING || status === this.BUILDING || status === this.PENDING;
}
static isDone(status: string) {
return status === this.SUCCESS || status === this.STOPPED || status === this.FAIL ||
status === this.SKIPPED || status === this.DISABLED;
}
static sum(status: Array<string>): string {
const sum = status.map(s => PipelineStatus.priority.indexOf(s)).reduce((sum, num) => {
if (num > -1 && num < sum) { return num; }
return sum;
});
if (sum === -1) {
return null;
}
return PipelineStatus.priority[sum];
}
}
export class PipelineAudit {
id: number;
username: string;
versionned: Date;
pipeline: Pipeline;
action: string;
}
export class PipelineAuditDiff {
type: string;
before: any;
after: any;
title: string;
}
export class Pipeline {
id: number;
name: string;
description: string;
icon: string;
stages: Array<Stage>;
parameters: Array<Parameter>;
last_modified: number;
projectKey: string;
usage: Usage;
audits: Array<PipelineAudit>;
preview: Pipeline;
asCode: string;
from_repository: string;
workflow_ascode_holder: Workflow;
ascode_events: Array<AsCodeEvents>;
// true if someone has updated the pipeline (used for warnings)
externalChange: boolean;
// UI Params
forceRefresh: boolean;
previewMode: boolean;
editModeChanged: boolean;
| () {
this.usage = new Usage();
}
// Return true if pattern is good
public static checkName(name: string): boolean {
if (!name) {
return false;
}
return pipelineNamePattern.test(name);
}
public static hasParameterWithoutValue(pipeline: Pipeline) {
if (pipeline.parameters) {
let emptyParams = pipeline.parameters.filter(p => !p.value || p.value === '');
return emptyParams.length > 0;
}
return false;
}
public static mergeAndKeepOld(ref: Array<Parameter>, current: Array<Parameter>): Array<Parameter> {
if (!current) {
return ref;
}
if (!ref) {
return current;
}
let mapParam = current.reduce((m, o) => {
m[o.name] = o;
return m;
}, {});
ref.forEach(a => {
if (!mapParam[a.name]) {
current.push(a);
}
});
return current;
}
/**
* Merge parameters
*
* @param ref
* @param current
*/
public static mergeParams(ref: Array<Parameter>, current: Array<Parameter>): Array<Parameter> {
if (!ref) {
return [];
}
if (!current || current.length === 0) {
return ref;
}
return ref.map(p => {
let idFound = current.findIndex((c) => c.name === p.name);
return idFound === -1 ? p : current[idFound];
});
}
static InitRef(editPipeline: Pipeline) {
if (editPipeline && editPipeline.stages) {
editPipeline.stages.forEach(s => {
let nextRef;
do {
nextRef = Math.random();
} while (editPipeline.stages.findIndex(stg => stg.ref === nextRef) !== -1);
s.ref = nextRef;
if (s.jobs) {
s.jobs.forEach(j => {
let nextJobRef;
let loopAgain = true;
do {
nextJobRef = Math.random();
loopAgain = editPipeline.stages.findIndex(st => st.jobs.findIndex(jb => jb.ref === nextJobRef) !== -1) !== -1;
} while (loopAgain);
j.ref = nextJobRef;
});
}
});
}
}
}
export class SpawnInfo {
api_time: Date;
remote_time: Date;
type: string;
message: SpawnInfoMessage;
user_message: string;
}
export class SpawnInfoMessage {
args: Array<string>;
id: string;
}
export class CDNLogLink {
item_type: string;
api_ref: string;
}
export class CDNLogsLines {
api_ref: string;
lines_count: number;
}
export class CDNLogLinks {
item_type: string;
datas: Array<CDNLogLinkData>;
}
export class CDNLogLinkData {
api_ref: string;
step_order: number;
requirement_id: number;
}
export class CDNLinesResponse {
totalCount: number;
lines: Array<CDNLine>;
}
export class CDNLine {
number: number;
value: string;
api_ref_hash: string;
since: number; // the count of milliseconds since job start
// properties used by ui only
extra: Array<string>;
}
export class CDNStreamFilter {
item_type: string;
job_run_id: number;
}
export class Tests {
pipeline_build_id: number;
total: number;
ok: number;
ko: number;
skipped: number;
test_suites: Array<TestSuite>;
static getColor(t: string): string {
switch (t) {
case 'ok':
return '#21BA45';
case 'ko':
return '#FF4F60';
case 'skip':
return '#808080';
}
}
}
export class TestSuite {
disabled: number;
errors: number;
failures: number;
id: string;
name: string;
package: string;
skipped: number;
total: number;
time: string;
timestamp: string;
tests: Array<TestCase>;
}
export class TestCase {
classname: string;
fullname: string;
name: string;
time: string;
errors: Array<Failure>;
failures: Array<Failure>;
status: string;
skipped: Array<Skipped>;
systemout: InnerResult;
systemerr: InnerResult;
// UI param
displayed: boolean;
messages: string;
}
// Failure contains data related to a failed test.
export class Failure {
value: string;
type: string;
message: string;
}
// Skipped contains data related to a skipped test.
export class Skipped {
value: string;
message: string;
}
// InnerResult is used by TestCase
export interface InnerResult {
value: string;
}
| constructor | identifier_name |
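A hedged usage sketch for the pipeline model above; the parameter values are invented, and Parameter construction is simplified (the real class may carry more fields).

```typescript
// Sketch only: Parameter construction is simplified for illustration.
const asParam = (name: string, value: string): Parameter =>
    Object.assign(new Parameter(), { name, value });

// mergeParams keeps the current value when names match:
const merged = Pipeline.mergeParams(
    [asParam('env', 'prod')], // reference list
    [asParam('env', 'dev')]   // current values
); // -> one parameter, value 'dev'

// sum picks the highest-priority (lowest index) status:
const overall = PipelineStatus.sum([PipelineStatus.SUCCESS, PipelineStatus.BUILDING]);
// -> 'Building'
```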
TextAnalyzer.py | import codecs
import logging
logger = logging.getLogger(__name__)
class TextAnalyzer:
def __init__(self):
logger.debug('-- Initializing TextAnalyzer --')
"""
This function reads a stop-word file (stoplist_tno.tab) and returns those words in
a dictionary
"""
def readStopWordsFile(self, strStopFile):
if not strStopFile:
|
""" read stopwords from file as dictionary. """
stopWords = {}
try:
f = codecs.open(strStopFile,'rU','utf-8') # NB. Use 'U'-mode for UniversalNewline Support
for line in f.readlines():
word = line.partition('::')[0].strip()#.decode('utf-8')
stopWords[word] = 1
f.close()
except IOError as e:
msg = 'Can\'t open stopfile %s for reading. %s' % (strStopFile, str(e))
logger.error(msg)
return None
return stopWords | strStopFile = self._stopWordsFile | conditional_block |
TextAnalyzer.py | import codecs
import logging
logger = logging.getLogger(__name__)
class TextAnalyzer:
def __init__(self):
logger.debug('-- Initializing TextAnalyzer --')
"""
This function reads a stop-word file (stoplist_tno.tab) and returns those words in
a dictionary
"""
def readStopWordsFile(self, strStopFile):
if not strStopFile:
strStopFile = self._stopWordsFile
""" read stopwords from file as dictionary. """
stopWords = {}
try:
f = codecs.open(strStopFile,'rU','utf-8') # NB. Use 'U'-mode for UniversalNewline Support
for line in f.readlines():
word = line.partition('::')[0].strip()#.decode('utf-8') | stopWords[word] = 1
f.close()
except IOError as e:
msg = 'Can\'t open stopfile %s for reading. %s' % (strStopFile, str(e))
logger.error(msg)
return None
return stopWords | random_line_split |
|
TextAnalyzer.py | import codecs
import logging
logger = logging.getLogger(__name__)
class | :
def __init__(self):
logger.debug('-- Initializing TextAnalyzer --')
"""
This function reads a stop-word file (stoplist_tno.tab) and returns those words in
a dictionary
"""
def readStopWordsFile(self, strStopFile):
if not strStopFile:
strStopFile = self._stopWordsFile
""" read stopwords from file as dictionary. """
stopWords = {}
try:
f = codecs.open(strStopFile,'rU','utf-8') # NB. Use 'U'-mode for UniversalNewline Support
for line in f.readlines():
word = line.partition('::')[0].strip()#.decode('utf-8')
stopWords[word] = 1
f.close()
except IOError as e:
msg = 'Can\'t open stopfile %s for reading. %s' % (strStopFile, str(e))
logger.error(msg)
return None
return stopWords | TextAnalyzer | identifier_name |
TextAnalyzer.py | import codecs
import logging
logger = logging.getLogger(__name__)
class TextAnalyzer:
def __init__(self):
|
"""
This function reads a stop-word file (stoplist_tno.tab) and returns those words in
a dictionary
"""
def readStopWordsFile(self, strStopFile):
if not strStopFile:
strStopFile = self._stopWordsFile
""" read stopwords from file as dictionary. """
stopWords = {}
try:
f = codecs.open(strStopFile,'rU','utf-8') # NB. Use 'U'-mode for UniversalNewline Support
for line in f.readlines():
word = line.partition('::')[0].strip()#.decode('utf-8')
stopWords[word] = 1
f.close()
except IOError as e:
msg = 'Can\'t open stopfile %s for reading. %s' % (strStopFile, str(e))
logger.error(msg)
return None
return stopWords | logger.debug('-- Initializing TextAnalyzer --') | identifier_body |
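A small usage sketch for the class above; the file path is an assumption, and each line of the stop-word file is expected to look like "word::rest".

```python
# Hedged usage sketch; 'stoplist_tno.tab' is an assumed path.
analyzer = TextAnalyzer()
stop_words = analyzer.readStopWordsFile('stoplist_tno.tab')
if stop_words is not None:
    print('loaded %d stop words' % len(stop_words))
```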
mod.rs | mod version;
mod init;
mod config;
mod ignore;
mod sweep;
mod burn;
mod start;
mod end;
mod destroy;
mod patrol;
use self::version::Version;
use self::init::Init;
use self::config::Config;
use self::ignore::Ignore;
use self::sweep::Sweep;
use self::burn::Burn;
use self::start::Start;
use self::end::End;
use self::destroy::Destroy;
use self::patrol::Patrol;
use constant::BANNED_DIRS;
use error::{CliError, EssentialLack, EssentialKind, RunningPlaceError, Usage, UsageKind};
use lib::io::*;
use lib::setting;
use std::env;
use std::fmt::Display;
trait Command {
fn allow_to_check_current_dir(&self) -> bool { true }
fn check_current_dir(&self) -> Result<(), CliError> {
let current_dir = try!(env::current_dir());
match BANNED_DIRS.iter().find(|d| current_dir.ends_with(d)) {
Some(d) => Err(From::from(RunningPlaceError::new(d.to_string()))),
None => Ok(()),
}
}
fn | (&self) -> bool { true }
fn check_settings(&self) -> Result<(), EssentialLack> {
if !setting::working_dir_exists() {
return Err(EssentialLack::new(EssentialKind::WorkingDir));
}
if !setting::Storage::exist() {
return Err(EssentialLack::new(EssentialKind::StorageDir));
}
if !setting::Config::exist() {
return Err(EssentialLack::new(EssentialKind::ConfigFile));
}
if !setting::Ignore::exist() {
return Err(EssentialLack::new(EssentialKind::IgnoreFile));
}
Ok(())
}
fn usage(&self) -> Usage;
fn main(&self) -> Result<(), CliError>;
fn exec(&self, need_help: bool) -> Result<(), CliError> {
if need_help {
return Err(From::from(self.usage()));
}
if self.allow_to_check_current_dir() {
try!(self.check_current_dir());
}
if self.allow_to_check_settings() {
try!(self.check_settings());
}
self.main()
}
fn run_after_confirmation<D: Display, F>(message: D, danger_exec: F) -> Result<(), CliError>
where Self: Sized, F: FnOnce() -> Result<(), CliError>
{
echo(format_with_tag(Tag::Caution, format!("{} [yes/no]: ", message)));
let input = try!(read_line_from_stdin()).to_lowercase();
match input.as_ref() {
"y" | "yes" => try!(danger_exec()),
_ => print_with_tag(Tag::Notice, "Interrupted by user"),
};
Ok(())
}
}
pub fn execute(args: Vec<String>) -> Result<(), CliError> {
let mut args = args.into_iter();
let command = try!(args.next().ok_or(Usage::new(UsageKind::Nothing)));
let (command, need_help) = if &command == "help" {
(try!(args.next().ok_or(Usage::new(UsageKind::Help))), true)
} else {
(command, false)
};
match command.as_ref() {
"version" => Version .exec(need_help),
"init" => Init .exec(need_help),
"config" => Config::new(args.next(), args.next(), args.next()).exec(need_help),
"ignore" => Ignore::new(args.next(), args.collect()) .exec(need_help),
"sweep" => Sweep::new(args.next(), args.next()) .exec(need_help),
"burn" => Burn::new(args.next()) .exec(need_help),
"start" => Start .exec(need_help),
"end" => End .exec(need_help),
"destroy" => Destroy .exec(need_help),
"patrol" => Patrol .exec(need_help),
_ => Err(From::from(Usage::new(UsageKind::Nothing))),
}
}
pub fn clean_up() -> Result<(), CliError> {
// This "if" is temporary; it avoids an error on non-"unix" platforms.
if cfg!(unix) {
End.main()
} else {
Ok(())
}
}
| allow_to_check_settings | identifier_name |
mod.rs | mod version;
mod init;
mod config;
mod ignore;
mod sweep;
mod burn;
mod start;
mod end;
mod destroy;
mod patrol;
use self::version::Version;
use self::init::Init;
use self::config::Config;
use self::ignore::Ignore;
use self::sweep::Sweep;
use self::burn::Burn;
use self::start::Start;
use self::end::End;
use self::destroy::Destroy;
use self::patrol::Patrol;
use constant::BANNED_DIRS;
use error::{CliError, EssentialLack, EssentialKind, RunningPlaceError, Usage, UsageKind};
use lib::io::*;
use lib::setting;
use std::env;
use std::fmt::Display;
trait Command {
fn allow_to_check_current_dir(&self) -> bool { true }
fn check_current_dir(&self) -> Result<(), CliError> |
fn allow_to_check_settings(&self) -> bool { true }
fn check_settings(&self) -> Result<(), EssentialLack> {
if !setting::working_dir_exists() {
return Err(EssentialLack::new(EssentialKind::WorkingDir));
}
if !setting::Storage::exist() {
return Err(EssentialLack::new(EssentialKind::StorageDir));
}
if !setting::Config::exist() {
return Err(EssentialLack::new(EssentialKind::ConfigFile));
}
if !setting::Ignore::exist() {
return Err(EssentialLack::new(EssentialKind::IgnoreFile));
}
Ok(())
}
fn usage(&self) -> Usage;
fn main(&self) -> Result<(), CliError>;
fn exec(&self, need_help: bool) -> Result<(), CliError> {
if need_help {
return Err(From::from(self.usage()));
}
if self.allow_to_check_current_dir() {
try!(self.check_current_dir());
}
if self.allow_to_check_settings() {
try!(self.check_settings());
}
self.main()
}
fn run_after_confirmation<D: Display, F>(message: D, danger_exec: F) -> Result<(), CliError>
where Self: Sized, F: FnOnce() -> Result<(), CliError>
{
echo(format_with_tag(Tag::Caution, format!("{} [yes/no]: ", message)));
let input = try!(read_line_from_stdin()).to_lowercase();
match input.as_ref() {
"y" | "yes" => try!(danger_exec()),
_ => print_with_tag(Tag::Notice, "Interrupted by user"),
};
Ok(())
}
}
pub fn execute(args: Vec<String>) -> Result<(), CliError> {
let mut args = args.into_iter();
let command = try!(args.next().ok_or(Usage::new(UsageKind::Nothing)));
let (command, need_help) = if &command == "help" {
(try!(args.next().ok_or(Usage::new(UsageKind::Help))), true)
} else {
(command, false)
};
match command.as_ref() {
"version" => Version .exec(need_help),
"init" => Init .exec(need_help),
"config" => Config::new(args.next(), args.next(), args.next()).exec(need_help),
"ignore" => Ignore::new(args.next(), args.collect()) .exec(need_help),
"sweep" => Sweep::new(args.next(), args.next()) .exec(need_help),
"burn" => Burn::new(args.next()) .exec(need_help),
"start" => Start .exec(need_help),
"end" => End .exec(need_help),
"destroy" => Destroy .exec(need_help),
"patrol" => Patrol .exec(need_help),
_ => Err(From::from(Usage::new(UsageKind::Nothing))),
}
}
pub fn clean_up() -> Result<(), CliError> {
// This "if" is temporary; it avoids an error on non-"unix" platforms.
if cfg!(unix) {
End.main()
} else {
Ok(())
}
}
| {
let current_dir = try!(env::current_dir());
match BANNED_DIRS.iter().find(|d| current_dir.ends_with(d)) {
Some(d) => Err(From::from(RunningPlaceError::new(d.to_string()))),
None => Ok(()),
}
} | identifier_body |
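An illustrative (non-authoritative) implementation of the `Command` trait above; `Noop` is a stand-in type, not part of the module, and only shows which methods a command must supply before `exec` can drive it.

```rust
// Sketch only: a minimal Command implementor. The two required methods are
// usage() and main(); the checks in exec() come from the default methods.
struct Noop;

impl Command for Noop {
    fn usage(&self) -> Usage {
        Usage::new(UsageKind::Nothing)
    }

    fn main(&self) -> Result<(), CliError> {
        print_with_tag(Tag::Notice, "nothing to do");
        Ok(())
    }
}
```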
mod.rs | mod version;
mod init;
mod config;
mod ignore;
mod sweep;
mod burn;
mod start;
mod end;
mod destroy;
mod patrol;
use self::version::Version;
use self::init::Init;
use self::config::Config;
use self::ignore::Ignore;
use self::sweep::Sweep;
use self::burn::Burn;
use self::start::Start;
use self::end::End;
use self::destroy::Destroy;
use self::patrol::Patrol;
use constant::BANNED_DIRS;
use error::{CliError, EssentialLack, EssentialKind, RunningPlaceError, Usage, UsageKind};
use lib::io::*;
use lib::setting;
use std::env;
use std::fmt::Display;
trait Command {
fn allow_to_check_current_dir(&self) -> bool { true }
fn check_current_dir(&self) -> Result<(), CliError> {
let current_dir = try!(env::current_dir());
match BANNED_DIRS.iter().find(|d| current_dir.ends_with(d)) {
Some(d) => Err(From::from(RunningPlaceError::new(d.to_string()))),
None => Ok(()),
}
}
fn allow_to_check_settings(&self) -> bool { true }
fn check_settings(&self) -> Result<(), EssentialLack> {
if !setting::working_dir_exists() {
return Err(EssentialLack::new(EssentialKind::WorkingDir));
}
if !setting::Storage::exist() {
return Err(EssentialLack::new(EssentialKind::StorageDir));
}
if !setting::Config::exist() {
return Err(EssentialLack::new(EssentialKind::ConfigFile));
}
if !setting::Ignore::exist() {
return Err(EssentialLack::new(EssentialKind::IgnoreFile));
}
Ok(())
}
fn usage(&self) -> Usage;
fn main(&self) -> Result<(), CliError>;
fn exec(&self, need_help: bool) -> Result<(), CliError> {
if need_help {
return Err(From::from(self.usage()));
}
if self.allow_to_check_current_dir() {
try!(self.check_current_dir());
}
if self.allow_to_check_settings() {
try!(self.check_settings());
}
self.main()
}
fn run_after_confirmation<D: Display, F>(message: D, danger_exec: F) -> Result<(), CliError>
where Self: Sized, F: FnOnce() -> Result<(), CliError>
{
echo(format_with_tag(Tag::Caution, format!("{} [yes/no]: ", message)));
let input = try!(read_line_from_stdin()).to_lowercase();
match input.as_ref() {
"y" | "yes" => try!(danger_exec()),
_ => print_with_tag(Tag::Notice, "Interrupted by user"),
};
Ok(())
}
}
pub fn execute(args: Vec<String>) -> Result<(), CliError> {
let mut args = args.into_iter();
let command = try!(args.next().ok_or(Usage::new(UsageKind::Nothing)));
let (command, need_help) = if &command == "help" {
(try!(args.next().ok_or(Usage::new(UsageKind::Help))), true) | };
match command.as_ref() {
"version" => Version .exec(need_help),
"init" => Init .exec(need_help),
"config" => Config::new(args.next(), args.next(), args.next()).exec(need_help),
"ignore" => Ignore::new(args.next(), args.collect()) .exec(need_help),
"sweep" => Sweep::new(args.next(), args.next()) .exec(need_help),
"burn" => Burn::new(args.next()) .exec(need_help),
"start" => Start .exec(need_help),
"end" => End .exec(need_help),
"destroy" => Destroy .exec(need_help),
"patrol" => Patrol .exec(need_help),
_ => Err(From::from(Usage::new(UsageKind::Nothing))),
}
}
pub fn clean_up() -> Result<(), CliError> {
// This "if" is temporary; it avoids an error on non-"unix" platforms.
if cfg!(unix) {
End.main()
} else {
Ok(())
}
} | } else {
(command, false) | random_line_split |
es_backup.py | #!/usr/bin/env python
import time, logging, argparse, json, sys
from es_manager import ElasticsearchSnapshotManager, get_parser
from elasticsearch import exceptions
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('elasticsearch')
def take_snapshot(options):
| logger.info('TOTAL: %d - Will delete 1 -> %d' % (num_snaps, up_to + 1))
for snap in snapshots[0:up_to]:
sh.delete(repository=options.repository, snapshot=snap['snapshot'], request_timeout=3600)
logger.info('Deleted snapshot %s' % snap['snapshot'])
except exceptions.TransportError as e:
pass
if __name__ == '__main__':
parser = get_parser("This script will take a snapshot and upload to S3")
parser.add_argument("--wait", action="store_true", default=True, help="Wait for the backup to complete")
parser.add_argument("--keep", action="store", default=60, help="Number of Elasticsearch snapshots to keep in S3")
options = parser.parse_args()
if options.debug:
logger.setLevel(logging.DEBUG)
take_snapshot(options)
| esm = ElasticsearchSnapshotManager(options)
sh = esm.sh
snapshot = options.snapshot and options.snapshot or 'all_' + time.strftime('%Y%m%d%H')
snapdef = {
"include_global_state": True
}
if options.indices:
snapdef['indices'] = ','.join(options.indices)
try:
sh.create(repository=options.repository, snapshot=snapshot, body=json.dumps(snapdef), wait_for_completion=options.wait, request_timeout=7200)
# Housekeeping - delete old snapshots
snapshots = sh.get(repository=options.repository, snapshot="_all", request_timeout=120)['snapshots']
num_snaps = len(snapshots)
if num_snaps > options.keep:
up_to = num_snaps - options.keep | identifier_body |
es_backup.py | from elasticsearch import exceptions
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('elasticsearch')
def take_snapshot(options):
esm = ElasticsearchSnapshotManager(options)
sh = esm.sh
snapshot = options.snapshot and options.snapshot or 'all_' + time.strftime('%Y%m%d%H')
snapdef = {
"include_global_state": True
}
if options.indices:
snapdef['indices'] = ','.join(options.indices)
try:
sh.create(repository=options.repository, snapshot=snapshot, body=json.dumps(snapdef), wait_for_completion=options.wait, request_timeout=7200)
# Housekeeping - delete old snapshots
snapshots = sh.get(repository=options.repository, snapshot="_all", request_timeout=120)['snapshots']
num_snaps = len(snapshots)
if num_snaps > options.keep:
up_to = num_snaps - options.keep
logger.info('TOTAL: %d - Will delete 1 -> %d' % (num_snaps, up_to + 1))
for snap in snapshots[0:up_to]:
sh.delete(repository=options.repository, snapshot=snap['snapshot'], request_timeout=3600)
logger.info('Deleted snapshot %s' % snap['snapshot'])
except exceptions.TransportError as e:
pass
if __name__ == '__main__':
parser = get_parser("This script will take a snapshot and upload to S3")
parser.add_argument("--wait", action="store_true", default=True, help="Wait for the backup to complete")
parser.add_argument("--keep", action="store", default=60, help="Number of Elasticsearch snapshots to keep in S3")
options = parser.parse_args()
if options.debug:
logger.setLevel(logging.DEBUG)
take_snapshot(options) | #!/usr/bin/env python
import time, logging, argparse, json, sys
from es_manager import ElasticsearchSnapshotManager, get_parser | random_line_split |
|
es_backup.py | #!/usr/bin/env python
import time, logging, argparse, json, sys
from es_manager import ElasticsearchSnapshotManager, get_parser
from elasticsearch import exceptions
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('elasticsearch')
def | (options):
esm = ElasticsearchSnapshotManager(options)
sh = esm.sh
snapshot = options.snapshot and options.snapshot or 'all_' + time.strftime('%Y%m%d%H')
snapdef = {
"include_global_state": True
}
if options.indices:
snapdef['indices'] = ','.join(options.indices)
try:
sh.create(repository=options.repository, snapshot=snapshot, body=json.dumps(snapdef), wait_for_completion=options.wait, request_timeout=7200)
# Housekeeping - delete old snapshots
snapshots = sh.get(repository=options.repository, snapshot="_all", request_timeout=120)['snapshots']
num_snaps = len(snapshots)
if num_snaps > options.keep:
up_to = num_snaps - options.keep
logger.info('TOTAL: %d - Will delete 1 -> %d' % (num_snaps, up_to + 1))
for snap in snapshots[0:up_to]:
sh.delete(repository=options.repository, snapshot=snap['snapshot'], request_timeout=3600)
logger.info('Deleted snapshot %s' % snap['snapshot'])
except exceptions.TransportError as e:
pass
if __name__ == '__main__':
parser = get_parser("This script will take a snapshot and upload to S3")
parser.add_argument("--wait", action="store_true", default=True, help="Wait for the backup to complete")
parser.add_argument("--keep", action="store", default=60, help="Number of Elasticsearch snapshots to keep in S3")
options = parser.parse_args()
if options.debug:
logger.setLevel(logging.DEBUG)
take_snapshot(options)
| take_snapshot | identifier_name |
es_backup.py | #!/usr/bin/env python
import time, logging, argparse, json, sys
from es_manager import ElasticsearchSnapshotManager, get_parser
from elasticsearch import exceptions
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('elasticsearch')
def take_snapshot(options):
esm = ElasticsearchSnapshotManager(options)
sh = esm.sh
snapshot = options.snapshot and options.snapshot or 'all_' + time.strftime('%Y%m%d%H')
snapdef = {
"include_global_state": True
}
if options.indices:
snapdef['indices'] = ','.join(options.indices)
try:
sh.create(repository=options.repository, snapshot=snapshot, body=json.dumps(snapdef), wait_for_completion=options.wait, request_timeout=7200)
# Housekeeping - delete old snapshots
snapshots = sh.get(repository=options.repository, snapshot="_all", request_timeout=120)['snapshots']
num_snaps = len(snapshots)
if num_snaps > options.keep:
up_to = num_snaps - options.keep
logger.info('TOTAL: %d - Will delete 1 -> %d' % (num_snaps, up_to + 1))
for snap in snapshots[0:up_to]:
|
except exceptions.TransportError as e:
pass
if __name__ == '__main__':
parser = get_parser("This script will take a snapshot and upload to S3")
parser.add_argument("--wait", action="store_true", default=True, help="Wait for the backup to complete")
parser.add_argument("--keep", action="store", default=60, help="Number of Elasticsearch snapshots to keep in S3")
options = parser.parse_args()
if options.debug:
logger.setLevel(logging.DEBUG)
take_snapshot(options)
| sh.delete(repository=options.repository, snapshot=snap['snapshot'], request_timeout=3600)
logger.info('Deleted snapshot %s' % snap['snapshot']) | conditional_block |
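A hypothetical invocation of the script above; the `--repository` option is assumed to be defined by `es_manager.get_parser`, since only `--wait` and `--keep` are added here.

```python
# Hedged sketch: driving take_snapshot() with an argparse-style namespace.
# Attribute names mirror the options the function reads; any extra options
# required by ElasticsearchSnapshotManager are an assumption.
import argparse

options = argparse.Namespace(repository='my_s3_repo', snapshot=None,
                             indices=None, wait=True, keep=30, debug=False)
take_snapshot(options)
```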
error.rs | use crate::{constants::MAX_PRECISION_U32, Decimal};
use alloc::string::String;
use core::fmt;
/// Error type for the library.
#[derive(Clone, Debug, PartialEq)]
pub enum Error {
ErrorString(String),
ExceedsMaximumPossibleValue,
LessThanMinimumPossibleValue,
Underflow,
ScaleExceedsMaximumPrecision(u32),
}
impl<S> From<S> for Error
where
S: Into<String>,
{
#[inline]
fn from(from: S) -> Self {
Self::ErrorString(from.into())
}
}
#[cold]
pub(crate) fn tail_error(from: &'static str) -> Result<Decimal, Error> {
Err(from.into())
}
#[cfg(feature = "std")]
impl std::error::Error for Error {}
impl fmt::Display for Error {
fn | (&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Self::ErrorString(ref err) => f.pad(err),
Self::ExceedsMaximumPossibleValue => {
write!(f, "Number exceeds maximum value that can be represented.")
}
Self::LessThanMinimumPossibleValue => {
write!(f, "Number less than minimum value that can be represented.")
}
Self::Underflow => {
write!(f, "Number has a high precision that can not be represented.")
}
Self::ScaleExceedsMaximumPrecision(ref scale) => {
write!(
f,
"Scale exceeds the maximum precision allowed: {} > {}",
scale, MAX_PRECISION_U32
)
}
}
}
}
| fmt | identifier_name |
error.rs | use crate::{constants::MAX_PRECISION_U32, Decimal};
use alloc::string::String;
use core::fmt;
/// Error type for the library.
#[derive(Clone, Debug, PartialEq)]
pub enum Error {
ErrorString(String), | }
impl<S> From<S> for Error
where
S: Into<String>,
{
#[inline]
fn from(from: S) -> Self {
Self::ErrorString(from.into())
}
}
#[cold]
pub(crate) fn tail_error(from: &'static str) -> Result<Decimal, Error> {
Err(from.into())
}
#[cfg(feature = "std")]
impl std::error::Error for Error {}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Self::ErrorString(ref err) => f.pad(err),
Self::ExceedsMaximumPossibleValue => {
write!(f, "Number exceeds maximum value that can be represented.")
}
Self::LessThanMinimumPossibleValue => {
write!(f, "Number less than minimum value that can be represented.")
}
Self::Underflow => {
write!(f, "Number has a high precision that can not be represented.")
}
Self::ScaleExceedsMaximumPrecision(ref scale) => {
write!(
f,
"Scale exceeds the maximum precision allowed: {} > {}",
scale, MAX_PRECISION_U32
)
}
}
}
} | ExceedsMaximumPossibleValue,
LessThanMinimumPossibleValue,
Underflow,
ScaleExceedsMaximumPrecision(u32), | random_line_split |
error.rs | use crate::{constants::MAX_PRECISION_U32, Decimal};
use alloc::string::String;
use core::fmt;
/// Error type for the library.
#[derive(Clone, Debug, PartialEq)]
pub enum Error {
ErrorString(String),
ExceedsMaximumPossibleValue,
LessThanMinimumPossibleValue,
Underflow,
ScaleExceedsMaximumPrecision(u32),
}
impl<S> From<S> for Error
where
S: Into<String>,
{
#[inline]
fn from(from: S) -> Self {
Self::ErrorString(from.into())
}
}
#[cold]
pub(crate) fn tail_error(from: &'static str) -> Result<Decimal, Error> {
Err(from.into())
}
#[cfg(feature = "std")]
impl std::error::Error for Error {}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Self::ErrorString(ref err) => f.pad(err),
Self::ExceedsMaximumPossibleValue => {
write!(f, "Number exceeds maximum value that can be represented.")
}
Self::LessThanMinimumPossibleValue => {
write!(f, "Number less than minimum value that can be represented.")
}
Self::Underflow => |
Self::ScaleExceedsMaximumPrecision(ref scale) => {
write!(
f,
"Scale exceeds the maximum precision allowed: {} > {}",
scale, MAX_PRECISION_U32
)
}
}
}
}
| {
write!(f, "Number has a high precision that can not be represented.")
} | conditional_block |
error.rs | use crate::{constants::MAX_PRECISION_U32, Decimal};
use alloc::string::String;
use core::fmt;
/// Error type for the library.
#[derive(Clone, Debug, PartialEq)]
pub enum Error {
ErrorString(String),
ExceedsMaximumPossibleValue,
LessThanMinimumPossibleValue,
Underflow,
ScaleExceedsMaximumPrecision(u32),
}
impl<S> From<S> for Error
where
S: Into<String>,
{
#[inline]
fn from(from: S) -> Self {
Self::ErrorString(from.into())
}
}
#[cold]
pub(crate) fn tail_error(from: &'static str) -> Result<Decimal, Error> {
Err(from.into())
}
#[cfg(feature = "std")]
impl std::error::Error for Error {}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result | }
}
| {
match *self {
Self::ErrorString(ref err) => f.pad(err),
Self::ExceedsMaximumPossibleValue => {
write!(f, "Number exceeds maximum value that can be represented.")
}
Self::LessThanMinimumPossibleValue => {
write!(f, "Number less than minimum value that can be represented.")
}
Self::Underflow => {
write!(f, "Number has a high precision that can not be represented.")
}
Self::ScaleExceedsMaximumPrecision(ref scale) => {
write!(
f,
"Scale exceeds the maximum precision allowed: {} > {}",
scale, MAX_PRECISION_U32
)
}
} | identifier_body |
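A usage sketch for the error type above: the blanket `From<S: Into<String>>` impl is what lets a plain `&str` become `Error::ErrorString`, which is exactly what `tail_error` relies on.

```rust
// Sketch only; Decimal::is_sign_negative is assumed available on the
// crate's Decimal type.
fn require_positive(d: Decimal) -> Result<Decimal, Error> {
    if d.is_sign_negative() {
        return Err(Error::from("expected a positive decimal"));
    }
    Ok(d)
}
```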
cli.js | #!/usr/bin/env node
//
// cli.js
//
// Copyright (c) 2016-2017 Junpei Kawamoto
//
// This software is released under the MIT License.
//
// http://opensource.org/licenses/mit-license.php
//
const {
start,
crawl
} = require("../lib/crawler");
const argv = require("yargs")
.option("lang", {
describe: "Language to be used to scrape trand pages. Not used in crawl command."
})
.default("lang", "EN")
.option("dir", {
describe: "Path to the directory to store database files"
})
.demandOption(["dir"])
.command("*", "Start crawling", () => {}, (argv) => {
start(argv.lang, argv.dir);
})
.command("crawl", "Crawl comments form a video", () => {}, (argv) => {
crawl(argv.dir).catch((err) => {
console.error(err);
});
}) | .argv; | .help("h")
.alias("h", "help") | random_line_split |
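The two yargs commands above map onto the crawler API like this; the directory path is an assumption.

```javascript
// Hedged sketch of the programmatic equivalents of the CLI commands.
const { start, crawl } = require("../lib/crawler");

start("EN", "/tmp/crawler-db");                 // default command
crawl("/tmp/crawler-db").catch(console.error);  // "crawl" subcommand
```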
AutoincrementalField.py | # -*- coding: utf-8 -*-
"""
***************************************************************************
AutoincrementalField.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from PyQt4.QtCore import QVariant
from qgis.core import QgsField, QgsFeature, QgsGeometry
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterVector
from processing.core.outputs import OutputVector
from processing.tools import dataobjects, vector
class | (GeoAlgorithm):
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
def processAlgorithm(self, progress):
output = self.getOutputFromName(self.OUTPUT)
vlayer = \
dataobjects.getObjectFromUri(self.getParameterValue(self.INPUT))
vprovider = vlayer.dataProvider()
fields = vprovider.fields()
fields.append(QgsField('AUTO', QVariant.Int))
writer = output.getVectorWriter(fields, vprovider.geometryType(),
vlayer.crs())
inFeat = QgsFeature()
outFeat = QgsFeature()
inGeom = QgsGeometry()
nElement = 0
features = vector.features(vlayer)
nFeat = len(features)
for inFeat in features:
progress.setPercentage(int(100 * nElement / nFeat))
nElement += 1
inGeom = inFeat.geometry()
outFeat.setGeometry(inGeom)
attrs = inFeat.attributes()
attrs.append(nElement)
outFeat.setAttributes(attrs)
writer.addFeature(outFeat)
del writer
def defineCharacteristics(self):
self.name = 'Add autoincremental field'
self.group = 'Vector table tools'
self.addParameter(ParameterVector(self.INPUT,
self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_ANY]))
self.addOutput(OutputVector(self.OUTPUT, self.tr('Incremented')))
| AutoincrementalField | identifier_name |
AutoincrementalField.py | # -*- coding: utf-8 -*-
"""
***************************************************************************
AutoincrementalField.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from PyQt4.QtCore import QVariant
from qgis.core import QgsField, QgsFeature, QgsGeometry
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterVector
from processing.core.outputs import OutputVector
from processing.tools import dataobjects, vector
class AutoincrementalField(GeoAlgorithm):
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
def processAlgorithm(self, progress):
output = self.getOutputFromName(self.OUTPUT)
vlayer = \
dataobjects.getObjectFromUri(self.getParameterValue(self.INPUT))
vprovider = vlayer.dataProvider()
fields = vprovider.fields()
fields.append(QgsField('AUTO', QVariant.Int))
writer = output.getVectorWriter(fields, vprovider.geometryType(),
vlayer.crs())
inFeat = QgsFeature()
outFeat = QgsFeature()
inGeom = QgsGeometry()
nElement = 0
features = vector.features(vlayer)
nFeat = len(features)
for inFeat in features:
progress.setPercentage(int(100 * nElement / nFeat))
nElement += 1
inGeom = inFeat.geometry()
outFeat.setGeometry(inGeom)
attrs = inFeat.attributes()
attrs.append(nElement)
outFeat.setAttributes(attrs)
writer.addFeature(outFeat)
del writer
def defineCharacteristics(self):
| self.name = 'Add autoincremental field'
self.group = 'Vector table tools'
self.addParameter(ParameterVector(self.INPUT,
self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_ANY]))
self.addOutput(OutputVector(self.OUTPUT, self.tr('Incremented'))) | identifier_body |
|
AutoincrementalField.py | # -*- coding: utf-8 -*-
"""
***************************************************************************
AutoincrementalField.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from PyQt4.QtCore import QVariant
from qgis.core import QgsField, QgsFeature, QgsGeometry
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterVector
from processing.core.outputs import OutputVector
from processing.tools import dataobjects, vector
class AutoincrementalField(GeoAlgorithm):
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
def processAlgorithm(self, progress):
output = self.getOutputFromName(self.OUTPUT)
vlayer = \
dataobjects.getObjectFromUri(self.getParameterValue(self.INPUT))
vprovider = vlayer.dataProvider()
fields = vprovider.fields()
fields.append(QgsField('AUTO', QVariant.Int))
writer = output.getVectorWriter(fields, vprovider.geometryType(),
vlayer.crs())
inFeat = QgsFeature()
outFeat = QgsFeature()
inGeom = QgsGeometry()
nElement = 0
features = vector.features(vlayer)
nFeat = len(features)
for inFeat in features:
|
del writer
def defineCharacteristics(self):
self.name = 'Add autoincremental field'
self.group = 'Vector table tools'
self.addParameter(ParameterVector(self.INPUT,
self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_ANY]))
self.addOutput(OutputVector(self.OUTPUT, self.tr('Incremented')))
| progress.setPercentage(int(100 * nElement / nFeat))
nElement += 1
inGeom = inFeat.geometry()
outFeat.setGeometry(inGeom)
attrs = inFeat.attributes()
attrs.append(nElement)
outFeat.setAttributes(attrs)
writer.addFeature(outFeat) | conditional_block |
AutoincrementalField.py | # -*- coding: utf-8 -*-
"""
***************************************************************************
AutoincrementalField.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya' | __date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from PyQt4.QtCore import QVariant
from qgis.core import QgsField, QgsFeature, QgsGeometry
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterVector
from processing.core.outputs import OutputVector
from processing.tools import dataobjects, vector
class AutoincrementalField(GeoAlgorithm):
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
def processAlgorithm(self, progress):
output = self.getOutputFromName(self.OUTPUT)
vlayer = \
dataobjects.getObjectFromUri(self.getParameterValue(self.INPUT))
vprovider = vlayer.dataProvider()
fields = vprovider.fields()
fields.append(QgsField('AUTO', QVariant.Int))
writer = output.getVectorWriter(fields, vprovider.geometryType(),
vlayer.crs())
inFeat = QgsFeature()
outFeat = QgsFeature()
inGeom = QgsGeometry()
nElement = 0
features = vector.features(vlayer)
nFeat = len(features)
for inFeat in features:
progress.setPercentage(int(100 * nElement / nFeat))
nElement += 1
inGeom = inFeat.geometry()
outFeat.setGeometry(inGeom)
attrs = inFeat.attributes()
attrs.append(nElement)
outFeat.setAttributes(attrs)
writer.addFeature(outFeat)
del writer
def defineCharacteristics(self):
self.name = 'Add autoincremental field'
self.group = 'Vector table tools'
self.addParameter(ParameterVector(self.INPUT,
self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_ANY]))
self.addOutput(OutputVector(self.OUTPUT, self.tr('Incremented'))) | random_line_split |
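A hedged sketch of running this QGIS 2.x algorithm through the Processing framework; the algorithm id and file paths are assumptions.

```python
# Sketch only: runalg is the QGIS 2.x Processing entry point; the id below
# is an assumed registration name for this GeoAlgorithm.
import processing

processing.runalg('qgis:addautoincrementalfield',
                  '/data/input.shp',        # INPUT
                  '/tmp/incremented.shp')   # OUTPUT
```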
|
index.d.ts | /* Pick files From the cloud direct to your site */
pick(options: FilepickerOptions, onSuccess: (result: FilepickerInkBlob) => void, onError?: (fpError: any) => void);
/* Pick files From the cloud direct to your site */
pick(onSuccess: (result: FilepickerInkBlob) => void, onError?: (fpError: any) => void);
/* To select multiple files at once, use the pickMultiple call. */
pickMultiple(options: FilepickerMultipleFilePickOptions, onSuccess: (result: FilepickerInkBlob[]) => void, onError?: (fpError: any) => void);
/* To select multiple files at once, use the pickMultiple call. */
pickMultiple(onSuccess: (result: FilepickerInkBlob[]) => void, onError?: (fpError: any) => void);
/* To take care of everything at once, you can send uploaded files directly to S3, Rackspace, Azure and Dropbox. Note that the URLs that are returned will point to copies that are stored, not the versions that exist in Google Drive, Dropbox, etc. */
pickAndStore(options: FilepickerMultipleFilePickOptions, storageOptions: FilepickerStoreOptions, onSuccess: (result: FilepickerInkBlob) => void, onError: (fpError: any) => void);
}
interface FilepickerOptions {
/*
* Specify the type of file that the user is allowed to pick. For example, if you wanted images, specify image/* and users will only be able to select images to upload. Similarly, you could specify application/msword for only Word Documents.
*
* You can also specify an array of mimetypes to allow the user to select a file from any of the given types.
*/
mimetype?: string;
mimetypes?: string[];
/*
* Specify the type of file that the user is allowed to pick by extension. Don't use this option with mimetype(s) specified as well
*
* You can also specify an array of extensions to allow the user to select a file from any of the given types.
*/
extension?: string;
extensions?: string[];
/*
* Where to load the Ink file picker UI into. Possible values are "window", "modal", or the id of an iframe in the current document. Defaults to "modal". Note that if the browser disables 3rd party cookies, the dialog will automatically fall back to being served in a new window.
*/
container?: string;
/*
* Specify which services are displayed on the left panel, and in which order, by name.
*
* Be sure that the services you select are compatible with the mimetype(s) or extension(s) specified.
* Currently, the Ink file picker supports the following services, and we're adding more all the time: BOX
*
* COMPUTER
* DROPBOX
* EVERNOTE
* FACEBOOK
* FLICKR
* FTP
* GITHUB
* GOOGLE_DRIVE
* SKYDRIVE
* PICASA
* WEBDAV
*
* Pick only:
* GMAIL
* IMAGE_SEARCH
* INSTAGRAM
* URL
* VIDEO
* WEBCAM
*
* Export only:
* SEND_EMAIL
*/
service?: string;
services?: string[];
/*
* Specifies which service to show upon opening. If not set, the user is shown their most recently used location, or otherwise the computer upload page.
*/
openTo?: string;
/*
* Limit file uploads to be at max maxSize bytes.
*/
maxSize?: number;
/*
* Useful when developing, makes it so the onSuccess callback is fired immediately with dummy data.
*/
debug?: boolean;
/*
* If you have security enabled, you'll need to have a valid Ink file picker policy and signature in order to perform the requested call. This allows you to select who can and cannot perform certain actions on your site.
*/
policy?: string;
/*
* If you have security enabled, you'll need to have a valid Ink file picker policy and signature in order to perform the requested call. This allows you to select who can and cannot perform certain actions on your site.
*/
signature?: string;
/*
* The function to call if a file is picked successfully.
*
* We'll return an InkBlob as a JSON object with the following properties.
*
* url: The core Ink file url on which all other operations are based.
* filename: The filename of the uploaded file.
* mimetype: The mimetype of the uploaded file.
* size: The size of the uploaded file in bytes, if available.
* isWriteable: Whether the file can be written to using filepicker.write.
* Note: the "key" parameter is deprecated and will be removed soon. If you want to store files immediately after picking, use the filepicker.pickAndStore call.
*/
onSuccess?: (result: FilepickerInkBlob) => void;
onError?: (result: any) => void;
}
| }
interface FilepickerStoreOptions {
/*
* Where to store the file. The default is S3. Other options are 'azure', 'dropbox' and 'rackspace'. You must have configured your storage in the developer portal to enable this feature.
*
* Rackspace, Azure and Dropbox are only available on the Grow and higher plans.
*/
location?: string;
/*
* The path to store the file at within the specified file store. For S3, this is the key where the file will be stored at. By default, Ink stores the file at the root at a unique id, followed by an underscore, followed by the filename, for example "3AB239102DB_myphoto.png".
*
* If the provided path ends in a '/', it will be treated as a folder, so if the provided path is "myfiles/" and the uploaded file is named "myphoto.png", the file will be stored at "myfiles/909DFAC9CB12_myphoto.png", for example.
*
* If the multiple option is set to be true, only paths that end in '/' are allowed.
*/
path?: string;
/*
* The bucket or container in the specified file store where the file should end up. This is especially useful if you have different containers for testing and production and you want to use them both on the same filepicker app. If this parameter is omitted, the file is stored in the default container specified in your developer portal.
*
* Note that this parameter does not apply to the Dropbox file store.
*/
container?: string;
/*
* Indicates that the file should be stored in a way that allows public access going directly to the underlying file store. For instance, if the file is stored on S3, this will allow the S3 url to be used directly. This has no impact on the ability of users to read from the Ink file URL. Defaults to 'private'.
*/
access?: string;
}
interface FilepickerInkBlob {
/* The most critical part of the file, the url points to where the file is stored and acts as a sort of "file path". The url is what is used when making the underlying GET and POST calls to Ink when you do a filepicker.read or filepicker.write call. */
url: string;
/* The name of the file, if available */
filename: string;
/* The mimetype of the file, if available. */
mimetype: string;
/* The size of the file in bytes, if available. We will attach this directly to the InkBlob when we have it, otherwise you can always get the size by calling filepicker.stat */
size: number;
/* If the file was stored in one of the file stores you specified or configured (S3, Rackspace, Azure, etc.), this parameter will tell you where in the file store this file was put. */
key: string;
/* If the file was stored in one of the file stores you specified or configured (S3, Rackspace, Azure, etc.), this parameter will tell you in which container this file was put. */
container: string;
/* This flag specifies whether the underlying file is writeable. In most cases this will be true, but if a user uploads a photo from facebook, for instance, the original file cannot be written to. In these cases, you should use the filepicker.exportFile call as a way to give the user the ability to save their content. */
isWriteable: boolean;
/* The path of the InkBlob indicates its position in the hierarchy of files uploaded when {folders:true} is set. In situations where the file was not uploaded as part of or along with a folder, path will not be defined | interface FilepickerMultipleFilePickOptions extends FilepickerOptions {
/* Specify the maximum number of files that the user can upload at a time. If the user tries to upload more than this, they will be presented with an error message. By default, there is no cap on the number of files. */
maxFiles?: number;
/* Indicate that users should be able to upload entire folders worth of files at a time. Due to browser support, this is currently only available on recent versions of Chrome and Safari. By default, this is off (false). Folder upload is a premium feature available only on the "Grow" and higher plans. */
folders?: boolean; | random_line_split |
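An illustrative call against the declarations above; the `filepicker` global is assumed to be provided by the Ink file picker script, so it is declared as `any` here.

```typescript
// Hedged sketch: pick up to three images and store them on S3.
declare const filepicker: any; // assumed global from the loader script

filepicker.pickAndStore(
  { mimetype: 'image/*', maxFiles: 3 },
  { location: 's3', path: 'uploads/' },
  (blob: FilepickerInkBlob) => console.log('stored at', blob.url),
  (err: any) => console.error(err)
);
```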
Utils.ts | /**
* Copyright (c) Tiny Technologies, Inc. All rights reserved.
* Licensed under the LGPL or a commercial license.
* For LGPL see License.txt in the project root for license information.
* For commercial licenses see https://www.tiny.cloud/
*/
import { Unicode } from '@ephox/katamari';
import DomParser from 'tinymce/core/api/html/DomParser';
import Schema from 'tinymce/core/api/html/Schema';
import Tools from 'tinymce/core/api/util/Tools';
/**
* This class contains various utility functions for the paste plugin.
*
* @class tinymce.pasteplugin.Utils
*/
const filter = (content, items) => {
Tools.each(items, (v) => {
if (v.constructor === RegExp) | else {
content = content.replace(v[0], v[1]);
}
});
return content;
};
/**
* Gets the innerText of the specified element. It will handle edge cases
* and works better than textContent on Gecko.
*
* @param {String} html HTML string to get text from.
* @return {String} String of text with line feeds.
*/
const innerText = (html: string) => {
const schema = Schema();
const domParser = DomParser({}, schema);
let text = '';
const shortEndedElements = schema.getShortEndedElements();
const ignoreElements = Tools.makeMap('script noscript style textarea video audio iframe object', ' ');
const blockElements = schema.getBlockElements();
const walk = (node) => {
const name = node.name, currentNode = node;
if (name === 'br') {
text += '\n';
return;
}
// Ignore wbr, to replicate innerText on Chrome/Firefox
if (name === 'wbr') {
return;
}
// img/input/hr but ignore wbr as it's just a potential word break
if (shortEndedElements[name]) {
text += ' ';
}
// Ignore script, video contents
if (ignoreElements[name]) {
text += ' ';
return;
}
if (node.type === 3) {
text += node.value;
}
// Walk all children
if (!node.shortEnded) {
if ((node = node.firstChild)) {
do {
walk(node);
} while ((node = node.next));
}
}
// Add \n or \n\n for blocks or P
if (blockElements[name] && currentNode.next) {
text += '\n';
if (name === 'p') {
text += '\n';
}
}
};
html = filter(html, [
/<!\[[^\]]+\]>/g // Conditional comments
]);
walk(domParser.parse(html));
return text;
};
/**
* Trims the specified HTML by removing all WebKit fragments, all elements wrapping the body, trailing BR elements, etc.
*
* @param {String} html Html string to trim contents on.
* @return {String} Html contents that got trimmed.
*/
const trimHtml = (html: string) => {
const trimSpaces = (all, s1, s2) => {
// WebKit meant to preserve multiple spaces but instead inserted around all inline tags,
// including the spans with inline styles created on paste
if (!s1 && !s2) {
return ' ';
}
return Unicode.nbsp;
};
html = filter(html, [
/^[\s\S]*<body[^>]*>\s*|\s*<\/body[^>]*>[\s\S]*$/ig, // Remove anything but the contents within the BODY element
/<!--StartFragment-->|<!--EndFragment-->/g, // Inner fragments (tables from excel on mac)
[ /( ?)<span class="Apple-converted-space">\u00a0<\/span>( ?)/g, trimSpaces ],
/<br class="Apple-interchange-newline">/g,
/<br>$/i // Trailing BR elements
]);
return html;
};
// TODO: Should be in some global class
const createIdGenerator = (prefix: string) => {
let count = 0;
return () => {
return prefix + (count++);
};
};
const getImageMimeType = (ext: string): string => {
const lowerExt = ext.toLowerCase();
const mimeOverrides = {
jpg: 'jpeg',
jpe: 'jpeg',
jfi: 'jpeg',
jif: 'jpeg',
jfif: 'jpeg',
pjpeg: 'jpeg',
pjp: 'jpeg',
svg: 'svg+xml'
};
return Tools.hasOwn(mimeOverrides, lowerExt) ? 'image/' + mimeOverrides[lowerExt] : 'image/' + lowerExt;
};
export {
filter,
innerText,
trimHtml,
createIdGenerator,
getImageMimeType
};
| {
content = content.replace(v, '');
} | conditional_block |
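A small sketch of the `filter` contract above: bare RegExp items are deleted, `[pattern, replacement]` pairs are substituted.

```typescript
// Sketch only; the input string is invented.
const cleaned = filter('<b>Hi</b> world', [
  /<\/?b>/g,            // bare RegExp -> stripped
  [ /world/g, 'there' ] // [pattern, replacement] pair
]);
// cleaned === 'Hi there'
```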
Utils.ts | /**
* Copyright (c) Tiny Technologies, Inc. All rights reserved.
* Licensed under the LGPL or a commercial license.
* For LGPL see License.txt in the project root for license information.
* For commercial licenses see https://www.tiny.cloud/
*/
import { Unicode } from '@ephox/katamari';
import DomParser from 'tinymce/core/api/html/DomParser';
import Schema from 'tinymce/core/api/html/Schema';
import Tools from 'tinymce/core/api/util/Tools';
/**
* This class contains various utility functions for the paste plugin.
*
* @class tinymce.pasteplugin.Utils
*/
const filter = (content, items) => {
Tools.each(items, (v) => {
if (v.constructor === RegExp) {
content = content.replace(v, '');
} else {
content = content.replace(v[0], v[1]);
}
});
return content;
};
/**
* Gets the innerText of the specified element. It will handle edge cases
* and works better than textContent on Gecko.
*
* @param {String} html HTML string to get text from.
* @return {String} String of text with line feeds.
*/
const innerText = (html: string) => {
const schema = Schema();
const domParser = DomParser({}, schema);
let text = '';
const shortEndedElements = schema.getShortEndedElements();
const ignoreElements = Tools.makeMap('script noscript style textarea video audio iframe object', ' ');
const blockElements = schema.getBlockElements();
const walk = (node) => {
const name = node.name, currentNode = node; |
if (name === 'br') {
text += '\n';
return;
}
// Ignore wbr, to replicate innerText on Chrome/Firefox
if (name === 'wbr') {
return;
}
// img/input/hr but ignore wbr as it's just a potential word break
if (shortEndedElements[name]) {
text += ' ';
}
// Ignore script, video contents
if (ignoreElements[name]) {
text += ' ';
return;
}
if (node.type === 3) {
text += node.value;
}
// Walk all children
if (!node.shortEnded) {
if ((node = node.firstChild)) {
do {
walk(node);
} while ((node = node.next));
}
}
// Add \n or \n\n for blocks or P
if (blockElements[name] && currentNode.next) {
text += '\n';
if (name === 'p') {
text += '\n';
}
}
};
html = filter(html, [
/<!\[[^\]]+\]>/g // Conditional comments
]);
walk(domParser.parse(html));
return text;
};
/**
* Trims the specified HTML by removing all WebKit fragments, all elements wrapping the body, trailing BR elements, etc.
*
* @param {String} html Html string to trim contents on.
* @return {String} Html contents that got trimmed.
*/
const trimHtml = (html: string) => {
const trimSpaces = (all, s1, s2) => {
// WebKit meant to preserve multiple spaces but instead inserted around all inline tags,
// including the spans with inline styles created on paste
if (!s1 && !s2) {
return ' ';
}
return Unicode.nbsp;
};
html = filter(html, [
/^[\s\S]*<body[^>]*>\s*|\s*<\/body[^>]*>[\s\S]*$/ig, // Remove anything but the contents within the BODY element
/<!--StartFragment-->|<!--EndFragment-->/g, // Inner fragments (tables from excel on mac)
[ /( ?)<span class="Apple-converted-space">\u00a0<\/span>( ?)/g, trimSpaces ],
/<br class="Apple-interchange-newline">/g,
/<br>$/i // Trailing BR elements
]);
return html;
};
// TODO: Should be in some global class
const createIdGenerator = (prefix: string) => {
let count = 0;
return () => {
return prefix + (count++);
};
};
const getImageMimeType = (ext: string): string => {
const lowerExt = ext.toLowerCase();
const mimeOverrides = {
jpg: 'jpeg',
jpe: 'jpeg',
jfi: 'jpeg',
jif: 'jpeg',
jfif: 'jpeg',
pjpeg: 'jpeg',
pjp: 'jpeg',
svg: 'svg+xml'
};
return Tools.hasOwn(mimeOverrides, lowerExt) ? 'image/' + mimeOverrides[lowerExt] : 'image/' + lowerExt;
};
export {
filter,
innerText,
trimHtml,
createIdGenerator,
getImageMimeType
}; | random_line_split |
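A usage sketch for the two helpers above; the return values shown follow directly from the implementations.

```typescript
const nextId = createIdGenerator('mceclip');
nextId(); // 'mceclip0'
nextId(); // 'mceclip1'

getImageMimeType('jfif'); // 'image/jpeg' (via mimeOverrides)
getImageMimeType('png');  // 'image/png'
```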
|
user-config.ts | import { BuildType, Environment, ModuleType } from '.'
/**
* Types for config file
*/
export interface UserConfig {
/** javascript environment */
environments: Environment[]
/** directory to output to */
outdir: string | null
/** Definitions generation type */
buildType?: BuildType
/** Module type, can be CommonJS or ESM */
moduleType?: ModuleType
/** GIR directories */
girDirectories: string[]
/** Switch on/off the verbose mode */
verbose: boolean
/** Do not ask for package versions if multiple versions are found */
ignoreConflicts: boolean
/** print the output to console and create no files */
print: boolean
/** prettifies the generated .d.ts files */
pretty: boolean
/** GIR modules to load, e.g. 'Gio-2.0'. Accepts multiple modules */
modules: string[]
/** modules that should be ignored */
ignore?: string[]
/** Export all symbols for each module as a namespace */
useNamespace: boolean | /** Do not generate documentation comments */
noComments: boolean
} | random_line_split |
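A hypothetical object satisfying the `UserConfig` shape above; every value is an example rather than a recommended default.

```typescript
const config: UserConfig = {
    environments: [],                       // e.g. Gjs or node targets
    outdir: './@types',
    girDirectories: ['/usr/share/gir-1.0'], // assumed location
    verbose: true,
    ignoreConflicts: false,
    print: false,
    pretty: true,
    modules: ['Gio-2.0'],
    useNamespace: false,
    noComments: false
};
```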
|
compare_crud_spec.ts | /*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {ApiResult, SuccessResponse} from "helpers/api_request_builder";
import {SparkRoutes} from "helpers/spark_routes";
import {Comparison} from "../compare";
import {ComparisonCRUD} from "../compare_crud";
import {ComparisonData} from "./test_data";
describe('CompareCrudSpec', () => {
beforeEach(() => jasmine.Ajax.install());
afterEach(() => jasmine.Ajax.uninstall());
it('should get the difference between the two counters for the given pipeline', (done) => {
const apiPath = SparkRoutes.comparePipelines("pipeline1", 1, 3);
jasmine.Ajax.stubRequest(apiPath).andReturn(comparisonResponse());
const onResponse = jasmine.createSpy().and.callFake((response: ApiResult<any>) => {
const responseJSON = response.unwrap() as SuccessResponse<any>;
const object = (responseJSON.body as Comparison);
expect(object.pipelineName).toEqual("pipeline1");
expect(object.fromCounter).toEqual(1);
expect(object.toCounter).toEqual(3);
done();
});
ComparisonCRUD.getDifference("pipeline1", 1, 3).then(onResponse);
const request = jasmine.Ajax.requests.mostRecent();
expect(request.url).toEqual(apiPath);
expect(request.method).toEqual("GET");
expect(request.requestHeaders.Accept).toEqual("application/vnd.go.cd.v1+json");
});
});
function | () {
return {
status: 200,
responseHeaders: {
"Content-Type": "application/vnd.go.cd.v1+json; charset=utf-8",
"ETag": "some-etag"
},
responseText: JSON.stringify(ComparisonData.compare())
};
}
| comparisonResponse | identifier_name |
compare_crud_spec.ts | /*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {ApiResult, SuccessResponse} from "helpers/api_request_builder";
import {SparkRoutes} from "helpers/spark_routes";
import {Comparison} from "../compare";
import {ComparisonCRUD} from "../compare_crud";
import {ComparisonData} from "./test_data";
describe('CompareCrudSpec', () => {
beforeEach(() => jasmine.Ajax.install());
afterEach(() => jasmine.Ajax.uninstall()); | const onResponse = jasmine.createSpy().and.callFake((response: ApiResult<any>) => {
const responseJSON = response.unwrap() as SuccessResponse<any>;
const object = (responseJSON.body as Comparison);
expect(object.pipelineName).toEqual("pipeline1");
expect(object.fromCounter).toEqual(1);
expect(object.toCounter).toEqual(3);
done();
});
ComparisonCRUD.getDifference("pipeline1", 1, 3).then(onResponse);
const request = jasmine.Ajax.requests.mostRecent();
expect(request.url).toEqual(apiPath);
expect(request.method).toEqual("GET");
expect(request.requestHeaders.Accept).toEqual("application/vnd.go.cd.v1+json");
});
});
function comparisonResponse() {
return {
status: 200,
responseHeaders: {
"Content-Type": "application/vnd.go.cd.v1+json; charset=utf-8",
"ETag": "some-etag"
},
responseText: JSON.stringify(ComparisonData.compare())
};
} |
it('should get the difference between the two counters for the given pipeline', (done) => {
const apiPath = SparkRoutes.comparePipelines("pipeline1", 1, 3);
jasmine.Ajax.stubRequest(apiPath).andReturn(comparisonResponse());
| random_line_split |
compare_crud_spec.ts | /*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {ApiResult, SuccessResponse} from "helpers/api_request_builder";
import {SparkRoutes} from "helpers/spark_routes";
import {Comparison} from "../compare";
import {ComparisonCRUD} from "../compare_crud";
import {ComparisonData} from "./test_data";
describe('CompareCrudSpec', () => {
beforeEach(() => jasmine.Ajax.install());
afterEach(() => jasmine.Ajax.uninstall());
it('should get the difference between the two counters for the given pipeline', (done) => {
const apiPath = SparkRoutes.comparePipelines("pipeline1", 1, 3);
jasmine.Ajax.stubRequest(apiPath).andReturn(comparisonResponse());
const onResponse = jasmine.createSpy().and.callFake((response: ApiResult<any>) => {
const responseJSON = response.unwrap() as SuccessResponse<any>;
const object = (responseJSON.body as Comparison);
expect(object.pipelineName).toEqual("pipeline1");
expect(object.fromCounter).toEqual(1);
expect(object.toCounter).toEqual(3);
done();
});
ComparisonCRUD.getDifference("pipeline1", 1, 3).then(onResponse);
const request = jasmine.Ajax.requests.mostRecent();
expect(request.url).toEqual(apiPath);
expect(request.method).toEqual("GET");
expect(request.requestHeaders.Accept).toEqual("application/vnd.go.cd.v1+json");
});
});
function comparisonResponse() | {
return {
status: 200,
responseHeaders: {
"Content-Type": "application/vnd.go.cd.v1+json; charset=utf-8",
"ETag": "some-etag"
},
responseText: JSON.stringify(ComparisonData.compare())
};
} | identifier_body |
|
createSampleVideo.py | # -*- coding: utf-8 -*-
"""
Created on Wed May 18 18:22:12 2016
@author: ajaver
"""
import os
import cv2
import tables
import numpy as np
from tierpsy.helper.params import read_fps
from tierpsy.helper.misc import TimeCounter, print_flush
def getSubSampleVidName(masked_image_file):
#used by AnalysisPoints.py and CheckFinished.py
return masked_image_file.replace('.hdf5', '_subsample.avi')
def _getCorrectedTimeVec(fid, tot_frames):
'''time vector used to account for missing frames'''
try:
timestamp_ind = fid.get_node('/timestamp/raw')[:]
#remove any nan, I notice that sometimes the last number is a nan
timestamp_ind = timestamp_ind[~np.isnan(timestamp_ind)]
tot_timestamps = int(timestamp_ind[-1])
if timestamp_ind.size < tot_frames-1 or tot_timestamps < tot_frames-1: #invalid timestamp
#if there are no valid frames, skip
raise ValueError
except (tables.exceptions.NoSuchNodeError, ValueError, IndexError):
return np.arange(tot_frames)
#make sure to compensate for missing frames, so the video will have a similar length.
tt_vec = np.full(tot_timestamps+1, np.nan)
current_frame = 0
for ii in range(tot_timestamps+1):
tt_vec[ii] = current_frame
current_timestamp = timestamp_ind[current_frame]
if current_timestamp <= ii:
current_frame += 1
return tt_vec
def | (masked_image_file,
sample_video_name = '',
time_factor = 8,
size_factor = 5,
skip_factor = 2,
dflt_fps=30,
codec='MPEG',
shift_bgnd = False):
#skip factor is to reduce the size of the movie by using fewer frames (so we use 15fps for example instead of 30fps)
#%%
if not sample_video_name:
sample_video_name = getSubSampleVidName(masked_image_file)
# initialize timers
base_name = masked_image_file.rpartition('.')[0].rpartition(os.sep)[-1]
progressTime = TimeCounter('{} Generating subsampled video.'.format(base_name))
with tables.File(masked_image_file, 'r') as fid:
masks = fid.get_node('/mask')
tot_frames, im_h, im_w = masks.shape
im_h, im_w = im_h//size_factor, im_w//size_factor
fps = read_fps(masked_image_file, dflt_fps)
tt_vec = _getCorrectedTimeVec(fid, tot_frames)
#%%
#codec values that work 'H264' #'MPEG' #XVID
vid_writer = cv2.VideoWriter(sample_video_name, \
cv2.VideoWriter_fourcc(*codec), fps/skip_factor, (im_w,im_h), isColor=False)
assert vid_writer.isOpened()
if shift_bgnd:
#lazy bgnd calculation: just take the first and last frames and get the 97.5th percentile pixel value
mm = masks[[0,-1], :, :]
_bgnd_val = np.percentile(mm[mm!=0], [97.5])[0]
for frame_number in range(0, tot_frames, int(time_factor*skip_factor)):
current_frame = int(tt_vec[frame_number])
img = masks[current_frame]
if shift_bgnd:
img[img==0] = _bgnd_val
im_new = cv2.resize(img, (im_w,im_h))
vid_writer.write(im_new)
if frame_number % (500*time_factor) == 0:
# calculate the progress and put it in a string
print_flush(progressTime.get_str(frame_number))
vid_writer.release()
print_flush(progressTime.get_str(frame_number) + ' DONE.')
#%%
if __name__ == '__main__':
#mask_file_name = '/Volumes/behavgenom_archive$/Avelino/Worm_Rig_Tests/Agar_Test/MaskedVideos/Agar_Screening_101116/N2_N10_F1-3_Set1_Pos3_Ch6_12112016_002739.hdf5'
#masked_image_file = '/Volumes/behavgenom_archive$/Avelino/Worm_Rig_Tests/Agar_Test/MaskedVideos/Agar_Screening_101116/unc-9_N3_F1-3_Set1_Pos3_Ch4_12112016_002739.hdf5'
masked_image_file = r'C:\Users\wormrig\Documents\GitHub\Multiworm_Tracking\Tests\data\test_1\MaskedVideos\Capture_Ch1_18062015_140908.hdf5'
createSampleVideo(masked_image_file)
| createSampleVideo | identifier_name |
createSampleVideo.py | # -*- coding: utf-8 -*-
"""
Created on Wed May 18 18:22:12 2016
@author: ajaver
"""
import os
import cv2
import tables
import numpy as np
from tierpsy.helper.params import read_fps
from tierpsy.helper.misc import TimeCounter, print_flush
def getSubSampleVidName(masked_image_file):
#used by AnalysisPoints.py and CheckFinished.py
return masked_image_file.replace('.hdf5', '_subsample.avi')
def _getCorrectedTimeVec(fid, tot_frames):
'''time vector used to account for missing frames'''
try:
timestamp_ind = fid.get_node('/timestamp/raw')[:]
#remove any nan, I notice that sometimes the last number is a nan
timestamp_ind = timestamp_ind[~np.isnan(timestamp_ind)]
tot_timestamps = int(timestamp_ind[-1])
if timestamp_ind.size < tot_frames-1 or tot_timestamps < tot_frames-1: #invalid timestamp
#if there are no valid frames, skip
raise ValueError
except (tables.exceptions.NoSuchNodeError, ValueError, IndexError):
return np.arange(tot_frames)
#make sure to compensate for missing frames, so the video will have a similar length.
tt_vec = np.full(tot_timestamps+1, np.nan)
current_frame = 0
for ii in range(tot_timestamps+1):
tt_vec[ii] = current_frame
current_timestamp = timestamp_ind[current_frame]
if current_timestamp <= ii:
current_frame += 1
return tt_vec
def createSampleVideo(masked_image_file,
sample_video_name = '',
time_factor = 8,
size_factor = 5,
skip_factor = 2,
dflt_fps=30,
codec='MPEG',
shift_bgnd = False):
#skip factor is to reduce the size of the movie by using fewer frames (so we use 15fps for example instead of 30fps)
#%%
if not sample_video_name:
sample_video_name = getSubSampleVidName(masked_image_file)
# initialize timers
base_name = masked_image_file.rpartition('.')[0].rpartition(os.sep)[-1]
progressTime = TimeCounter('{} Generating subsampled video.'.format(base_name))
with tables.File(masked_image_file, 'r') as fid:
masks = fid.get_node('/mask')
tot_frames, im_h, im_w = masks.shape
im_h, im_w = im_h//size_factor, im_w//size_factor
fps = read_fps(masked_image_file, dflt_fps)
tt_vec = _getCorrectedTimeVec(fid, tot_frames)
#%%
#codec values that work 'H264' #'MPEG' #XVID
vid_writer = cv2.VideoWriter(sample_video_name, \
cv2.VideoWriter_fourcc(*codec), fps/skip_factor, (im_w,im_h), isColor=False) |
if shift_bgnd:
#lazy bgnd calculation: just take the first and last frames and get the 97.5th percentile pixel value
mm = masks[[0,-1], :, :]
_bgnd_val = np.percentile(mm[mm!=0], [97.5])[0]
for frame_number in range(0, tot_frames, int(time_factor*skip_factor)):
current_frame = int(tt_vec[frame_number])
img = masks[current_frame]
if shift_bgnd:
img[img==0] = _bgnd_val
im_new = cv2.resize(img, (im_w,im_h))
vid_writer.write(im_new)
if frame_number % (500*time_factor) == 0:
# calculate the progress and put it in a string
print_flush(progressTime.get_str(frame_number))
vid_writer.release()
print_flush(progressTime.get_str(frame_number) + ' DONE.')
#%%
if __name__ == '__main__':
#mask_file_name = '/Volumes/behavgenom_archive$/Avelino/Worm_Rig_Tests/Agar_Test/MaskedVideos/Agar_Screening_101116/N2_N10_F1-3_Set1_Pos3_Ch6_12112016_002739.hdf5'
#masked_image_file = '/Volumes/behavgenom_archive$/Avelino/Worm_Rig_Tests/Agar_Test/MaskedVideos/Agar_Screening_101116/unc-9_N3_F1-3_Set1_Pos3_Ch4_12112016_002739.hdf5'
masked_image_file = r'C:\Users\wormrig\Documents\GitHub\Multiworm_Tracking\Tests\data\test_1\MaskedVideos\Capture_Ch1_18062015_140908.hdf5'
createSampleVideo(masked_image_file) | assert vid_writer.isOpened()
| random_line_split |
createSampleVideo.py | # -*- coding: utf-8 -*-
"""
Created on Wed May 18 18:22:12 2016
@author: ajaver
"""
import os
import cv2
import tables
import numpy as np
from tierpsy.helper.params import read_fps
from tierpsy.helper.misc import TimeCounter, print_flush
def getSubSampleVidName(masked_image_file):
#used by AnalysisPoints.py and CheckFinished.py
return masked_image_file.replace('.hdf5', '_subsample.avi')
def _getCorrectedTimeVec(fid, tot_frames):
'''time vector used to account for missing frames'''
try:
timestamp_ind = fid.get_node('/timestamp/raw')[:]
#remove any nan, I notice that sometimes the last number is a nan
timestamp_ind = timestamp_ind[~np.isnan(timestamp_ind)]
tot_timestamps = int(timestamp_ind[-1])
if timestamp_ind.size < tot_frames-1 or tot_timestamps < tot_frames-1: #invalid timestamp
#if there are no valid frames, skip
raise ValueError
except (tables.exceptions.NoSuchNodeError, ValueError, IndexError):
return np.arange(tot_frames)
#make sure to compensate for missing frames, so the video will have a similar length.
tt_vec = np.full(tot_timestamps+1, np.nan)
current_frame = 0
for ii in range(tot_timestamps+1):
tt_vec[ii] = current_frame
current_timestamp = timestamp_ind[current_frame]
if current_timestamp <= ii:
current_frame += 1
return tt_vec
def createSampleVideo(masked_image_file,
sample_video_name = '',
time_factor = 8,
size_factor = 5,
skip_factor = 2,
dflt_fps=30,
codec='MPEG',
shift_bgnd = False):
#skip factor is to reduce the size of the movie by using fewer frames (so we use 15fps for example instead of 30fps)
#%%
|
if shift_bgnd:
#lazy bgnd calculation: just take the first and last frames and get the 97.5th percentile pixel value
mm = masks[[0,-1], :, :]
_bgnd_val = np.percentile(mm[mm!=0], [97.5])[0]
for frame_number in range(0, tot_frames, int(time_factor*skip_factor)):
current_frame = int(tt_vec[frame_number])
img = masks[current_frame]
if shift_bgnd:
img[img==0] = _bgnd_val
im_new = cv2.resize(img, (im_w,im_h))
vid_writer.write(im_new)
if frame_number % (500*time_factor) == 0:
# calculate the progress and put it in a string
print_flush(progressTime.get_str(frame_number))
vid_writer.release()
print_flush(progressTime.get_str(frame_number) + ' DONE.')
#%%
if __name__ == '__main__':
#mask_file_name = '/Volumes/behavgenom_archive$/Avelino/Worm_Rig_Tests/Agar_Test/MaskedVideos/Agar_Screening_101116/N2_N10_F1-3_Set1_Pos3_Ch6_12112016_002739.hdf5'
#masked_image_file = '/Volumes/behavgenom_archive$/Avelino/Worm_Rig_Tests/Agar_Test/MaskedVideos/Agar_Screening_101116/unc-9_N3_F1-3_Set1_Pos3_Ch4_12112016_002739.hdf5'
masked_image_file = r'C:\Users\wormrig\Documents\GitHub\Multiworm_Tracking\Tests\data\test_1\MaskedVideos\Capture_Ch1_18062015_140908.hdf5'
createSampleVideo(masked_image_file)
| if not sample_video_name:
sample_video_name = getSubSampleVidName(masked_image_file)
# initialize timers
base_name = masked_image_file.rpartition('.')[0].rpartition(os.sep)[-1]
progressTime = TimeCounter('{} Generating subsampled video.'.format(base_name))
with tables.File(masked_image_file, 'r') as fid:
masks = fid.get_node('/mask')
tot_frames, im_h, im_w = masks.shape
im_h, im_w = im_h//size_factor, im_w//size_factor
fps = read_fps(masked_image_file, dflt_fps)
tt_vec = _getCorrectedTimeVec(fid, tot_frames)
#%%
#codec values that work 'H264' #'MPEG' #XVID
vid_writer = cv2.VideoWriter(sample_video_name, \
cv2.VideoWriter_fourcc(*codec), fps/skip_factor, (im_w,im_h), isColor=False)
assert vid_writer.isOpened() | identifier_body |
createSampleVideo.py | # -*- coding: utf-8 -*-
"""
Created on Wed May 18 18:22:12 2016
@author: ajaver
"""
import os
import cv2
import tables
import numpy as np
from tierpsy.helper.params import read_fps
from tierpsy.helper.misc import TimeCounter, print_flush
def getSubSampleVidName(masked_image_file):
#used by AnalysisPoints.py and CheckFinished.py
return masked_image_file.replace('.hdf5', '_subsample.avi')
def _getCorrectedTimeVec(fid, tot_frames):
'''time vector used to account for missing frames'''
try:
timestamp_ind = fid.get_node('/timestamp/raw')[:]
#remove any nan, I notice that sometimes the last number is a nan
timestamp_ind = timestamp_ind[~np.isnan(timestamp_ind)]
tot_timestamps = int(timestamp_ind[-1])
if timestamp_ind.size < tot_frames-1 or tot_timestamps < tot_frames-1: #invalid timestamp
#if there are no valid frames, skip
raise ValueError
except (tables.exceptions.NoSuchNodeError, ValueError, IndexError):
return np.arange(tot_frames)
#make sure to compensate for missing frames, so the video will have a similar length.
tt_vec = np.full(tot_timestamps+1, np.nan)
current_frame = 0
for ii in range(tot_timestamps+1):
tt_vec[ii] = current_frame
current_timestamp = timestamp_ind[current_frame]
if current_timestamp <= ii:
current_frame += 1
return tt_vec
def createSampleVideo(masked_image_file,
sample_video_name = '',
time_factor = 8,
size_factor = 5,
skip_factor = 2,
dflt_fps=30,
codec='MPEG',
shift_bgnd = False):
#skip factor is to reduce the size of the movie by using fewer frames (so we use 15fps for example instead of 30fps)
#%%
if not sample_video_name:
sample_video_name = getSubSampleVidName(masked_image_file)
# initialize timers
base_name = masked_image_file.rpartition('.')[0].rpartition(os.sep)[-1]
progressTime = TimeCounter('{} Generating subsampled video.'.format(base_name))
with tables.File(masked_image_file, 'r') as fid:
masks = fid.get_node('/mask')
tot_frames, im_h, im_w = masks.shape
im_h, im_w = im_h//size_factor, im_w//size_factor
fps = read_fps(masked_image_file, dflt_fps)
tt_vec = _getCorrectedTimeVec(fid, tot_frames)
#%%
#codec values that work 'H264' #'MPEG' #XVID
vid_writer = cv2.VideoWriter(sample_video_name, \
cv2.VideoWriter_fourcc(*codec), fps/skip_factor, (im_w,im_h), isColor=False)
assert vid_writer.isOpened()
if shift_bgnd:
#lazy bgnd calculation: just take the first and last frames and get the 97.5th percentile pixel value
mm = masks[[0,-1], :, :]
_bgnd_val = np.percentile(mm[mm!=0], [97.5])[0]
for frame_number in range(0, tot_frames, int(time_factor*skip_factor)):
|
vid_writer.release()
print_flush(progressTime.get_str(frame_number) + ' DONE.')
#%%
if __name__ == '__main__':
#mask_file_name = '/Volumes/behavgenom_archive$/Avelino/Worm_Rig_Tests/Agar_Test/MaskedVideos/Agar_Screening_101116/N2_N10_F1-3_Set1_Pos3_Ch6_12112016_002739.hdf5'
#masked_image_file = '/Volumes/behavgenom_archive$/Avelino/Worm_Rig_Tests/Agar_Test/MaskedVideos/Agar_Screening_101116/unc-9_N3_F1-3_Set1_Pos3_Ch4_12112016_002739.hdf5'
masked_image_file = r'C:\Users\wormrig\Documents\GitHub\Multiworm_Tracking\Tests\data\test_1\MaskedVideos\Capture_Ch1_18062015_140908.hdf5'
createSampleVideo(masked_image_file)
| current_frame = int(tt_vec[frame_number])
img = masks[current_frame]
if shift_bgnd:
img[img==0] = _bgnd_val
im_new = cv2.resize(img, (im_w,im_h))
vid_writer.write(im_new)
if frame_number % (500*time_factor) == 0:
# calculate the progress and put it in a string
print_flush(progressTime.get_str(frame_number)) | conditional_block |
app-store.purchase-handler.ts | import {PurchaseHandler} from "./purchase-handler";
import {ProductData, productDataMap} from "./products";
import * as appleReceiptVerify from "node-apple-receipt-verify";
import {APP_STORE_SHARED_SECRET} from "./constants";
import {IapRepository} from "./iap.repository";
import {firestore} from "firebase-admin/lib/firestore";
// Add typings for missing property in library interface.
declare module "node-apple-receipt-verify" {
interface PurchasedProducts {
originalTransactionId: string;
}
}
export class AppStorePurchaseHandler extends PurchaseHandler {
constructor(private iapRepository: IapRepository) {
super();
appleReceiptVerify.config({
verbose: false,
secret: APP_STORE_SHARED_SECRET,
extended: true,
environment: ["sandbox"], // Optional, defaults to ['production'],
excludeOldTransactions: true,
});
}
async handleNonSubscription(
userId: string,
productData: ProductData,
token: string,
): Promise<boolean> {
return this.handleValidation(userId, token);
}
async handleSubscription(
userId: string,
productData: ProductData,
token: string,
): Promise<boolean> {
return this.handleValidation(userId, token);
}
private async | (
userId: string,
token: string,
): Promise<boolean> {
// Validate receipt and fetch the products
let products: appleReceiptVerify.PurchasedProducts[];
try {
products = await appleReceiptVerify.validate({receipt: token});
} catch (e) {
if (e instanceof appleReceiptVerify.EmptyError) {
// Receipt is valid but it is now empty.
console.warn(
"Received valid empty receipt");
return true;
} else if (e instanceof
appleReceiptVerify.ServiceUnavailableError) {
console.warn(
"App store is currently unavailable, could not validate");
// Handle app store services not being available
return false;
}
return false;
}
// Process the received products
for (const product of products) {
// Skip processing the product if it is unknown
const productData = productDataMap[product.productId];
if (!productData) continue;
// Process the product
switch (productData.type) {
case "SUBSCRIPTION":
await this.iapRepository.createOrUpdatePurchase({
type: productData.type,
iapSource: "app_store",
orderId: product.originalTransactionId,
productId: product.productId,
userId,
purchaseDate: firestore.Timestamp.fromMillis(product.purchaseDate),
expiryDate: firestore.Timestamp.fromMillis(
product.expirationDate ?? 0,
),
status: (product.expirationDate ?? 0) <= Date.now() ? "EXPIRED" : "ACTIVE",
});
break;
case "NON_SUBSCRIPTION":
await this.iapRepository.createOrUpdatePurchase({
type: productData.type,
iapSource: "app_store",
orderId: product.originalTransactionId,
productId: product.productId,
userId,
purchaseDate: firestore.Timestamp.fromMillis(product.purchaseDate),
status: "COMPLETED",
});
break;
}
}
return true;
}
}
| handleValidation | identifier_name |
app-store.purchase-handler.ts | import {PurchaseHandler} from "./purchase-handler";
import {ProductData, productDataMap} from "./products";
import * as appleReceiptVerify from "node-apple-receipt-verify";
import {APP_STORE_SHARED_SECRET} from "./constants";
import {IapRepository} from "./iap.repository";
import {firestore} from "firebase-admin/lib/firestore";
// Add typings for missing property in library interface.
declare module "node-apple-receipt-verify" {
interface PurchasedProducts { |
export class AppStorePurchaseHandler extends PurchaseHandler {
constructor(private iapRepository: IapRepository) {
super();
appleReceiptVerify.config({
verbose: false,
secret: APP_STORE_SHARED_SECRET,
extended: true,
environment: ["sandbox"], // Optional, defaults to ['production'],
excludeOldTransactions: true,
});
}
async handleNonSubscription(
userId: string,
productData: ProductData,
token: string,
): Promise<boolean> {
return this.handleValidation(userId, token);
}
async handleSubscription(
userId: string,
productData: ProductData,
token: string,
): Promise<boolean> {
return this.handleValidation(userId, token);
}
private async handleValidation(
userId: string,
token: string,
): Promise<boolean> {
// Validate receipt and fetch the products
let products: appleReceiptVerify.PurchasedProducts[];
try {
products = await appleReceiptVerify.validate({receipt: token});
} catch (e) {
if (e instanceof appleReceiptVerify.EmptyError) {
// Receipt is valid but it is now empty.
console.warn(
"Received valid empty receipt");
return true;
} else if (e instanceof
appleReceiptVerify.ServiceUnavailableError) {
console.warn(
"App store is currently unavailable, could not validate");
// Handle app store services not being available
return false;
}
return false;
}
// Process the received products
for (const product of products) {
// Skip processing the product if it is unknown
const productData = productDataMap[product.productId];
if (!productData) continue;
// Process the product
switch (productData.type) {
case "SUBSCRIPTION":
await this.iapRepository.createOrUpdatePurchase({
type: productData.type,
iapSource: "app_store",
orderId: product.originalTransactionId,
productId: product.productId,
userId,
purchaseDate: firestore.Timestamp.fromMillis(product.purchaseDate),
expiryDate: firestore.Timestamp.fromMillis(
product.expirationDate ?? 0,
),
status: (product.expirationDate ?? 0) <= Date.now() ? "EXPIRED" : "ACTIVE",
});
break;
case "NON_SUBSCRIPTION":
await this.iapRepository.createOrUpdatePurchase({
type: productData.type,
iapSource: "app_store",
orderId: product.originalTransactionId,
productId: product.productId,
userId,
purchaseDate: firestore.Timestamp.fromMillis(product.purchaseDate),
status: "COMPLETED",
});
break;
}
}
return true;
}
} | originalTransactionId: string;
}
} | random_line_split |