file_name (large_string, lengths 4-140) | prefix (large_string, lengths 0-12.1k) | suffix (large_string, lengths 0-12k) | middle (large_string, lengths 0-7.51k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
extractMenu.ts | // Extract menu still frames.
/// <reference path="../../references.ts" />
'use strict';
import path = require('path');
import serverUtils = require('../../server/utils/index');
import utils = require('../../utils');
import editMetadataFile = require('../../server/utils/editMetadataFile');
export = extractMenu;
/**
* Extract menu still frames.
*
* @param {string} dvdPath
* @param {function} callback
*/
function extractMenu(dvdPath: string, callback) {
process.stdout.write('\nExtracting menu still frames:\n');
var webPath = serverUtils.getWebPath(dvdPath);
var ifoPath = getWebName('metadata');
var filesList = require(ifoPath);
var menu = [];
var pointer = 0;
next(filesList[pointer].ifo);
// There are better ways to do async...
function next(ifoFile: string) | var pgci_srp = lu.pgcit.pgci_srp[j];
var pgcIndex = j + 1;
var vobID = null;
var cellID = null;
if (pgci_srp.pgc.cell_position && pgci_srp.pgc.cell_position.length) {
vobID = pgci_srp.pgc.cell_position[0].vob_id_nr;
cellID = pgci_srp.pgc.cell_position[0].cell_nr;
}
menu[pointer].menu[lang].push({
pgc: pgcIndex,
entry: pgci_srp.entry_id,
vobID: vobID,
cellID: cellID
});
}
}
callNext();
function callNext() {
pointer++;
if (pointer < filesList.length) {
setTimeout(function() {
next(filesList[pointer].ifo);
}, 0);
} else {
// At the end of all iterations.
// Save a metadata file containing the list of all IFO files.
editMetadataFile(getWebName('metadata'), menu, function() {
callback();
});
}
}
}
}
/**
* Return the file path for the web given a file.
* Used for naming both the IFO files and the metadata file.
*
* @param name A file name.
* @return {string}
*/
function getWebName(name: string): string {
return path.join(webPath, getJsonFileName(name));
}
}
/**
* Transform the file name of a JSON file.
*
* @param {string} name A file name.
* @return {string}
*/
function getJsonFileName(name: string): string {
return name.replace(/\.IFO$/i, '') + '.json';
}
| {
ifoFile = path.join(webPath, '../', ifoFile);
var json = require(ifoFile);
menu[pointer] = {};
menu[pointer].menu = {};
extractMenuData();
function extractMenuData() {
if (!json.pgci_ut || !json.pgci_ut.lu || !Array.isArray(json.pgci_ut.lu)) {
callNext();
return;
}
for (var i = 0; i < json.pgci_ut.nr_of_lus; i++) {
var lu = json.pgci_ut.lu[i];
var lang = utils.bit2str(lu.lang_code);
menu[pointer].menu[lang] = [];
for (var j = 0; j < lu.pgcit.nr_of_pgci_srp; j++) { | identifier_body |
extractMenu.ts | // Extract menu still frames.
/// <reference path="../../references.ts" />
'use strict';
import path = require('path');
import serverUtils = require('../../server/utils/index');
import utils = require('../../utils');
import editMetadataFile = require('../../server/utils/editMetadataFile');
export = extractMenu;
/**
* Extract menu still frames.
*
* @param {string} dvdPath
* @param {function} callback
*/
function extractMenu(dvdPath: string, callback) {
process.stdout.write('\nExtracting menu still frames:\n');
var webPath = serverUtils.getWebPath(dvdPath);
var ifoPath = getWebName('metadata');
var filesList = require(ifoPath);
var menu = [];
var pointer = 0;
next(filesList[pointer].ifo);
// There are better ways to do async...
function next(ifoFile: string) {
ifoFile = path.join(webPath, '../', ifoFile);
var json = require(ifoFile);
menu[pointer] = {};
menu[pointer].menu = {};
extractMenuData();
function extractMenuData() {
if (!json.pgci_ut || !json.pgci_ut.lu || !Array.isArray(json.pgci_ut.lu)) {
callNext();
return;
}
for (var i = 0; i < json.pgci_ut.nr_of_lus; i++) {
var lu = json.pgci_ut.lu[i];
var lang = utils.bit2str(lu.lang_code);
menu[pointer].menu[lang] = [];
for (var j = 0; j < lu.pgcit.nr_of_pgci_srp; j++) |
}
callNext();
function callNext() {
pointer++;
if (pointer < filesList.length) {
setTimeout(function() {
next(filesList[pointer].ifo);
}, 0);
} else {
// At the end of all iterations.
// Save a metadata file containing the list of all IFO files.
editMetadataFile(getWebName('metadata'), menu, function() {
callback();
});
}
}
}
}
/**
* Return the file path for the web given a file.
* Used for naming both the IFO files and the metadata file.
*
* @param name A file name.
* @return {string}
*/
function getWebName(name: string): string {
return path.join(webPath, getJsonFileName(name));
}
}
/**
* Transform the file name of a JSON file.
*
* @param {string} name A file name.
* @return {string}
*/
function getJsonFileName(name: string): string {
return name.replace(/\.IFO$/i, '') + '.json';
}
| {
var pgci_srp = lu.pgcit.pgci_srp[j];
var pgcIndex = j + 1;
var vobID = null;
var cellID = null;
if (pgci_srp.pgc.cell_position && pgci_srp.pgc.cell_position.length) {
vobID = pgci_srp.pgc.cell_position[0].vob_id_nr;
cellID = pgci_srp.pgc.cell_position[0].cell_nr;
}
menu[pointer].menu[lang].push({
pgc: pgcIndex,
entry: pgci_srp.entry_id,
vobID: vobID,
cellID: cellID
});
} | conditional_block |
extractMenu.ts | // Extract menu still frames.
/// <reference path="../../references.ts" />
'use strict';
import path = require('path'); |
export = extractMenu;
/**
* Extract menu still frames.
*
* @param {string} dvdPath
* @param {function} callback
*/
function extractMenu(dvdPath: string, callback) {
process.stdout.write('\nExtracting menu still frames:\n');
var webPath = serverUtils.getWebPath(dvdPath);
var ifoPath = getWebName('metadata');
var filesList = require(ifoPath);
var menu = [];
var pointer = 0;
next(filesList[pointer].ifo);
// There are better ways to do async...
function next(ifoFile: string) {
ifoFile = path.join(webPath, '../', ifoFile);
var json = require(ifoFile);
menu[pointer] = {};
menu[pointer].menu = {};
extractMenuData();
function extractMenuData() {
if (!json.pgci_ut || !json.pgci_ut.lu || !Array.isArray(json.pgci_ut.lu)) {
callNext();
return;
}
for (var i = 0; i < json.pgci_ut.nr_of_lus; i++) {
var lu = json.pgci_ut.lu[i];
var lang = utils.bit2str(lu.lang_code);
menu[pointer].menu[lang] = [];
for (var j = 0; j < lu.pgcit.nr_of_pgci_srp; j++) {
var pgci_srp = lu.pgcit.pgci_srp[j];
var pgcIndex = j + 1;
var vobID = null;
var cellID = null;
if (pgci_srp.pgc.cell_position && pgci_srp.pgc.cell_position.length) {
vobID = pgci_srp.pgc.cell_position[0].vob_id_nr;
cellID = pgci_srp.pgc.cell_position[0].cell_nr;
}
menu[pointer].menu[lang].push({
pgc: pgcIndex,
entry: pgci_srp.entry_id,
vobID: vobID,
cellID: cellID
});
}
}
callNext();
function callNext() {
pointer++;
if (pointer < filesList.length) {
setTimeout(function() {
next(filesList[pointer].ifo);
}, 0);
} else {
// At the end of all iterations.
// Save a metadata file containing the list of all IFO files.
editMetadataFile(getWebName('metadata'), menu, function() {
callback();
});
}
}
}
}
/**
* Return the file path for the web given a file.
* Used for naming both the IFO files and the metadata file.
*
* @param name A file name.
* @return {string}
*/
function getWebName(name: string): string {
return path.join(webPath, getJsonFileName(name));
}
}
/**
* Transform the file name of a JSON file.
*
* @param {string} name A file name.
* @return {string}
*/
function getJsonFileName(name: string): string {
return name.replace(/\.IFO$/i, '') + '.json';
} |
import serverUtils = require('../../server/utils/index');
import utils = require('../../utils');
import editMetadataFile = require('../../server/utils/editMetadataFile'); | random_line_split |
type_cameras.py | pix_fmt yuv420p "
"-r {fps} "
"-b:v {v_max_bitrate}k -bufsize {v_bufsize}k -maxrate {v_max_bitrate}k "
"-payload_type 99 "
"-ssrc {v_ssrc} -f rtp "
"-srtp_out_suite AES_CM_128_HMAC_SHA1_80 -srtp_out_params {v_srtp_key} "
"srtp://{address}:{v_port}?rtcpport={v_port}&"
"localrtcpport={v_port}&pkt_size={v_pkt_size}"
)
AUDIO_OUTPUT = (
"-map {a_map} -vn "
"-c:a {a_encoder} "
"{a_application}"
"-ac 1 -ar {a_sample_rate}k "
"-b:a {a_max_bitrate}k -bufsize {a_bufsize}k "
"-payload_type 110 "
"-ssrc {a_ssrc} -f rtp "
"-srtp_out_suite AES_CM_128_HMAC_SHA1_80 -srtp_out_params {a_srtp_key} "
"srtp://{address}:{a_port}?rtcpport={a_port}&"
"localrtcpport={a_port}&pkt_size={a_pkt_size}"
)
SLOW_RESOLUTIONS = [
(320, 180, 15),
(320, 240, 15),
]
RESOLUTIONS = [
(320, 180),
(320, 240),
(480, 270),
(480, 360),
(640, 360),
(640, 480),
(1024, 576),
(1024, 768),
(1280, 720),
(1280, 960),
(1920, 1080),
]
VIDEO_PROFILE_NAMES = ["baseline", "main", "high"]
FFMPEG_WATCH_INTERVAL = timedelta(seconds=5)
FFMPEG_WATCHER = "ffmpeg_watcher"
FFMPEG_PID = "ffmpeg_pid"
SESSION_ID = "session_id"
CONFIG_DEFAULTS = {
CONF_SUPPORT_AUDIO: DEFAULT_SUPPORT_AUDIO,
CONF_MAX_WIDTH: DEFAULT_MAX_WIDTH,
CONF_MAX_HEIGHT: DEFAULT_MAX_HEIGHT,
CONF_MAX_FPS: DEFAULT_MAX_FPS,
CONF_AUDIO_CODEC: DEFAULT_AUDIO_CODEC,
CONF_AUDIO_MAP: DEFAULT_AUDIO_MAP,
CONF_VIDEO_MAP: DEFAULT_VIDEO_MAP,
CONF_VIDEO_CODEC: DEFAULT_VIDEO_CODEC,
CONF_AUDIO_PACKET_SIZE: DEFAULT_AUDIO_PACKET_SIZE,
CONF_VIDEO_PACKET_SIZE: DEFAULT_VIDEO_PACKET_SIZE,
CONF_STREAM_COUNT: DEFAULT_STREAM_COUNT,
}
@TYPES.register("Camera")
class Camera(HomeAccessory, PyhapCamera):
"""Generate a Camera accessory."""
def __init__(self, hass, driver, name, entity_id, aid, config):
"""Initialize a Camera accessory object."""
self._ffmpeg = hass.data[DATA_FFMPEG]
for config_key in CONFIG_DEFAULTS:
if config_key not in config:
config[config_key] = CONFIG_DEFAULTS[config_key]
max_fps = config[CONF_MAX_FPS]
max_width = config[CONF_MAX_WIDTH]
max_height = config[CONF_MAX_HEIGHT]
resolutions = [
(w, h, fps)
for w, h, fps in SLOW_RESOLUTIONS
if w <= max_width and h <= max_height and fps < max_fps
] + [
(w, h, max_fps)
for w, h in RESOLUTIONS
if w <= max_width and h <= max_height
]
video_options = {
"codec": {
"profiles": [
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES["BASELINE"],
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES["MAIN"],
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES["HIGH"],
],
"levels": [
VIDEO_CODEC_PARAM_LEVEL_TYPES["TYPE3_1"],
VIDEO_CODEC_PARAM_LEVEL_TYPES["TYPE3_2"],
VIDEO_CODEC_PARAM_LEVEL_TYPES["TYPE4_0"],
],
},
"resolutions": resolutions,
}
audio_options = {
"codecs": [
{"type": "OPUS", "samplerate": 24},
{"type": "OPUS", "samplerate": 16},
]
}
stream_address = config.get(CONF_STREAM_ADDRESS, get_local_ip())
options = {
"video": video_options,
"audio": audio_options,
"address": stream_address,
"srtp": True,
"stream_count": config[CONF_STREAM_COUNT],
}
super().__init__(
hass,
driver,
name,
entity_id,
aid,
config,
category=CATEGORY_CAMERA,
options=options,
)
self._char_motion_detected = None
self.linked_motion_sensor = self.config.get(CONF_LINKED_MOTION_SENSOR)
if self.linked_motion_sensor:
state = self.hass.states.get(self.linked_motion_sensor)
if state:
serv_motion = self.add_preload_service(SERV_MOTION_SENSOR)
self._char_motion_detected = serv_motion.configure_char(
CHAR_MOTION_DETECTED, value=False
)
self._async_update_motion_state(state)
self._char_doorbell_detected = None
self._char_doorbell_detected_switch = None
self.linked_doorbell_sensor = self.config.get(CONF_LINKED_DOORBELL_SENSOR)
if self.linked_doorbell_sensor:
state = self.hass.states.get(self.linked_doorbell_sensor)
if state:
|
async def run_handler(self):
"""Handle accessory driver started event.
Run inside the Home Assistant event loop.
"""
if self._char_motion_detected:
async_track_state_change_event(
self.hass,
[self.linked_motion_sensor],
self._async_update_motion_state_event,
)
if self._char_doorbell_detected:
async_track_state_change_event(
self.hass,
[self.linked_doorbell_sensor],
self._async_update_doorbell_state_event,
)
await super().run_handler()
@callback
def _async_update_motion_state_event(self, event):
"""Handle state change event listener callback."""
self._async_update_motion_state(event.data.get("new_state"))
@callback
def _async_update_motion_state(self, new_state):
"""Handle link motion sensor state change to update HomeKit value."""
if not new_state:
return
detected = new_state.state == STATE_ON
if self._char_motion_detected.value == detected:
return
self._char_motion_detected.set_value(detected)
_LOGGER.debug(
"%s: Set linked motion %s sensor to %d",
self.entity_id,
self.linked_motion_sensor,
detected,
)
@callback
def _async_update_doorbell_state_event(self, event):
"""Handle state change event listener callback."""
self._async_update_doorbell_state(event.data.get("new_state"))
@callback
def _async_update_doorbell_state(self, new_state):
"""Handle link doorbell sensor state change to update HomeKit value."""
if not new_state:
return
if new_state.state == STATE_ON:
self._char_doorbell_detected.set_value(DOORBELL_SINGLE_PRESS)
self._char_doorbell_detected_switch.set_value(DOORBELL_SINGLE_PRESS)
_LOGGER.debug(
"%s: Set linked doorbell %s sensor to %d",
self.entity_id,
self.linked_doorbell_sensor,
DOORBELL_SINGLE_PRESS,
)
@callback
def async_update_state(self, new_state):
"""Handle state change to update HomeKit value."""
pass # pylint: disable=unnecessary-pass
async def _async_get_stream_source(self):
"""Find the camera stream source url."""
stream_source = self.config.get(CONF_STREAM_SOURCE)
if stream_source:
return stream_source
try:
stream_source = await self.hass.components.camera.async_get_stream_source(
self.entity_id
)
except Exception: # pylint: disable=broad-except
_LOGGER.exception(
"Failed to get stream source - this could be a transient error or your camera might not be compatible with HomeKit yet"
)
if stream_source:
self.config[CONF_STREAM_SOURCE] = stream_source
return stream_source
async def start_stream(self, session_info, stream_config):
"""Start a new stream with the given configuration."""
_LOGGER.debug(
"[%s] Starting stream with the following parameters: %s | serv_doorbell = self.add_preload_service(SERV_DOORBELL)
self.set_primary_service(serv_doorbell)
self._char_doorbell_detected = serv_doorbell.configure_char(
CHAR_PROGRAMMABLE_SWITCH_EVENT, value=0,
)
serv_stateless_switch = self.add_preload_service(
SERV_STATELESS_PROGRAMMABLE_SWITCH
)
self._char_doorbell_detected_switch = serv_stateless_switch.configure_char(
CHAR_PROGRAMMABLE_SWITCH_EVENT,
value=0,
valid_values={"SinglePress": DOORBELL_SINGLE_PRESS},
)
serv_speaker = self.add_preload_service(SERV_SPEAKER)
serv_speaker.configure_char(CHAR_MUTE, value=0)
self._async_update_doorbell_state(state) | conditional_block |
type_cameras.py | pix_fmt yuv420p "
"-r {fps} "
"-b:v {v_max_bitrate}k -bufsize {v_bufsize}k -maxrate {v_max_bitrate}k "
"-payload_type 99 "
"-ssrc {v_ssrc} -f rtp "
"-srtp_out_suite AES_CM_128_HMAC_SHA1_80 -srtp_out_params {v_srtp_key} "
"srtp://{address}:{v_port}?rtcpport={v_port}&"
"localrtcpport={v_port}&pkt_size={v_pkt_size}"
)
AUDIO_OUTPUT = (
"-map {a_map} -vn "
"-c:a {a_encoder} "
"{a_application}"
"-ac 1 -ar {a_sample_rate}k "
"-b:a {a_max_bitrate}k -bufsize {a_bufsize}k "
"-payload_type 110 "
"-ssrc {a_ssrc} -f rtp "
"-srtp_out_suite AES_CM_128_HMAC_SHA1_80 -srtp_out_params {a_srtp_key} "
"srtp://{address}:{a_port}?rtcpport={a_port}&"
"localrtcpport={a_port}&pkt_size={a_pkt_size}"
)
SLOW_RESOLUTIONS = [
(320, 180, 15),
(320, 240, 15),
]
RESOLUTIONS = [
(320, 180),
(320, 240),
(480, 270),
(480, 360),
(640, 360),
(640, 480),
(1024, 576),
(1024, 768),
(1280, 720),
(1280, 960),
(1920, 1080),
]
VIDEO_PROFILE_NAMES = ["baseline", "main", "high"]
FFMPEG_WATCH_INTERVAL = timedelta(seconds=5)
FFMPEG_WATCHER = "ffmpeg_watcher"
FFMPEG_PID = "ffmpeg_pid"
SESSION_ID = "session_id"
CONFIG_DEFAULTS = {
CONF_SUPPORT_AUDIO: DEFAULT_SUPPORT_AUDIO,
CONF_MAX_WIDTH: DEFAULT_MAX_WIDTH,
CONF_MAX_HEIGHT: DEFAULT_MAX_HEIGHT,
CONF_MAX_FPS: DEFAULT_MAX_FPS,
CONF_AUDIO_CODEC: DEFAULT_AUDIO_CODEC,
CONF_AUDIO_MAP: DEFAULT_AUDIO_MAP,
CONF_VIDEO_MAP: DEFAULT_VIDEO_MAP,
CONF_VIDEO_CODEC: DEFAULT_VIDEO_CODEC,
CONF_AUDIO_PACKET_SIZE: DEFAULT_AUDIO_PACKET_SIZE,
CONF_VIDEO_PACKET_SIZE: DEFAULT_VIDEO_PACKET_SIZE,
CONF_STREAM_COUNT: DEFAULT_STREAM_COUNT,
}
@TYPES.register("Camera")
class Camera(HomeAccessory, PyhapCamera):
"""Generate a Camera accessory."""
def __init__(self, hass, driver, name, entity_id, aid, config):
"""Initialize a Camera accessory object."""
self._ffmpeg = hass.data[DATA_FFMPEG]
for config_key in CONFIG_DEFAULTS:
if config_key not in config:
config[config_key] = CONFIG_DEFAULTS[config_key]
max_fps = config[CONF_MAX_FPS]
max_width = config[CONF_MAX_WIDTH]
max_height = config[CONF_MAX_HEIGHT]
resolutions = [
(w, h, fps)
for w, h, fps in SLOW_RESOLUTIONS
if w <= max_width and h <= max_height and fps < max_fps
] + [
(w, h, max_fps)
for w, h in RESOLUTIONS
if w <= max_width and h <= max_height
]
video_options = {
"codec": {
"profiles": [
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES["BASELINE"],
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES["MAIN"],
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES["HIGH"],
],
"levels": [
VIDEO_CODEC_PARAM_LEVEL_TYPES["TYPE3_1"],
VIDEO_CODEC_PARAM_LEVEL_TYPES["TYPE3_2"],
VIDEO_CODEC_PARAM_LEVEL_TYPES["TYPE4_0"],
],
},
"resolutions": resolutions,
}
audio_options = {
"codecs": [
{"type": "OPUS", "samplerate": 24},
{"type": "OPUS", "samplerate": 16},
]
}
stream_address = config.get(CONF_STREAM_ADDRESS, get_local_ip())
options = {
"video": video_options,
"audio": audio_options,
"address": stream_address,
"srtp": True,
"stream_count": config[CONF_STREAM_COUNT],
}
super().__init__(
hass,
driver,
name,
entity_id,
aid,
config,
category=CATEGORY_CAMERA,
options=options,
)
self._char_motion_detected = None
self.linked_motion_sensor = self.config.get(CONF_LINKED_MOTION_SENSOR)
if self.linked_motion_sensor:
state = self.hass.states.get(self.linked_motion_sensor)
if state:
serv_motion = self.add_preload_service(SERV_MOTION_SENSOR)
self._char_motion_detected = serv_motion.configure_char(
CHAR_MOTION_DETECTED, value=False
)
self._async_update_motion_state(state)
self._char_doorbell_detected = None
self._char_doorbell_detected_switch = None
self.linked_doorbell_sensor = self.config.get(CONF_LINKED_DOORBELL_SENSOR)
if self.linked_doorbell_sensor:
state = self.hass.states.get(self.linked_doorbell_sensor)
if state:
serv_doorbell = self.add_preload_service(SERV_DOORBELL)
self.set_primary_service(serv_doorbell)
self._char_doorbell_detected = serv_doorbell.configure_char(
CHAR_PROGRAMMABLE_SWITCH_EVENT, value=0,
)
serv_stateless_switch = self.add_preload_service(
SERV_STATELESS_PROGRAMMABLE_SWITCH
)
self._char_doorbell_detected_switch = serv_stateless_switch.configure_char(
CHAR_PROGRAMMABLE_SWITCH_EVENT,
value=0,
valid_values={"SinglePress": DOORBELL_SINGLE_PRESS},
)
serv_speaker = self.add_preload_service(SERV_SPEAKER)
serv_speaker.configure_char(CHAR_MUTE, value=0)
self._async_update_doorbell_state(state)
async def run_handler(self):
"""Handle accessory driver started event.
Run inside the Home Assistant event loop.
"""
if self._char_motion_detected:
async_track_state_change_event(
self.hass,
[self.linked_motion_sensor],
self._async_update_motion_state_event,
)
if self._char_doorbell_detected:
async_track_state_change_event(
self.hass,
[self.linked_doorbell_sensor],
self._async_update_doorbell_state_event,
)
await super().run_handler()
@callback
def _async_update_motion_state_event(self, event):
"""Handle state change event listener callback."""
self._async_update_motion_state(event.data.get("new_state"))
@callback
def | (self, new_state):
"""Handle link motion sensor state change to update HomeKit value."""
if not new_state:
return
detected = new_state.state == STATE_ON
if self._char_motion_detected.value == detected:
return
self._char_motion_detected.set_value(detected)
_LOGGER.debug(
"%s: Set linked motion %s sensor to %d",
self.entity_id,
self.linked_motion_sensor,
detected,
)
@callback
def _async_update_doorbell_state_event(self, event):
"""Handle state change event listener callback."""
self._async_update_doorbell_state(event.data.get("new_state"))
@callback
def _async_update_doorbell_state(self, new_state):
"""Handle link doorbell sensor state change to update HomeKit value."""
if not new_state:
return
if new_state.state == STATE_ON:
self._char_doorbell_detected.set_value(DOORBELL_SINGLE_PRESS)
self._char_doorbell_detected_switch.set_value(DOORBELL_SINGLE_PRESS)
_LOGGER.debug(
"%s: Set linked doorbell %s sensor to %d",
self.entity_id,
self.linked_doorbell_sensor,
DOORBELL_SINGLE_PRESS,
)
@callback
def async_update_state(self, new_state):
"""Handle state change to update HomeKit value."""
pass # pylint: disable=unnecessary-pass
async def _async_get_stream_source(self):
"""Find the camera stream source url."""
stream_source = self.config.get(CONF_STREAM_SOURCE)
if stream_source:
return stream_source
try:
stream_source = await self.hass.components.camera.async_get_stream_source(
self.entity_id
)
except Exception: # pylint: disable=broad-except
_LOGGER.exception(
"Failed to get stream source - this could be a transient error or your camera might not be compatible with HomeKit yet"
)
if stream_source:
self.config[CONF_STREAM_SOURCE] = stream_source
return stream_source
async def start_stream(self, session_info, stream_config):
"""Start a new stream with the given configuration."""
_LOGGER.debug(
"[%s] Starting stream with the following parameters: %s",
| _async_update_motion_state | identifier_name |
type_cameras.py | CONFIG_DEFAULTS:
if config_key not in config:
config[config_key] = CONFIG_DEFAULTS[config_key]
max_fps = config[CONF_MAX_FPS]
max_width = config[CONF_MAX_WIDTH]
max_height = config[CONF_MAX_HEIGHT]
resolutions = [
(w, h, fps)
for w, h, fps in SLOW_RESOLUTIONS
if w <= max_width and h <= max_height and fps < max_fps
] + [
(w, h, max_fps)
for w, h in RESOLUTIONS
if w <= max_width and h <= max_height
]
video_options = {
"codec": {
"profiles": [
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES["BASELINE"],
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES["MAIN"],
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES["HIGH"],
],
"levels": [
VIDEO_CODEC_PARAM_LEVEL_TYPES["TYPE3_1"],
VIDEO_CODEC_PARAM_LEVEL_TYPES["TYPE3_2"],
VIDEO_CODEC_PARAM_LEVEL_TYPES["TYPE4_0"],
],
},
"resolutions": resolutions,
}
audio_options = {
"codecs": [
{"type": "OPUS", "samplerate": 24},
{"type": "OPUS", "samplerate": 16},
]
}
stream_address = config.get(CONF_STREAM_ADDRESS, get_local_ip())
options = {
"video": video_options,
"audio": audio_options,
"address": stream_address,
"srtp": True,
"stream_count": config[CONF_STREAM_COUNT],
}
super().__init__(
hass,
driver,
name,
entity_id,
aid,
config,
category=CATEGORY_CAMERA,
options=options,
)
self._char_motion_detected = None
self.linked_motion_sensor = self.config.get(CONF_LINKED_MOTION_SENSOR)
if self.linked_motion_sensor:
state = self.hass.states.get(self.linked_motion_sensor)
if state:
serv_motion = self.add_preload_service(SERV_MOTION_SENSOR)
self._char_motion_detected = serv_motion.configure_char(
CHAR_MOTION_DETECTED, value=False
)
self._async_update_motion_state(state)
self._char_doorbell_detected = None
self._char_doorbell_detected_switch = None
self.linked_doorbell_sensor = self.config.get(CONF_LINKED_DOORBELL_SENSOR)
if self.linked_doorbell_sensor:
state = self.hass.states.get(self.linked_doorbell_sensor)
if state:
serv_doorbell = self.add_preload_service(SERV_DOORBELL)
self.set_primary_service(serv_doorbell)
self._char_doorbell_detected = serv_doorbell.configure_char(
CHAR_PROGRAMMABLE_SWITCH_EVENT, value=0,
)
serv_stateless_switch = self.add_preload_service(
SERV_STATELESS_PROGRAMMABLE_SWITCH
)
self._char_doorbell_detected_switch = serv_stateless_switch.configure_char(
CHAR_PROGRAMMABLE_SWITCH_EVENT,
value=0,
valid_values={"SinglePress": DOORBELL_SINGLE_PRESS},
)
serv_speaker = self.add_preload_service(SERV_SPEAKER)
serv_speaker.configure_char(CHAR_MUTE, value=0)
self._async_update_doorbell_state(state)
async def run_handler(self):
"""Handle accessory driver started event.
Run inside the Home Assistant event loop.
"""
if self._char_motion_detected:
async_track_state_change_event(
self.hass,
[self.linked_motion_sensor],
self._async_update_motion_state_event,
)
if self._char_doorbell_detected:
async_track_state_change_event(
self.hass,
[self.linked_doorbell_sensor],
self._async_update_doorbell_state_event,
)
await super().run_handler()
@callback
def _async_update_motion_state_event(self, event):
"""Handle state change event listener callback."""
self._async_update_motion_state(event.data.get("new_state"))
@callback
def _async_update_motion_state(self, new_state):
"""Handle link motion sensor state change to update HomeKit value."""
if not new_state:
return
detected = new_state.state == STATE_ON
if self._char_motion_detected.value == detected:
return
self._char_motion_detected.set_value(detected)
_LOGGER.debug(
"%s: Set linked motion %s sensor to %d",
self.entity_id,
self.linked_motion_sensor,
detected,
)
@callback
def _async_update_doorbell_state_event(self, event):
"""Handle state change event listener callback."""
self._async_update_doorbell_state(event.data.get("new_state"))
@callback
def _async_update_doorbell_state(self, new_state):
"""Handle link doorbell sensor state change to update HomeKit value."""
if not new_state:
return
if new_state.state == STATE_ON:
self._char_doorbell_detected.set_value(DOORBELL_SINGLE_PRESS)
self._char_doorbell_detected_switch.set_value(DOORBELL_SINGLE_PRESS)
_LOGGER.debug(
"%s: Set linked doorbell %s sensor to %d",
self.entity_id,
self.linked_doorbell_sensor,
DOORBELL_SINGLE_PRESS,
)
@callback
def async_update_state(self, new_state):
"""Handle state change to update HomeKit value."""
pass # pylint: disable=unnecessary-pass
async def _async_get_stream_source(self):
"""Find the camera stream source url."""
stream_source = self.config.get(CONF_STREAM_SOURCE)
if stream_source:
return stream_source
try:
stream_source = await self.hass.components.camera.async_get_stream_source(
self.entity_id
)
except Exception: # pylint: disable=broad-except
_LOGGER.exception(
"Failed to get stream source - this could be a transient error or your camera might not be compatible with HomeKit yet"
)
if stream_source:
self.config[CONF_STREAM_SOURCE] = stream_source
return stream_source
async def start_stream(self, session_info, stream_config):
"""Start a new stream with the given configuration."""
_LOGGER.debug(
"[%s] Starting stream with the following parameters: %s",
session_info["id"],
stream_config,
)
input_source = await self._async_get_stream_source()
if not input_source:
_LOGGER.error("Camera has no stream source")
return False
if "-i " not in input_source:
input_source = "-i " + input_source
video_profile = ""
if self.config[CONF_VIDEO_CODEC] != "copy":
video_profile = (
"-profile:v "
+ VIDEO_PROFILE_NAMES[
int.from_bytes(stream_config["v_profile_id"], byteorder="big")
]
+ " "
)
audio_application = ""
if self.config[CONF_AUDIO_CODEC] == "libopus":
audio_application = "-application lowdelay "
output_vars = stream_config.copy()
output_vars.update(
{
"v_profile": video_profile,
"v_bufsize": stream_config["v_max_bitrate"] * 4,
"v_map": self.config[CONF_VIDEO_MAP],
"v_pkt_size": self.config[CONF_VIDEO_PACKET_SIZE],
"v_codec": self.config[CONF_VIDEO_CODEC],
"a_bufsize": stream_config["a_max_bitrate"] * 4,
"a_map": self.config[CONF_AUDIO_MAP],
"a_pkt_size": self.config[CONF_AUDIO_PACKET_SIZE],
"a_encoder": self.config[CONF_AUDIO_CODEC],
"a_application": audio_application,
}
)
output = VIDEO_OUTPUT.format(**output_vars)
if self.config[CONF_SUPPORT_AUDIO]:
output = output + " " + AUDIO_OUTPUT.format(**output_vars)
_LOGGER.debug("FFmpeg output settings: %s", output)
stream = HAFFmpeg(self._ffmpeg.binary, loop=self.driver.loop)
opened = await stream.open(
cmd=[], input_source=input_source, output=output, stdout_pipe=False
)
if not opened:
_LOGGER.error("Failed to open ffmpeg stream")
return False
_LOGGER.info(
"[%s] Started stream process - PID %d",
session_info["id"],
stream.process.pid,
)
session_info["stream"] = stream
session_info[FFMPEG_PID] = stream.process.pid
async def watch_session(_):
await self._async_ffmpeg_watch(session_info["id"])
session_info[FFMPEG_WATCHER] = async_track_time_interval(
self.hass, watch_session, FFMPEG_WATCH_INTERVAL,
)
return await self._async_ffmpeg_watch(session_info["id"])
async def _async_ffmpeg_watch(self, session_id):
"""Check to make sure ffmpeg is still running and cleanup if not."""
ffmpeg_pid = self.sessions[session_id][FFMPEG_PID]
if pid_is_alive(ffmpeg_pid):
return True
_LOGGER.warning("Streaming process ended unexpectedly - PID %d", ffmpeg_pid)
self._async_stop_ffmpeg_watch(session_id)
self.set_streaming_available(self.sessions[session_id]["stream_idx"])
return False
@callback
def _async_stop_ffmpeg_watch(self, session_id):
| """Cleanup a streaming session after stopping."""
if FFMPEG_WATCHER not in self.sessions[session_id]:
return
self.sessions[session_id].pop(FFMPEG_WATCHER)() | identifier_body |
|
type_cameras.py | PyhapCamera):
"""Generate a Camera accessory."""
def __init__(self, hass, driver, name, entity_id, aid, config):
"""Initialize a Camera accessory object."""
self._ffmpeg = hass.data[DATA_FFMPEG]
for config_key in CONFIG_DEFAULTS:
if config_key not in config:
config[config_key] = CONFIG_DEFAULTS[config_key]
max_fps = config[CONF_MAX_FPS]
max_width = config[CONF_MAX_WIDTH]
max_height = config[CONF_MAX_HEIGHT]
resolutions = [
(w, h, fps)
for w, h, fps in SLOW_RESOLUTIONS
if w <= max_width and h <= max_height and fps < max_fps
] + [
(w, h, max_fps)
for w, h in RESOLUTIONS
if w <= max_width and h <= max_height
]
video_options = {
"codec": {
"profiles": [
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES["BASELINE"],
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES["MAIN"],
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES["HIGH"],
],
"levels": [
VIDEO_CODEC_PARAM_LEVEL_TYPES["TYPE3_1"],
VIDEO_CODEC_PARAM_LEVEL_TYPES["TYPE3_2"],
VIDEO_CODEC_PARAM_LEVEL_TYPES["TYPE4_0"],
],
},
"resolutions": resolutions,
}
audio_options = {
"codecs": [
{"type": "OPUS", "samplerate": 24},
{"type": "OPUS", "samplerate": 16},
]
}
stream_address = config.get(CONF_STREAM_ADDRESS, get_local_ip())
options = {
"video": video_options,
"audio": audio_options,
"address": stream_address,
"srtp": True,
"stream_count": config[CONF_STREAM_COUNT],
}
super().__init__(
hass,
driver,
name,
entity_id,
aid,
config,
category=CATEGORY_CAMERA,
options=options,
)
self._char_motion_detected = None
self.linked_motion_sensor = self.config.get(CONF_LINKED_MOTION_SENSOR)
if self.linked_motion_sensor:
state = self.hass.states.get(self.linked_motion_sensor)
if state:
serv_motion = self.add_preload_service(SERV_MOTION_SENSOR)
self._char_motion_detected = serv_motion.configure_char(
CHAR_MOTION_DETECTED, value=False
)
self._async_update_motion_state(state)
self._char_doorbell_detected = None
self._char_doorbell_detected_switch = None
self.linked_doorbell_sensor = self.config.get(CONF_LINKED_DOORBELL_SENSOR)
if self.linked_doorbell_sensor:
state = self.hass.states.get(self.linked_doorbell_sensor)
if state:
serv_doorbell = self.add_preload_service(SERV_DOORBELL)
self.set_primary_service(serv_doorbell)
self._char_doorbell_detected = serv_doorbell.configure_char(
CHAR_PROGRAMMABLE_SWITCH_EVENT, value=0,
)
serv_stateless_switch = self.add_preload_service(
SERV_STATELESS_PROGRAMMABLE_SWITCH
)
self._char_doorbell_detected_switch = serv_stateless_switch.configure_char(
CHAR_PROGRAMMABLE_SWITCH_EVENT,
value=0,
valid_values={"SinglePress": DOORBELL_SINGLE_PRESS},
)
serv_speaker = self.add_preload_service(SERV_SPEAKER)
serv_speaker.configure_char(CHAR_MUTE, value=0)
self._async_update_doorbell_state(state)
async def run_handler(self):
"""Handle accessory driver started event.
Run inside the Home Assistant event loop.
"""
if self._char_motion_detected:
async_track_state_change_event(
self.hass,
[self.linked_motion_sensor],
self._async_update_motion_state_event,
)
if self._char_doorbell_detected:
async_track_state_change_event(
self.hass,
[self.linked_doorbell_sensor],
self._async_update_doorbell_state_event,
)
await super().run_handler()
@callback
def _async_update_motion_state_event(self, event):
"""Handle state change event listener callback."""
self._async_update_motion_state(event.data.get("new_state"))
@callback
def _async_update_motion_state(self, new_state):
"""Handle link motion sensor state change to update HomeKit value."""
if not new_state:
return
detected = new_state.state == STATE_ON
if self._char_motion_detected.value == detected:
return
self._char_motion_detected.set_value(detected)
_LOGGER.debug(
"%s: Set linked motion %s sensor to %d",
self.entity_id,
self.linked_motion_sensor,
detected,
)
@callback
def _async_update_doorbell_state_event(self, event):
"""Handle state change event listener callback."""
self._async_update_doorbell_state(event.data.get("new_state"))
@callback
def _async_update_doorbell_state(self, new_state):
"""Handle link doorbell sensor state change to update HomeKit value."""
if not new_state:
return
if new_state.state == STATE_ON:
self._char_doorbell_detected.set_value(DOORBELL_SINGLE_PRESS)
self._char_doorbell_detected_switch.set_value(DOORBELL_SINGLE_PRESS)
_LOGGER.debug(
"%s: Set linked doorbell %s sensor to %d",
self.entity_id,
self.linked_doorbell_sensor,
DOORBELL_SINGLE_PRESS,
)
@callback
def async_update_state(self, new_state):
"""Handle state change to update HomeKit value."""
pass # pylint: disable=unnecessary-pass
async def _async_get_stream_source(self):
"""Find the camera stream source url."""
stream_source = self.config.get(CONF_STREAM_SOURCE)
if stream_source:
return stream_source
try:
stream_source = await self.hass.components.camera.async_get_stream_source(
self.entity_id
)
except Exception: # pylint: disable=broad-except
_LOGGER.exception(
"Failed to get stream source - this could be a transient error or your camera might not be compatible with HomeKit yet"
)
if stream_source:
self.config[CONF_STREAM_SOURCE] = stream_source
return stream_source
async def start_stream(self, session_info, stream_config):
"""Start a new stream with the given configuration."""
_LOGGER.debug(
"[%s] Starting stream with the following parameters: %s",
session_info["id"],
stream_config,
)
input_source = await self._async_get_stream_source()
if not input_source:
_LOGGER.error("Camera has no stream source")
return False
if "-i " not in input_source:
input_source = "-i " + input_source
video_profile = ""
if self.config[CONF_VIDEO_CODEC] != "copy":
video_profile = (
"-profile:v "
+ VIDEO_PROFILE_NAMES[
int.from_bytes(stream_config["v_profile_id"], byteorder="big")
]
+ " "
)
audio_application = ""
if self.config[CONF_AUDIO_CODEC] == "libopus":
audio_application = "-application lowdelay "
output_vars = stream_config.copy()
output_vars.update(
{
"v_profile": video_profile,
"v_bufsize": stream_config["v_max_bitrate"] * 4,
"v_map": self.config[CONF_VIDEO_MAP],
"v_pkt_size": self.config[CONF_VIDEO_PACKET_SIZE],
"v_codec": self.config[CONF_VIDEO_CODEC],
"a_bufsize": stream_config["a_max_bitrate"] * 4,
"a_map": self.config[CONF_AUDIO_MAP],
"a_pkt_size": self.config[CONF_AUDIO_PACKET_SIZE],
"a_encoder": self.config[CONF_AUDIO_CODEC],
"a_application": audio_application,
}
)
output = VIDEO_OUTPUT.format(**output_vars)
if self.config[CONF_SUPPORT_AUDIO]:
output = output + " " + AUDIO_OUTPUT.format(**output_vars)
_LOGGER.debug("FFmpeg output settings: %s", output)
stream = HAFFmpeg(self._ffmpeg.binary, loop=self.driver.loop)
opened = await stream.open(
cmd=[], input_source=input_source, output=output, stdout_pipe=False
)
if not opened:
_LOGGER.error("Failed to open ffmpeg stream")
return False
_LOGGER.info(
"[%s] Started stream process - PID %d",
session_info["id"],
stream.process.pid,
)
session_info["stream"] = stream
session_info[FFMPEG_PID] = stream.process.pid
async def watch_session(_):
await self._async_ffmpeg_watch(session_info["id"])
session_info[FFMPEG_WATCHER] = async_track_time_interval(
self.hass, watch_session, FFMPEG_WATCH_INTERVAL,
)
return await self._async_ffmpeg_watch(session_info["id"])
async def _async_ffmpeg_watch(self, session_id):
"""Check to make sure ffmpeg is still running and cleanup if not."""
ffmpeg_pid = self.sessions[session_id][FFMPEG_PID]
if pid_is_alive(ffmpeg_pid):
return True
_LOGGER.warning("Streaming process ended unexpectedly - PID %d", ffmpeg_pid)
self._async_stop_ffmpeg_watch(session_id)
self.set_streaming_available(self.sessions[session_id]["stream_idx"])
return False
| @callback | random_line_split |
|
index.ts | import low from 'lowdb';
import FileSync from 'lowdb/adapters/FileSync';
import { v4 as uuidv4 } from 'uuid';
import { Game, LAN_PARTY_GAMES } from './games';
import { Event } from './events';
interface Database {
eventTypes: string[];
events: Event[];
games: Game[];
}
const getDb = () => {
const adapter = new FileSync<Database>('db.json');
const db = low(adapter);
db.defaults({
eventTypes: [
'Board Game Party',
'Couch Party',
'LAN Party',
'Night Trip',
'RPG',
'Sleigh ride',
'Warhammer 40k',
'Zwardoń',
],
events: [],
games: [
...LAN_PARTY_GAMES.map((game) => ({ |
return db;
};
export default getDb; | ...game,
id: uuidv4(),
})),
],
}).write(); | random_line_split |
ViewInRoom.jest.tsx | import { graphql } from "relay-runtime"
import { setupTestWrapper } from "v2/DevTools/setupTestWrapper"
import { ViewInRoomFragmentContainer } from "../ViewInRoom"
jest.unmock("react-relay")
const { getWrapper } = setupTestWrapper({
Component: ViewInRoomFragmentContainer,
query: graphql`
query ViewInRoom_Test_Query @relay_test_operation {
artwork(id: "example") {
...ViewInRoom_artwork
} | `,
})
describe("ViewInRoom", () => {
it("renders correctly", () => {
const wrapper = getWrapper({
Artwork: () => ({ widthCm: 33, heightCm: 66 }),
ResizedImageUrl: () => ({
src: "example.jpg",
srcSet: "example.jpg 1x",
}),
})
expect(wrapper.html()).toContain(
'src="example.jpg" srcset="example.jpg 1x"'
)
})
}) | } | random_line_split |
api.ts | }
}
export interface QueryRevisionResponse {
query: {
normalized: {
fromencoded: boolean
from: string
to: string
}
pages: Array<{
pageid: number
ns: number
title: string
revisions: Array<{
slots: {[slot: string]: {
contentmodel: string
contentformat: string
content: string
}}
}>
}>
}
}
export interface QueryTokenResponse {
batchcomplete: boolean
query: {
tokens: {
csrftoken: string
}
}
}
export type EditRequest = ({ title: string } | { pageid: number }) & {
section?: number
sectiontitle?: string
text?: string
summary?: string
tags?: string
minor?: boolean
notminor?: boolean
bot?: boolean
baserevid?: number
basetimestamp?: Date
starttimestamp?: Date
recreate?: boolean
createonly?: boolean
nocreate?: boolean
watchlist?: 'nochange' | 'preferences' | 'unwatch' | 'watch'
md5?: string
prependtext?: string
appendtext?: string
undo?: number
undoafter?: number
redirect?: boolean
contentformat?: 'application/json' | 'application/octet-stream' | 'application/unknown' | 'application/x-binary' | 'text/css' | 'text/javascript' | 'text/plain' | 'text/unknown' | 'text/x-wiki' | 'unknown/unknown'
contentmodel?: 'GadgetDefinition' | 'Json.JsonConfig' | 'JsonSchema' | 'Map.JsonConfig' | 'MassMessageListContent' | 'NewsletterContent' | 'Scribunto' | 'SecurePoll' | 'Tabular.JsonConfig' | 'css' | 'flow-board' | 'javascript' | 'json' | 'sanitized-css' | 'text' | 'unknown' | 'wikitext'
}
export interface EditResponse {
edit: {
result: string
pageid: number
title: string
contentmodel: string
oldrevid: number
newrevid: number
newtimestamp: string
}
} | export interface ParseResponse {
parse: {
title: string
pageid: number
text: string | random_line_split |
|
htmlbaseelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::Attr;
use dom::bindings::codegen::Bindings::HTMLBaseElementBinding;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::element::{AttributeMutation, Element};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, document_from_node};
use dom::virtualmethods::VirtualMethods;
use url::{Url, UrlParser};
use util::str::DOMString;
#[dom_struct]
pub struct HTMLBaseElement {
htmlelement: HTMLElement
}
impl HTMLBaseElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: &Document) -> HTMLBaseElement {
HTMLBaseElement {
htmlelement: HTMLElement::new_inherited(localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLBaseElement> {
let element = HTMLBaseElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLBaseElementBinding::Wrap)
}
/// https://html.spec.whatwg.org/multipage/#frozen-base-url
pub fn frozen_base_url(&self) -> Url {
let href = self.upcast::<Element>().get_attribute(&ns!(""), &atom!("href"))
.expect("The frozen base url is only defined for base elements \
that have a base url.");
let document = document_from_node(self);
let base = document.fallback_base_url();
let parsed = UrlParser::new().base_url(&base).parse(&href.value());
parsed.unwrap_or(base)
}
/// Update the cached base element in response to binding or unbinding from
/// a tree.
pub fn bind_unbind(&self, tree_in_doc: bool) |
}
impl VirtualMethods for HTMLBaseElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
if *attr.local_name() == atom!(href) {
document_from_node(self).refresh_base_element();
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
self.super_type().unwrap().bind_to_tree(tree_in_doc);
self.bind_unbind(tree_in_doc);
}
fn unbind_from_tree(&self, tree_in_doc: bool) {
self.super_type().unwrap().unbind_from_tree(tree_in_doc);
self.bind_unbind(tree_in_doc);
}
}
| {
if !tree_in_doc {
return;
}
if self.upcast::<Element>().has_attribute(&atom!("href")) {
let document = document_from_node(self);
document.refresh_base_element();
}
} | identifier_body |
htmlbaseelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::Attr;
use dom::bindings::codegen::Bindings::HTMLBaseElementBinding;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::element::{AttributeMutation, Element};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, document_from_node};
use dom::virtualmethods::VirtualMethods;
use url::{Url, UrlParser};
use util::str::DOMString;
#[dom_struct]
pub struct HTMLBaseElement {
htmlelement: HTMLElement
}
impl HTMLBaseElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: &Document) -> HTMLBaseElement {
HTMLBaseElement {
htmlelement: HTMLElement::new_inherited(localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn | (localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLBaseElement> {
let element = HTMLBaseElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLBaseElementBinding::Wrap)
}
/// https://html.spec.whatwg.org/multipage/#frozen-base-url
pub fn frozen_base_url(&self) -> Url {
let href = self.upcast::<Element>().get_attribute(&ns!(""), &atom!("href"))
.expect("The frozen base url is only defined for base elements \
that have a base url.");
let document = document_from_node(self);
let base = document.fallback_base_url();
let parsed = UrlParser::new().base_url(&base).parse(&href.value());
parsed.unwrap_or(base)
}
/// Update the cached base element in response to binding or unbinding from
/// a tree.
pub fn bind_unbind(&self, tree_in_doc: bool) {
if !tree_in_doc {
return;
}
if self.upcast::<Element>().has_attribute(&atom!("href")) {
let document = document_from_node(self);
document.refresh_base_element();
}
}
}
impl VirtualMethods for HTMLBaseElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
if *attr.local_name() == atom!(href) {
document_from_node(self).refresh_base_element();
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
self.super_type().unwrap().bind_to_tree(tree_in_doc);
self.bind_unbind(tree_in_doc);
}
fn unbind_from_tree(&self, tree_in_doc: bool) {
self.super_type().unwrap().unbind_from_tree(tree_in_doc);
self.bind_unbind(tree_in_doc);
}
}
| new | identifier_name |
htmlbaseelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::Attr;
use dom::bindings::codegen::Bindings::HTMLBaseElementBinding;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::element::{AttributeMutation, Element};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, document_from_node};
use dom::virtualmethods::VirtualMethods;
use url::{Url, UrlParser};
use util::str::DOMString;
#[dom_struct]
pub struct HTMLBaseElement {
htmlelement: HTMLElement
}
impl HTMLBaseElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: &Document) -> HTMLBaseElement {
HTMLBaseElement {
htmlelement: HTMLElement::new_inherited(localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLBaseElement> {
let element = HTMLBaseElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLBaseElementBinding::Wrap)
}
/// https://html.spec.whatwg.org/multipage/#frozen-base-url
pub fn frozen_base_url(&self) -> Url {
let href = self.upcast::<Element>().get_attribute(&ns!(""), &atom!("href"))
.expect("The frozen base url is only defined for base elements \
that have a base url.");
let document = document_from_node(self);
let base = document.fallback_base_url();
let parsed = UrlParser::new().base_url(&base).parse(&href.value());
parsed.unwrap_or(base)
}
/// Update the cached base element in response to binding or unbinding from
/// a tree.
pub fn bind_unbind(&self, tree_in_doc: bool) {
if !tree_in_doc {
return;
}
if self.upcast::<Element>().has_attribute(&atom!("href")) {
let document = document_from_node(self);
document.refresh_base_element();
}
}
}
impl VirtualMethods for HTMLBaseElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
if *attr.local_name() == atom!(href) {
document_from_node(self).refresh_base_element();
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
self.super_type().unwrap().bind_to_tree(tree_in_doc);
self.bind_unbind(tree_in_doc); | fn unbind_from_tree(&self, tree_in_doc: bool) {
self.super_type().unwrap().unbind_from_tree(tree_in_doc);
self.bind_unbind(tree_in_doc);
}
} | }
| random_line_split |
htmlbaseelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::Attr;
use dom::bindings::codegen::Bindings::HTMLBaseElementBinding;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::element::{AttributeMutation, Element};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, document_from_node};
use dom::virtualmethods::VirtualMethods;
use url::{Url, UrlParser};
use util::str::DOMString;
#[dom_struct]
pub struct HTMLBaseElement {
htmlelement: HTMLElement
}
impl HTMLBaseElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: &Document) -> HTMLBaseElement {
HTMLBaseElement {
htmlelement: HTMLElement::new_inherited(localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLBaseElement> {
let element = HTMLBaseElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLBaseElementBinding::Wrap)
}
/// https://html.spec.whatwg.org/multipage/#frozen-base-url
pub fn frozen_base_url(&self) -> Url {
let href = self.upcast::<Element>().get_attribute(&ns!(""), &atom!("href"))
.expect("The frozen base url is only defined for base elements \
that have a base url.");
let document = document_from_node(self);
let base = document.fallback_base_url();
let parsed = UrlParser::new().base_url(&base).parse(&href.value());
parsed.unwrap_or(base)
}
/// Update the cached base element in response to binding or unbinding from
/// a tree.
pub fn bind_unbind(&self, tree_in_doc: bool) {
if !tree_in_doc |
if self.upcast::<Element>().has_attribute(&atom!("href")) {
let document = document_from_node(self);
document.refresh_base_element();
}
}
}
impl VirtualMethods for HTMLBaseElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
if *attr.local_name() == atom!(href) {
document_from_node(self).refresh_base_element();
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
self.super_type().unwrap().bind_to_tree(tree_in_doc);
self.bind_unbind(tree_in_doc);
}
fn unbind_from_tree(&self, tree_in_doc: bool) {
self.super_type().unwrap().unbind_from_tree(tree_in_doc);
self.bind_unbind(tree_in_doc);
}
}
| {
return;
} | conditional_block |
table_of_contents.py | #!/usr/bin/env python3
import os
from wsgiref.handlers import CGIHandler
import orjson
import sys
sys.path.append("..")
import custom_functions
try:
from custom_functions import generate_toc_object
except ImportError:
from philologic.runtime import generate_toc_object
try:
from custom_functions import WebConfig
except ImportError:
from philologic.runtime import WebConfig
try:
from custom_functions import WSGIHandler
except ImportError:
from philologic.runtime import WSGIHandler
def table_of_contents(environ, start_response):
config = WebConfig(os.path.abspath(os.path.dirname(__file__)).replace("reports", ""))
request = WSGIHandler(environ, config)
headers = [("Content-type", "application/json; charset=UTF-8"), ("Access-Control-Allow-Origin", "*")]
start_response("200 OK", headers)
toc_object = generate_toc_object(request, config)
yield orjson.dumps(toc_object)
if __name__ == "__main__":
| CGIHandler().run(table_of_contents) | conditional_block |
|
table_of_contents.py | #!/usr/bin/env python3
import os
from wsgiref.handlers import CGIHandler
import orjson
import sys
sys.path.append("..")
import custom_functions
try:
from custom_functions import generate_toc_object
except ImportError:
from philologic.runtime import generate_toc_object
try:
from custom_functions import WebConfig
except ImportError:
from philologic.runtime import WebConfig
try:
from custom_functions import WSGIHandler
except ImportError:
from philologic.runtime import WSGIHandler
def table_of_contents(environ, start_response):
|
if __name__ == "__main__":
CGIHandler().run(table_of_contents)
| config = WebConfig(os.path.abspath(os.path.dirname(__file__)).replace("reports", ""))
request = WSGIHandler(environ, config)
headers = [("Content-type", "application/json; charset=UTF-8"), ("Access-Control-Allow-Origin", "*")]
start_response("200 OK", headers)
toc_object = generate_toc_object(request, config)
yield orjson.dumps(toc_object) | identifier_body |
table_of_contents.py | #!/usr/bin/env python3
import os
from wsgiref.handlers import CGIHandler
import orjson
import sys
sys.path.append("..")
import custom_functions
try:
from custom_functions import generate_toc_object
except ImportError:
from philologic.runtime import generate_toc_object
try:
from custom_functions import WebConfig
except ImportError:
from philologic.runtime import WebConfig
try:
from custom_functions import WSGIHandler
except ImportError:
from philologic.runtime import WSGIHandler
def | (environ, start_response):
config = WebConfig(os.path.abspath(os.path.dirname(__file__)).replace("reports", ""))
request = WSGIHandler(environ, config)
headers = [("Content-type", "application/json; charset=UTF-8"), ("Access-Control-Allow-Origin", "*")]
start_response("200 OK", headers)
toc_object = generate_toc_object(request, config)
yield orjson.dumps(toc_object)
if __name__ == "__main__":
CGIHandler().run(table_of_contents)
| table_of_contents | identifier_name |
table_of_contents.py | #!/usr/bin/env python3
import os
from wsgiref.handlers import CGIHandler
import orjson
import sys
sys.path.append("..")
import custom_functions
try:
from custom_functions import generate_toc_object
except ImportError:
from philologic.runtime import generate_toc_object
try:
from custom_functions import WebConfig
except ImportError:
from philologic.runtime import WebConfig
try:
from custom_functions import WSGIHandler
except ImportError:
from philologic.runtime import WSGIHandler
def table_of_contents(environ, start_response):
config = WebConfig(os.path.abspath(os.path.dirname(__file__)).replace("reports", ""))
request = WSGIHandler(environ, config)
headers = [("Content-type", "application/json; charset=UTF-8"), ("Access-Control-Allow-Origin", "*")]
start_response("200 OK", headers)
toc_object = generate_toc_object(request, config)
yield orjson.dumps(toc_object)
if __name__ == "__main__": | CGIHandler().run(table_of_contents) | random_line_split |
|
rot.rs | pub struct Rot {
pub s: f32,
pub c: f32
}
impl Rot {
pub fn new() -> Rot {
Rot {
s: 0.0,
c: 1.0
}
}
/// Initialize from an angle in radians
pub fn new_angle(angle: f32) -> Rot {
Rot {
s: angle.sin(),
c: angle.cos()
}
}
pub fn set(&mut self, angle: f32) {
self.s = angle.sin();
self.c = angle.cos();
}
/// Set to the identity rotation
pub fn set_identity(&mut self) {
self.s = 0.0;
self.c = 1.0;
}
/// Get the angle in radians
pub fn get_angle(&mut self) -> f32 {
self.s.atan2(self.c)
}
/// Get the x-axis
pub fn get_x_axis(&mut self) -> Vec2 {
Vec2::new(self.c, self.s)
}
/// Get the u axis
pub fn get_y_axis(&mut self) -> Vec2 {
Vec2::new(-self.s, self.c)
}
} | use super::Vec2;
/// Rotation
#[derive(Copy, Clone)] | random_line_split |
|
rot.rs | use super::Vec2;
/// Rotation
#[derive(Copy, Clone)]
pub struct | {
pub s: f32,
pub c: f32
}
impl Rot {
pub fn new() -> Rot {
Rot {
s: 0.0,
c: 1.0
}
}
/// Initialize from an angle in radians
pub fn new_angle(angle: f32) -> Rot {
Rot {
s: angle.sin(),
c: angle.cos()
}
}
pub fn set(&mut self, angle: f32) {
self.s = angle.sin();
self.c = angle.cos();
}
/// Set to the identity rotation
pub fn set_identity(&mut self) {
self.s = 0.0;
self.c = 1.0;
}
/// Get the angle in radians
pub fn get_angle(&mut self) -> f32 {
self.s.atan2(self.c)
}
/// Get the x-axis
pub fn get_x_axis(&mut self) -> Vec2 {
Vec2::new(self.c, self.s)
}
/// Get the u axis
pub fn get_y_axis(&mut self) -> Vec2 {
Vec2::new(-self.s, self.c)
}
}
| Rot | identifier_name |
check_with_sitemap_vpro.py | #!/usr/bin/env python3
import os
import re
import subprocess
import sys
import threading
import time
import urllib
from subprocess import Popen, PIPE
sys.path.append("..")
from check_with_sitemap import CheckWithSitemap
DEFAULT_JAVA_PATH = 'java'
class CheckWithSiteMapVpro(CheckWithSitemap):
"""
This specialization is customized for VPRO.
It can connect via JMX to VPRO's Mangolia CMS which contains the original pages, and request it to index missing pages
This wraps a command line client for jmx: https://github.com/jiaqi/jmxterm/
"""
def __init__(self, java_path: str = DEFAULT_JAVA_PATH):
super().__init__()
self.jmx_url = self.args.jmx_url
self.jmxterm_binary = self.args.jmxterm_binary
self.java_path = java_path
self._get_jmx_term_if_necessary()
if self.args.tunnel:
tunnel = SshTunnel(self.log)
tunnel.start()
def add_arguments(self):
super().add_arguments()
api = self.api
api.add_argument('--jmx_url', type=str, default=None, help='use JMX to trigger reindex. An url like "localhost:500" where this is tunneled to the magnolia backend server')
api.add_argument('--jmxterm_binary', type=str, default=None, help='location of jmxterm binary')
api.add_argument('--tunnel', action='store_true', default=False, help='set up jmx tunnel too')
def perform_add_to_api(self, not_in_api: list):
"""
Actually add to api
"""
if self.jmx_url:
self.jmxterm = [self.java_path, '-jar', self.jmxterm_binary, '--url', self.jmx_url, "-n", "-v", "silent"]
not_in_api = self._reindex_3voor12(not_in_api)
not_in_api = self._reindex_cinema_films(not_in_api)
not_in_api = self._reindex_cinema_person(not_in_api)
not_in_api = self._reindex_mids(not_in_api)
self._reindex_urls(not_in_api)
else:
self.log.info("No jmx_url configured, not trying to implicitly add to api via JMX")
def _reindex_mids(self, not_in_api: list) -> list:
urls_with_mid = list(filter(lambda m: m[0] is not None, map(self._find_mid, not_in_api)))
return self._reindex_ids(not_in_api, urls_with_mid, "nl.vpro.magnolia:name=IndexerMaintainerImpl", "reindexMediaObjects", 100, "media objects")
def _reindex_3voor12(self, not_in_api: list) -> list:
urls_with_uuids = list(filter(lambda m: m[0] is not None, map(self._find_update_uuid, not_in_api)))
return self._reindex_ids(not_in_api, urls_with_uuids, "nl.vpro.magnolia:name=DrieVoorTwaalfUpdateIndexer", "reindexUUIDs", 100, "3voor12 updates")
def _reindex_cinema_films(self, not_in_api: list) -> list:
cinema_ids = list(filter(lambda m: m[0] is not None, map(self._find_cinema_film_id, not_in_api)))
return self._reindex_ids(not_in_api, cinema_ids, "nl.vpro.magnolia:name=CinemaObjectIndexer", "reindex", 100, "cinema films")
def _reindex_cinema_person(self, not_in_api: list) -> list:
cinema_ids = list(filter(lambda m: m[0] is not None, map(self._find_cinema_person_uid, not_in_api)))
return self._reindex_ids(not_in_api, cinema_ids, "nl.vpro.magnolia:name=CinemaPersonIndexer", "reindex", 100, "cinema persons")
def _reindex_urls(self, not_in_api: list) -> None:
page_size = 20
self.log.info("Reindexing %d urls" % len(not_in_api))
for i in range(0, len(not_in_api), page_size ):
self._call_jmx_operation("nl.vpro.magnolia:name=IndexerMaintainerImpl", "reindexUrls", not_in_api[i: i + page_size ])
def _find_mid(self, url: str) -> list:
return self._find_by_regexp(".*?~(.*?)~.*", url)
def _find_update_uuid(self, url: str) -> list:
return self._find_by_regexp(".*?update~(.*?)~.*", url)
def _find_cinema_film_id(self, url: str) -> list:
return self._find_by_regexp(".*?film~(.*?)~.*", url)
def _find_cinema_person_uid(self, url: str) -> list:
return self._find_by_regexp(".*?persoon~(.*?)~.*", url)
@staticmethod
def _find_by_regexp(regex: str, url: str) -> list:
matcher = re.match(regex, url)
if matcher:
return [matcher.group(1), url]
else:
return [None, url]
def _reindex_ids(
self, not_in_api: list,
ids: list,
bean: str,
operation: str, page_size: int, name: str) -> list:
self.log.info("Reindexing %d %s" % (len(ids), name))
for i in range(0, len(ids), page_size):
self._call_jmx_operation(bean, operation, list(map(lambda m : m[0], ids[i: i + page_size])))
urls = list(map(lambda u: u[1], ids))
self.log.debug("Associated with %s" % str(urls))
return [e for e in not_in_api if e not in urls]
def _call_jmx_operation(self, bean: str, operation: str, sub_list: list):
p = Popen(self.jmxterm, stdin=PIPE, stdout=PIPE, encoding='utf-8')
input = "bean " + bean +"\nrun " + operation + " " + ",".join(sub_list)
self.log.info("input\n%s" % input)
out, error = p.communicate(input=input, timeout=100)
self.log.info("output\n%s" % out)
if error:
self.log.info("error\n%s" % error)
if "still busy" in out:
self.log.info("Jmx reports that still busy. Let's wait a bit then")
time.sleep(20)
def _get_jmx_term_if_necessary(self):
if self.jmx_url and not self.jmxterm_binary:
from_env = os.getenv('JMXTERM_BINARY')
if not from_env is None:
self.jmxterm_binary=from_env
else:
jmxtermversion = "1.0.2"
jmxterm = "jmxterm-" + jmxtermversion + "-uber.jar"
path = os.path.dirname(os.path.realpath(__file__))
self.jmxterm_binary = os.path.join(path, jmxterm)
if not os.path.exists(self.jmxterm_binary):
get_url = "https://github.com/jiaqi/jmxterm/releases/download/v" + jmxtermversion + "/" + jmxterm
self.log.info("Downloading %s -> %s" % (get_url, self.jmxterm_binary))
urllib.request.urlretrieve (get_url, self.jmxterm_binary)
class | (threading.Thread):
def __init__(self, log):
threading.Thread.__init__(self)
self.daemon = True # So that thread will exit when
# main non-daemon thread finishes
self.log = log
def run(self):
self.log.info("Setting up tunnel")
if subprocess.call([
'ssh', '-N', '-4',
'-L', '5000:localhost:5000',
'os2-magnolia-backend-prod-01'
]):
raise Exception ('ssh tunnel setup failed')
if __name__ == "__main__":
CheckWithSiteMapVpro().main()
| SshTunnel | identifier_name |
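The checker above drives jmxterm non-interactively: it launches the jar with --url, -n and -v silent, then pipes a two-line bean/run script to its stdin (see _call_jmx_operation). A rough sketch of how that stdin script is built — the MBean and operation names are copied from the code above, while the URLs are hypothetical placeholders:

    def jmxterm_script(bean, operation, args):
        # the two commands jmxterm expects: select the MBean, then invoke the operation
        return "bean " + bean + "\nrun " + operation + " " + ",".join(args)

    print(jmxterm_script(
        "nl.vpro.magnolia:name=IndexerMaintainerImpl",
        "reindexUrls",
        ["https://example.com/page-1", "https://example.com/page-2"],  # hypothetical URLs
    ))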
check_with_sitemap_vpro.py | #!/usr/bin/env python3
import os
import re
import subprocess
import sys
import threading
import time
import urllib
from subprocess import Popen, PIPE
sys.path.append("..")
from check_with_sitemap import CheckWithSitemap
DEFAULT_JAVA_PATH = 'java'
class CheckWithSiteMapVpro(CheckWithSitemap):
"""
This specialization is customized for VPRO.
    It can connect via JMX to VPRO's Magnolia CMS, which contains the original pages, and request it to index missing pages
This wraps a command line client for jmx: https://github.com/jiaqi/jmxterm/
"""
def __init__(self, java_path: str = DEFAULT_JAVA_PATH):
super().__init__()
self.jmx_url = self.args.jmx_url
self.jmxterm_binary = self.args.jmxterm_binary
self.java_path = java_path
self._get_jmx_term_if_necessary()
if self.args.tunnel:
tunnel = SshTunnel(self.log)
tunnel.start()
def add_arguments(self):
super().add_arguments()
api = self.api
api.add_argument('--jmx_url', type=str, default=None, help='use JMX to trigger reindex. An url like "localhost:500" where this is tunneled to the magnolia backend server')
api.add_argument('--jmxterm_binary', type=str, default=None, help='location of jmxterm binary')
api.add_argument('--tunnel', action='store_true', default=False, help='set up jmx tunnel too')
def perform_add_to_api(self, not_in_api: list):
"""
Actually add to api
"""
if self.jmx_url:
self.jmxterm = [self.java_path, '-jar', self.jmxterm_binary, '--url', self.jmx_url, "-n", "-v", "silent"]
not_in_api = self._reindex_3voor12(not_in_api)
not_in_api = self._reindex_cinema_films(not_in_api)
not_in_api = self._reindex_cinema_person(not_in_api)
not_in_api = self._reindex_mids(not_in_api)
self._reindex_urls(not_in_api)
else:
self.log.info("No jmx_url configured, not trying to implicitly add to api via JMX")
def _reindex_mids(self, not_in_api: list) -> list:
urls_with_mid = list(filter(lambda m: m[0] is not None, map(self._find_mid, not_in_api)))
return self._reindex_ids(not_in_api, urls_with_mid, "nl.vpro.magnolia:name=IndexerMaintainerImpl", "reindexMediaObjects", 100, "media objects")
def _reindex_3voor12(self, not_in_api: list) -> list:
urls_with_uuids = list(filter(lambda m: m[0] is not None, map(self._find_update_uuid, not_in_api)))
return self._reindex_ids(not_in_api, urls_with_uuids, "nl.vpro.magnolia:name=DrieVoorTwaalfUpdateIndexer", "reindexUUIDs", 100, "3voor12 updates")
def _reindex_cinema_films(self, not_in_api: list) -> list:
cinema_ids = list(filter(lambda m: m[0] is not None, map(self._find_cinema_film_id, not_in_api)))
return self._reindex_ids(not_in_api, cinema_ids, "nl.vpro.magnolia:name=CinemaObjectIndexer", "reindex", 100, "cinema films")
def _reindex_cinema_person(self, not_in_api: list) -> list:
cinema_ids = list(filter(lambda m: m[0] is not None, map(self._find_cinema_person_uid, not_in_api)))
return self._reindex_ids(not_in_api, cinema_ids, "nl.vpro.magnolia:name=CinemaPersonIndexer", "reindex", 100, "cinema persons")
def _reindex_urls(self, not_in_api: list) -> None:
|
def _find_mid(self, url: str) -> list:
return self._find_by_regexp(".*?~(.*?)~.*", url)
def _find_update_uuid(self, url: str) -> list:
return self._find_by_regexp(".*?update~(.*?)~.*", url)
def _find_cinema_film_id(self, url: str) -> list:
return self._find_by_regexp(".*?film~(.*?)~.*", url)
def _find_cinema_person_uid(self, url: str) -> list:
return self._find_by_regexp(".*?persoon~(.*?)~.*", url)
@staticmethod
def _find_by_regexp(regex: str, url: str) -> list:
matcher = re.match(regex, url)
if matcher:
return [matcher.group(1), url]
else:
return [None, url]
def _reindex_ids(
self, not_in_api: list,
ids: list,
bean: str,
operation: str, page_size: int, name: str) -> list:
self.log.info("Reindexing %d %s" % (len(ids), name))
for i in range(0, len(ids), page_size):
self._call_jmx_operation(bean, operation, list(map(lambda m : m[0], ids[i: i + page_size])))
urls = list(map(lambda u: u[1], ids))
self.log.debug("Associated with %s" % str(urls))
return [e for e in not_in_api if e not in urls]
def _call_jmx_operation(self, bean: str, operation: str, sub_list: list):
p = Popen(self.jmxterm, stdin=PIPE, stdout=PIPE, encoding='utf-8')
input = "bean " + bean +"\nrun " + operation + " " + ",".join(sub_list)
self.log.info("input\n%s" % input)
out, error = p.communicate(input=input, timeout=100)
self.log.info("output\n%s" % out)
if error:
self.log.info("error\n%s" % error)
if "still busy" in out:
self.log.info("Jmx reports that still busy. Let's wait a bit then")
time.sleep(20)
def _get_jmx_term_if_necessary(self):
if self.jmx_url and not self.jmxterm_binary:
from_env = os.getenv('JMXTERM_BINARY')
if not from_env is None:
self.jmxterm_binary=from_env
else:
jmxtermversion = "1.0.2"
jmxterm = "jmxterm-" + jmxtermversion + "-uber.jar"
path = os.path.dirname(os.path.realpath(__file__))
self.jmxterm_binary = os.path.join(path, jmxterm)
if not os.path.exists(self.jmxterm_binary):
get_url = "https://github.com/jiaqi/jmxterm/releases/download/v" + jmxtermversion + "/" + jmxterm
self.log.info("Downloading %s -> %s" % (get_url, self.jmxterm_binary))
urllib.request.urlretrieve (get_url, self.jmxterm_binary)
class SshTunnel(threading.Thread):
def __init__(self, log):
threading.Thread.__init__(self)
self.daemon = True # So that thread will exit when
# main non-daemon thread finishes
self.log = log
def run(self):
self.log.info("Setting up tunnel")
if subprocess.call([
'ssh', '-N', '-4',
'-L', '5000:localhost:5000',
'os2-magnolia-backend-prod-01'
]):
raise Exception ('ssh tunnel setup failed')
if __name__ == "__main__":
CheckWithSiteMapVpro().main()
| page_size = 20
self.log.info("Reindexing %d urls" % len(not_in_api))
for i in range(0, len(not_in_api), page_size ):
self._call_jmx_operation("nl.vpro.magnolia:name=IndexerMaintainerImpl", "reindexUrls", not_in_api[i: i + page_size ]) | identifier_body |
check_with_sitemap_vpro.py | #!/usr/bin/env python3
import os
import re
import subprocess
import sys
import threading
import time
import urllib
from subprocess import Popen, PIPE
sys.path.append("..")
from check_with_sitemap import CheckWithSitemap
DEFAULT_JAVA_PATH = 'java'
class CheckWithSiteMapVpro(CheckWithSitemap):
"""
This specialization is customized for VPRO.
    It can connect via JMX to VPRO's Magnolia CMS, which contains the original pages, and request it to index missing pages
This wraps a command line client for jmx: https://github.com/jiaqi/jmxterm/
"""
def __init__(self, java_path: str = DEFAULT_JAVA_PATH):
super().__init__()
self.jmx_url = self.args.jmx_url
self.jmxterm_binary = self.args.jmxterm_binary
self.java_path = java_path
self._get_jmx_term_if_necessary()
if self.args.tunnel:
tunnel = SshTunnel(self.log)
tunnel.start()
def add_arguments(self):
super().add_arguments()
api = self.api
api.add_argument('--jmx_url', type=str, default=None, help='use JMX to trigger reindex. An url like "localhost:500" where this is tunneled to the magnolia backend server')
api.add_argument('--jmxterm_binary', type=str, default=None, help='location of jmxterm binary')
api.add_argument('--tunnel', action='store_true', default=False, help='set up jmx tunnel too')
def perform_add_to_api(self, not_in_api: list):
"""
Actually add to api
"""
if self.jmx_url:
self.jmxterm = [self.java_path, '-jar', self.jmxterm_binary, '--url', self.jmx_url, "-n", "-v", "silent"]
not_in_api = self._reindex_3voor12(not_in_api)
not_in_api = self._reindex_cinema_films(not_in_api)
not_in_api = self._reindex_cinema_person(not_in_api)
not_in_api = self._reindex_mids(not_in_api)
self._reindex_urls(not_in_api)
else:
self.log.info("No jmx_url configured, not trying to implicitly add to api via JMX")
def _reindex_mids(self, not_in_api: list) -> list:
urls_with_mid = list(filter(lambda m: m[0] is not None, map(self._find_mid, not_in_api)))
return self._reindex_ids(not_in_api, urls_with_mid, "nl.vpro.magnolia:name=IndexerMaintainerImpl", "reindexMediaObjects", 100, "media objects")
def _reindex_3voor12(self, not_in_api: list) -> list:
urls_with_uuids = list(filter(lambda m: m[0] is not None, map(self._find_update_uuid, not_in_api)))
return self._reindex_ids(not_in_api, urls_with_uuids, "nl.vpro.magnolia:name=DrieVoorTwaalfUpdateIndexer", "reindexUUIDs", 100, "3voor12 updates")
def _reindex_cinema_films(self, not_in_api: list) -> list:
cinema_ids = list(filter(lambda m: m[0] is not None, map(self._find_cinema_film_id, not_in_api)))
return self._reindex_ids(not_in_api, cinema_ids, "nl.vpro.magnolia:name=CinemaObjectIndexer", "reindex", 100, "cinema films")
def _reindex_cinema_person(self, not_in_api: list) -> list:
cinema_ids = list(filter(lambda m: m[0] is not None, map(self._find_cinema_person_uid, not_in_api)))
return self._reindex_ids(not_in_api, cinema_ids, "nl.vpro.magnolia:name=CinemaPersonIndexer", "reindex", 100, "cinema persons")
def _reindex_urls(self, not_in_api: list) -> None:
page_size = 20
self.log.info("Reindexing %d urls" % len(not_in_api))
for i in range(0, len(not_in_api), page_size ):
self._call_jmx_operation("nl.vpro.magnolia:name=IndexerMaintainerImpl", "reindexUrls", not_in_api[i: i + page_size ])
def _find_mid(self, url: str) -> list:
return self._find_by_regexp(".*?~(.*?)~.*", url)
def _find_update_uuid(self, url: str) -> list:
return self._find_by_regexp(".*?update~(.*?)~.*", url)
def _find_cinema_film_id(self, url: str) -> list:
return self._find_by_regexp(".*?film~(.*?)~.*", url)
def _find_cinema_person_uid(self, url: str) -> list:
return self._find_by_regexp(".*?persoon~(.*?)~.*", url)
@staticmethod
def _find_by_regexp(regex: str, url: str) -> list:
matcher = re.match(regex, url)
if matcher:
return [matcher.group(1), url]
else:
return [None, url]
def _reindex_ids(
self, not_in_api: list,
ids: list,
bean: str,
operation: str, page_size: int, name: str) -> list:
self.log.info("Reindexing %d %s" % (len(ids), name))
for i in range(0, len(ids), page_size):
self._call_jmx_operation(bean, operation, list(map(lambda m : m[0], ids[i: i + page_size])))
urls = list(map(lambda u: u[1], ids))
self.log.debug("Associated with %s" % str(urls))
return [e for e in not_in_api if e not in urls]
def _call_jmx_operation(self, bean: str, operation: str, sub_list: list):
p = Popen(self.jmxterm, stdin=PIPE, stdout=PIPE, encoding='utf-8')
input = "bean " + bean +"\nrun " + operation + " " + ",".join(sub_list)
self.log.info("input\n%s" % input)
out, error = p.communicate(input=input, timeout=100)
self.log.info("output\n%s" % out)
if error:
self.log.info("error\n%s" % error)
if "still busy" in out:
self.log.info("Jmx reports that still busy. Let's wait a bit then")
time.sleep(20)
def _get_jmx_term_if_necessary(self):
if self.jmx_url and not self.jmxterm_binary:
from_env = os.getenv('JMXTERM_BINARY')
if not from_env is None:
self.jmxterm_binary=from_env
else:
jmxtermversion = "1.0.2"
jmxterm = "jmxterm-" + jmxtermversion + "-uber.jar"
path = os.path.dirname(os.path.realpath(__file__))
self.jmxterm_binary = os.path.join(path, jmxterm)
if not os.path.exists(self.jmxterm_binary):
get_url = "https://github.com/jiaqi/jmxterm/releases/download/v" + jmxtermversion + "/" + jmxterm
self.log.info("Downloading %s -> %s" % (get_url, self.jmxterm_binary))
urllib.request.urlretrieve (get_url, self.jmxterm_binary)
class SshTunnel(threading.Thread):
def __init__(self, log):
threading.Thread.__init__(self)
self.daemon = True # So that thread will exit when | self.log = log
def run(self):
self.log.info("Setting up tunnel")
if subprocess.call([
'ssh', '-N', '-4',
'-L', '5000:localhost:5000',
'os2-magnolia-backend-prod-01'
]):
raise Exception ('ssh tunnel setup failed')
if __name__ == "__main__":
CheckWithSiteMapVpro().main() | # main non-daemon thread finishes | random_line_split |
check_with_sitemap_vpro.py | #!/usr/bin/env python3
import os
import re
import subprocess
import sys
import threading
import time
import urllib
from subprocess import Popen, PIPE
sys.path.append("..")
from check_with_sitemap import CheckWithSitemap
DEFAULT_JAVA_PATH = 'java'
class CheckWithSiteMapVpro(CheckWithSitemap):
"""
This specialization is customized for VPRO.
    It can connect via JMX to VPRO's Magnolia CMS, which contains the original pages, and request it to index missing pages
This wraps a command line client for jmx: https://github.com/jiaqi/jmxterm/
"""
def __init__(self, java_path: str = DEFAULT_JAVA_PATH):
super().__init__()
self.jmx_url = self.args.jmx_url
self.jmxterm_binary = self.args.jmxterm_binary
self.java_path = java_path
self._get_jmx_term_if_necessary()
if self.args.tunnel:
tunnel = SshTunnel(self.log)
tunnel.start()
def add_arguments(self):
super().add_arguments()
api = self.api
api.add_argument('--jmx_url', type=str, default=None, help='use JMX to trigger reindex. An url like "localhost:500" where this is tunneled to the magnolia backend server')
api.add_argument('--jmxterm_binary', type=str, default=None, help='location of jmxterm binary')
api.add_argument('--tunnel', action='store_true', default=False, help='set up jmx tunnel too')
def perform_add_to_api(self, not_in_api: list):
"""
Actually add to api
"""
if self.jmx_url:
self.jmxterm = [self.java_path, '-jar', self.jmxterm_binary, '--url', self.jmx_url, "-n", "-v", "silent"]
not_in_api = self._reindex_3voor12(not_in_api)
not_in_api = self._reindex_cinema_films(not_in_api)
not_in_api = self._reindex_cinema_person(not_in_api)
not_in_api = self._reindex_mids(not_in_api)
self._reindex_urls(not_in_api)
else:
self.log.info("No jmx_url configured, not trying to implicitly add to api via JMX")
def _reindex_mids(self, not_in_api: list) -> list:
urls_with_mid = list(filter(lambda m: m[0] is not None, map(self._find_mid, not_in_api)))
return self._reindex_ids(not_in_api, urls_with_mid, "nl.vpro.magnolia:name=IndexerMaintainerImpl", "reindexMediaObjects", 100, "media objects")
def _reindex_3voor12(self, not_in_api: list) -> list:
urls_with_uuids = list(filter(lambda m: m[0] is not None, map(self._find_update_uuid, not_in_api)))
return self._reindex_ids(not_in_api, urls_with_uuids, "nl.vpro.magnolia:name=DrieVoorTwaalfUpdateIndexer", "reindexUUIDs", 100, "3voor12 updates")
def _reindex_cinema_films(self, not_in_api: list) -> list:
cinema_ids = list(filter(lambda m: m[0] is not None, map(self._find_cinema_film_id, not_in_api)))
return self._reindex_ids(not_in_api, cinema_ids, "nl.vpro.magnolia:name=CinemaObjectIndexer", "reindex", 100, "cinema films")
def _reindex_cinema_person(self, not_in_api: list) -> list:
cinema_ids = list(filter(lambda m: m[0] is not None, map(self._find_cinema_person_uid, not_in_api)))
return self._reindex_ids(not_in_api, cinema_ids, "nl.vpro.magnolia:name=CinemaPersonIndexer", "reindex", 100, "cinema persons")
def _reindex_urls(self, not_in_api: list) -> None:
page_size = 20
self.log.info("Reindexing %d urls" % len(not_in_api))
for i in range(0, len(not_in_api), page_size ):
self._call_jmx_operation("nl.vpro.magnolia:name=IndexerMaintainerImpl", "reindexUrls", not_in_api[i: i + page_size ])
def _find_mid(self, url: str) -> list:
return self._find_by_regexp(".*?~(.*?)~.*", url)
def _find_update_uuid(self, url: str) -> list:
return self._find_by_regexp(".*?update~(.*?)~.*", url)
def _find_cinema_film_id(self, url: str) -> list:
return self._find_by_regexp(".*?film~(.*?)~.*", url)
def _find_cinema_person_uid(self, url: str) -> list:
return self._find_by_regexp(".*?persoon~(.*?)~.*", url)
@staticmethod
def _find_by_regexp(regex: str, url: str) -> list:
matcher = re.match(regex, url)
if matcher:
return [matcher.group(1), url]
else:
return [None, url]
def _reindex_ids(
self, not_in_api: list,
ids: list,
bean: str,
operation: str, page_size: int, name: str) -> list:
self.log.info("Reindexing %d %s" % (len(ids), name))
for i in range(0, len(ids), page_size):
self._call_jmx_operation(bean, operation, list(map(lambda m : m[0], ids[i: i + page_size])))
urls = list(map(lambda u: u[1], ids))
self.log.debug("Associated with %s" % str(urls))
return [e for e in not_in_api if e not in urls]
def _call_jmx_operation(self, bean: str, operation: str, sub_list: list):
p = Popen(self.jmxterm, stdin=PIPE, stdout=PIPE, encoding='utf-8')
input = "bean " + bean +"\nrun " + operation + " " + ",".join(sub_list)
self.log.info("input\n%s" % input)
out, error = p.communicate(input=input, timeout=100)
self.log.info("output\n%s" % out)
if error:
self.log.info("error\n%s" % error)
if "still busy" in out:
|
def _get_jmx_term_if_necessary(self):
if self.jmx_url and not self.jmxterm_binary:
from_env = os.getenv('JMXTERM_BINARY')
if not from_env is None:
self.jmxterm_binary=from_env
else:
jmxtermversion = "1.0.2"
jmxterm = "jmxterm-" + jmxtermversion + "-uber.jar"
path = os.path.dirname(os.path.realpath(__file__))
self.jmxterm_binary = os.path.join(path, jmxterm)
if not os.path.exists(self.jmxterm_binary):
get_url = "https://github.com/jiaqi/jmxterm/releases/download/v" + jmxtermversion + "/" + jmxterm
self.log.info("Downloading %s -> %s" % (get_url, self.jmxterm_binary))
urllib.request.urlretrieve (get_url, self.jmxterm_binary)
class SshTunnel(threading.Thread):
def __init__(self, log):
threading.Thread.__init__(self)
self.daemon = True # So that thread will exit when
# main non-daemon thread finishes
self.log = log
def run(self):
self.log.info("Setting up tunnel")
if subprocess.call([
'ssh', '-N', '-4',
'-L', '5000:localhost:5000',
'os2-magnolia-backend-prod-01'
]):
raise Exception ('ssh tunnel setup failed')
if __name__ == "__main__":
CheckWithSiteMapVpro().main()
| self.log.info("Jmx reports that still busy. Let's wait a bit then")
time.sleep(20) | conditional_block |
personal.rs | , 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use std::str::FromStr;
use jsonrpc_core::{IoHandler, GenericIoHandler};
use util::{U256, Uint, Address};
use ethcore::account_provider::AccountProvider;
use v1::{PersonalClient, Personal};
use v1::tests::helpers::TestMinerService;
use ethcore::client::TestBlockChainClient;
use ethcore::transaction::{Action, Transaction};
struct PersonalTester {
accounts: Arc<AccountProvider>,
io: IoHandler,
miner: Arc<TestMinerService>,
// these unused fields are necessary to keep the data alive
// as the handler has only weak pointers.
_client: Arc<TestBlockChainClient>,
}
fn blockchain_client() -> Arc<TestBlockChainClient> {
let client = TestBlockChainClient::new();
Arc::new(client)
}
fn accounts_provider() -> Arc<AccountProvider> {
Arc::new(AccountProvider::transient_provider())
}
fn miner_service() -> Arc<TestMinerService> {
Arc::new(TestMinerService::default())
}
fn setup() -> PersonalTester {
let accounts = accounts_provider();
let client = blockchain_client();
let miner = miner_service();
let personal = PersonalClient::new(&accounts, &client, &miner, false);
let io = IoHandler::new();
io.add_delegate(personal.to_delegate());
let tester = PersonalTester {
accounts: accounts,
io: io,
miner: miner,
_client: client,
};
tester
}
#[test]
fn accounts() {
let tester = setup();
let address = tester.accounts.new_account("").unwrap();
let request = r#"{"jsonrpc": "2.0", "method": "personal_listAccounts", "params": [], "id": 1}"#;
let response = r#"{"jsonrpc":"2.0","result":[""#.to_owned() + &format!("0x{:?}", address) + r#""],"id":1}"#;
assert_eq!(tester.io.handle_request_sync(request), Some(response.to_owned()));
}
#[test]
fn new_account() {
let tester = setup();
let request = r#"{"jsonrpc": "2.0", "method": "personal_newAccount", "params": ["pass"], "id": 1}"#;
let res = tester.io.handle_request_sync(request);
let accounts = tester.accounts.accounts().unwrap();
assert_eq!(accounts.len(), 1);
let address = accounts[0];
let response = r#"{"jsonrpc":"2.0","result":""#.to_owned() + format!("0x{:?}", address).as_ref() + r#"","id":1}"#;
assert_eq!(res, Some(response));
}
#[test]
fn sign_and_send_transaction_with_invalid_password() {
let tester = setup();
let address = tester.accounts.new_account("password123").unwrap();
let request = r#"{
"jsonrpc": "2.0",
"method": "personal_signAndSendTransaction",
"params": [{
"from": ""#.to_owned() + format!("0x{:?}", address).as_ref() + r#"",
"to": "0xd46e8dd67c5d32be8058bb8eb970870f07244567",
"gas": "0x76c0",
"gasPrice": "0x9184e72a000",
"value": "0x9184e72a"
}, "password321"],
"id": 1
}"#;
let response = r#"{"jsonrpc":"2.0","error":{"code":-32021,"message":"Account password is invalid or account does not exist.","data":"SStore(InvalidPassword)"},"id":1}"#;
assert_eq!(tester.io.handle_request_sync(request.as_ref()), Some(response.into()));
}
#[test]
fn sign_and_send_transaction() {
let tester = setup();
let address = tester.accounts.new_account("password123").unwrap();
let request = r#"{
"jsonrpc": "2.0",
"method": "personal_signAndSendTransaction",
"params": [{
"from": ""#.to_owned() + format!("0x{:?}", address).as_ref() + r#"",
"to": "0xd46e8dd67c5d32be8058bb8eb970870f07244567",
"gas": "0x76c0",
"gasPrice": "0x9184e72a000",
"value": "0x9184e72a"
}, "password123"],
"id": 1
}"#;
let t = Transaction {
nonce: U256::zero(),
gas_price: U256::from(0x9184e72a000u64),
gas: U256::from(0x76c0),
action: Action::Call(Address::from_str("d46e8dd67c5d32be8058bb8eb970870f07244567").unwrap()),
value: U256::from(0x9184e72au64),
data: vec![]
};
tester.accounts.unlock_account_temporarily(address, "password123".into()).unwrap();
let signature = tester.accounts.sign(address, None, t.hash(None)).unwrap();
let t = t.with_signature(signature, None);
let response = r#"{"jsonrpc":"2.0","result":""#.to_owned() + format!("0x{:?}", t.hash()).as_ref() + r#"","id":1}"#;
assert_eq!(tester.io.handle_request_sync(request.as_ref()), Some(response));
tester.miner.last_nonces.write().insert(address.clone(), U256::zero());
let t = Transaction {
nonce: U256::one(),
gas_price: U256::from(0x9184e72a000u64),
gas: U256::from(0x76c0),
action: Action::Call(Address::from_str("d46e8dd67c5d32be8058bb8eb970870f07244567").unwrap()),
value: U256::from(0x9184e72au64),
data: vec![]
};
tester.accounts.unlock_account_temporarily(address, "password123".into()).unwrap();
let signature = tester.accounts.sign(address, None, t.hash(None)).unwrap();
let t = t.with_signature(signature, None);
let response = r#"{"jsonrpc":"2.0","result":""#.to_owned() + format!("0x{:?}", t.hash()).as_ref() + r#"","id":1}"#;
assert_eq!(tester.io.handle_request_sync(request.as_ref()), Some(response));
}
#[test]
fn should_unlock_account_temporarily() {
let tester = setup();
let address = tester.accounts.new_account("password123").unwrap();
let request = r#"{
"jsonrpc": "2.0",
"method": "personal_unlockAccount",
"params": [
""#.to_owned() + &format!("0x{:?}", address) + r#"",
"password123",
"0x100"
],
"id": 1
}"#;
let response = r#"{"jsonrpc":"2.0","result":true,"id":1}"#;
assert_eq!(tester.io.handle_request_sync(&request), Some(response.into()));
assert!(tester.accounts.sign(address, None, Default::default()).is_ok(), "Should unlock account.");
}
#[test]
fn should_unlock_account_permanently() {
let tester = setup();
let address = tester.accounts.new_account("password123").unwrap();
let request = r#"{
"jsonrpc": "2.0",
"method": "personal_unlockAccount",
"params": [
""#.to_owned() + &format!("0x{:?}", address) + r#"",
"password123",
null
],
"id": 1 | let response = r#"{"jsonrpc":"2.0","result":true,"id":1}"#;
assert_eq!(tester.io.handle_request_sync(&request), Some(response.into()));
assert!(tester.accounts | }"#; | random_line_split |
personal.rs | 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use std::str::FromStr;
use jsonrpc_core::{IoHandler, GenericIoHandler};
use util::{U256, Uint, Address};
use ethcore::account_provider::AccountProvider;
use v1::{PersonalClient, Personal};
use v1::tests::helpers::TestMinerService;
use ethcore::client::TestBlockChainClient;
use ethcore::transaction::{Action, Transaction};
struct PersonalTester {
accounts: Arc<AccountProvider>,
io: IoHandler,
miner: Arc<TestMinerService>,
// these unused fields are necessary to keep the data alive
// as the handler has only weak pointers.
_client: Arc<TestBlockChainClient>,
}
fn blockchain_client() -> Arc<TestBlockChainClient> {
let client = TestBlockChainClient::new();
Arc::new(client)
}
fn accounts_provider() -> Arc<AccountProvider> {
Arc::new(AccountProvider::transient_provider())
}
fn miner_service() -> Arc<TestMinerService> {
Arc::new(TestMinerService::default())
}
fn setup() -> PersonalTester {
let accounts = accounts_provider();
let client = blockchain_client();
let miner = miner_service();
let personal = PersonalClient::new(&accounts, &client, &miner, false);
let io = IoHandler::new();
io.add_delegate(personal.to_delegate());
let tester = PersonalTester {
accounts: accounts,
io: io,
miner: miner,
_client: client,
};
tester
}
#[test]
fn accounts() {
let tester = setup();
let address = tester.accounts.new_account("").unwrap();
let request = r#"{"jsonrpc": "2.0", "method": "personal_listAccounts", "params": [], "id": 1}"#;
let response = r#"{"jsonrpc":"2.0","result":[""#.to_owned() + &format!("0x{:?}", address) + r#""],"id":1}"#;
assert_eq!(tester.io.handle_request_sync(request), Some(response.to_owned()));
}
#[test]
fn new_account() {
let tester = setup();
let request = r#"{"jsonrpc": "2.0", "method": "personal_newAccount", "params": ["pass"], "id": 1}"#;
let res = tester.io.handle_request_sync(request);
let accounts = tester.accounts.accounts().unwrap();
assert_eq!(accounts.len(), 1);
let address = accounts[0];
let response = r#"{"jsonrpc":"2.0","result":""#.to_owned() + format!("0x{:?}", address).as_ref() + r#"","id":1}"#;
assert_eq!(res, Some(response));
}
#[test]
fn sign_and_send_transaction_with_invalid_password() {
let tester = setup();
let address = tester.accounts.new_account("password123").unwrap();
let request = r#"{
"jsonrpc": "2.0",
"method": "personal_signAndSendTransaction",
"params": [{
"from": ""#.to_owned() + format!("0x{:?}", address).as_ref() + r#"",
"to": "0xd46e8dd67c5d32be8058bb8eb970870f07244567",
"gas": "0x76c0",
"gasPrice": "0x9184e72a000",
"value": "0x9184e72a"
}, "password321"],
"id": 1
}"#;
let response = r#"{"jsonrpc":"2.0","error":{"code":-32021,"message":"Account password is invalid or account does not exist.","data":"SStore(InvalidPassword)"},"id":1}"#;
assert_eq!(tester.io.handle_request_sync(request.as_ref()), Some(response.into()));
}
#[test]
fn sign_and_send_transaction() {
let tester = setup();
let address = tester.accounts.new_account("password123").unwrap();
let request = r#"{
"jsonrpc": "2.0",
"method": "personal_signAndSendTransaction",
"params": [{
"from": ""#.to_owned() + format!("0x{:?}", address).as_ref() + r#"",
"to": "0xd46e8dd67c5d32be8058bb8eb970870f07244567",
"gas": "0x76c0",
"gasPrice": "0x9184e72a000",
"value": "0x9184e72a"
}, "password123"],
"id": 1
}"#;
let t = Transaction {
nonce: U256::zero(),
gas_price: U256::from(0x9184e72a000u64),
gas: U256::from(0x76c0),
action: Action::Call(Address::from_str("d46e8dd67c5d32be8058bb8eb970870f07244567").unwrap()),
value: U256::from(0x9184e72au64),
data: vec![]
};
tester.accounts.unlock_account_temporarily(address, "password123".into()).unwrap();
let signature = tester.accounts.sign(address, None, t.hash(None)).unwrap();
let t = t.with_signature(signature, None);
let response = r#"{"jsonrpc":"2.0","result":""#.to_owned() + format!("0x{:?}", t.hash()).as_ref() + r#"","id":1}"#;
assert_eq!(tester.io.handle_request_sync(request.as_ref()), Some(response));
tester.miner.last_nonces.write().insert(address.clone(), U256::zero());
let t = Transaction {
nonce: U256::one(),
gas_price: U256::from(0x9184e72a000u64),
gas: U256::from(0x76c0),
action: Action::Call(Address::from_str("d46e8dd67c5d32be8058bb8eb970870f07244567").unwrap()),
value: U256::from(0x9184e72au64),
data: vec![]
};
tester.accounts.unlock_account_temporarily(address, "password123".into()).unwrap();
let signature = tester.accounts.sign(address, None, t.hash(None)).unwrap();
let t = t.with_signature(signature, None);
let response = r#"{"jsonrpc":"2.0","result":""#.to_owned() + format!("0x{:?}", t.hash()).as_ref() + r#"","id":1}"#;
assert_eq!(tester.io.handle_request_sync(request.as_ref()), Some(response));
}
#[test]
fn | () {
let tester = setup();
let address = tester.accounts.new_account("password123").unwrap();
let request = r#"{
"jsonrpc": "2.0",
"method": "personal_unlockAccount",
"params": [
""#.to_owned() + &format!("0x{:?}", address) + r#"",
"password123",
"0x100"
],
"id": 1
}"#;
let response = r#"{"jsonrpc":"2.0","result":true,"id":1}"#;
assert_eq!(tester.io.handle_request_sync(&request), Some(response.into()));
assert!(tester.accounts.sign(address, None, Default::default()).is_ok(), "Should unlock account.");
}
#[test]
fn should_unlock_account_permanently() {
let tester = setup();
let address = tester.accounts.new_account("password123").unwrap();
let request = r#"{
"jsonrpc": "2.0",
"method": "personal_unlockAccount",
"params": [
""#.to_owned() + &format!("0x{:?}", address) + r#"",
"password123",
null
],
"id": 1
}"#;
let response = r#"{"jsonrpc":"2.0","result":true,"id":1}"#;
assert_eq!(tester.io.handle_request_sync(&request), Some(response.into()));
assert!(tester | should_unlock_account_temporarily | identifier_name |
index.js | = req.query.nonce;
let echostr = req.query.echostr;
let flag = Wechat.validateSignature(signature, timestamp, nonce);
if (flag) return res.send(echostr);
else return res.send({success: false});
};
/* 更新access token */
exports.updateAccessToken = function (req, res, next) {
Wechat.updateAccessToken();
res.send('updateAccessToken');
};
/* 接收来自微信的消息推送 */
exports.processWechatEvent = function (req, res, next) {
let content = req.body.xml;
console.log('Event received. Event: %s', JSON.stringify(content));
res.send('success');
if(!content) return;
try {
let fromOpenId = content.FromUserName[0],
toOpenId = content.ToUserName[0],
createTime = content.CreateTime[0],
event = content.Event ? content.Event[0] : "",
eventKey = content.EventKey ? content.EventKey[0] : "",
msgType = content.MsgType[0],
msgId = content.MsgID ? content.MsgID[0] : "",
status = content.Status ? content.Status[0] : "",
ticket = content.Ticket ? content.Ticket[0] : null;
if(msgType === 'event') {
const WechatEvent = req.app.db.models.WechatEvent;
let wechatEvent = new WechatEvent({ event: content });
wechatEvent.save((err) => {
if(err) console.error(err, err.stack);
handleEvent(req, res, fromOpenId, event, eventKey, msgId, status, ticket);
});
}
} catch(e) {
console.error(e, e.stack);
}
};
function handleEvent(req, res, openId, name, key, msgId, status, ticket) {
const Subscriber = req.app.db.models.Subscriber;
const InvitationCard = req.app.db.models.InvitationCard;
name = String(name).toLowerCase();
if(name === 'scan') {
if(ticket) {
InvitationCard.findOne({ qrTicket: ticket }).populate('invitationTask').exec((err, cardDoc) => {
if(err) return console.error(err);
if(cardDoc && cardDoc.invitationTask.status === 'OPEN') {
if(cardDoc.openId === openId) {
return Wechat.sendText(openId, "你不能扫描自己的任务卡");
} else {
Subscriber.findOne({ openId, subscribe: true }).exec((err, subscriberDoc) => {
if(err) return console.error(err);
if(subscriberDoc) return Wechat.sendText(openId, "你已经关注,不能被邀请");
});
}
}
});
}
}
if(name === 'click') {
if(key.indexOf('invitationTask') === 0) {
let taskId = key.split('-')[1];
require('../../service/InvitationCard').sendCard(openId, taskId);
}
if(key.indexOf('message') === 0) {
let replyQueueId = key.split('-')[1];
let ReplyQueue = req.app.db.models.ReplyQueue;
let ReplyQueueLog = req.app.db.models.ReplyQueueLog;
function sendMessages(messageGroup) {
async.eachSeries(messageGroup, function(message, callback) {
if(messag | if(log) {
let nextIndex = log.clickCount;
if(nextIndex > log.messageGroupsSnapshot.length-1) {
nextIndex = log.messageGroupsSnapshot.length-1;
} else {
ReplyQueueLog.findByIdAndUpdate(log._id, { clickCount: nextIndex + 1 }, { new: true }).exec();
}
sendMessages(log.messageGroupsSnapshot[nextIndex])
} else {
ReplyQueue.findById(replyQueueId).exec((err, replyQueue) => {
new ReplyQueueLog({ openId: openId, replyQueue: replyQueueId, messageGroupsSnapshot: replyQueue.messageGroups, clickCount: 1 }).save();
sendMessages(replyQueue.messageGroups[0]);
});
}
});
}
}
if(name === 'subscribe') {
const workflow = new EventEmitter();
let introducer = null;
let card = null;
// 检查扫码标记
workflow.on('checkTicket', () => {
debug('Event: checkTicket');
if(ticket) return workflow.emit('findNewUser'); // 在数据库中寻找该"新"用户
return workflow.emit('getSubscriberInfo'); // 获得"新"用户详情
});
// 在数据库中寻找该用户
workflow.on('findNewUser', () => {
debug('Event: findNewUser');
Subscriber.findOne({ openId }).exec((err, doc) => {
if(err) return workflow.emit('error', err); // 错误
if(!doc) return workflow.emit('getTaskAndCard'); // 查找邀请卡和任务配置
InvitationCard.findOne({ openId, qrTicket: ticket }).exec((err, selfScanedCardDoc) => {
if(err) return workflow.emit('error', err); // 错误
if(selfScanedCardDoc) return Wechat.sendText(openId, "你不能扫描自己的任务卡");
if(doc.subscribe) return Wechat.sendText(openId, "你已经关注,不能被邀请");
Wechat.sendText(openId, "你已经被邀请过,不能被再次邀请");
});
return workflow.emit('getSubscriberInfo'); // 获得"新"用户详情
});
});
workflow.on('getTaskAndCard', () => {
debug('Event: getTaskAndCard');
InvitationCard.findOne({ qrTicket: ticket }).populate('invitationTask').exec((err, cardDoc) => {
if(err) return workflow.emit('error', err); // 错误
if(!cardDoc) return workflow.emit('getSubscriberInfo'); // 没有此邀请卡,获得"新"用户详情
if(!cardDoc.invitationTask) return workflow.emit('getSubscriberInfo'); // 邀请卡任务不存在,获得"新"用户详情
if(cardDoc.invitationTask.status !== 'OPEN') return workflow.emit('getSubscriberInfo'); // 邀请卡任务已关闭,获得"新"用户详情
card = cardDoc.toJSON();
Subscriber.findOne({ openId: cardDoc.openId }).exec((err, introducerDoc) => {
if(err) return workflow.emit('error', err); // 错误
if(!introducerDoc) return workflow.emit('getSubscriberInfo'); // 没有此邀请人,获得"新"用户详情
introducer = introducerDoc.toJSON();
return workflow.emit('invitedCountPlus'); // 增加邀请量
});
});
});
workflow.on('invitedCountPlus', () => {
debug('Event: invitedCountPlus');
InvitationCard.findOneAndUpdate({ qrTicket: ticket }, { $inc: { invitedCount: 1 }}, function(err, doc) {
if(err) return workflow.emit('error', err); // 错误
console.log(`[WechatController] Add Invitation: ${ticket}`);
workflow.emit('getSubscriberInfo');
});
});
workflow.on('getSubscriberInfo', () => {
debug('Event: getSubscriberInfo');
Wechat.getSubscriberInfo(openId).then((info) => {
let newData = {
openId: info.openid,
groupId: info.groupid,
unionId: info.unionid,
subscribe: info.subscribe ? true : false,
subscribeTime: new Date(info.subscribe_time * 1000),
nickname: info.nickname,
remark: info.remark,
gender: info.sex,
headImgUrl: info.headimgurl,
city: info.city,
province: info.province,
country: info.country,
language: info.language
};
if(introducer) {
newData.introducer = introducer._id;
if(card.invitationTask.invitedFeedback) {
let invitedCount = card.invitedCount + 1;
let remainCount = card.invitationTask.threshold - invitedCount;
let invitedFeedback = card.invitationTask.invitedFeedback;
if(remainCount > 0) {
invitedFeedback = invitedFeedback.replace(/###/g, info.nickname)
invitedFeedback = invitedFeedback.replace(/#invited#/g, invitedCount + '');
invitedFeedback = invitedFeedback.replace(/#remain#/g, remainCount + '')
Wechat.sendText(introducer.openId, invitedFeedback);
}
}
}
Subscriber.findOneAndUpdate({ openId }, newData, { upsert: true }, function(err, doc){
if(err) return workflow.emit('error', err); // 错误
console.log(`[WechatController] New Subscriber: ${openId}`);
});
}).catch((err) => {
if(err) | e.type === 'text') {
return Wechat.sendText(openId, message.content).then((data) => {
setTimeout(callback, 1000);
}).catch(callback);
}
if(message.type === 'image') {
return Wechat.sendImage(openId, message.mediaId).then((data) => {
setTimeout(callback, 5000);
}).catch(callback);
}
}, function(err) {
err && console.log(err);
});
}
ReplyQueueLog.findOne({ openId, replyQueue: replyQueueId}).exec((err, log) => {
| identifier_body |
index.js | = req.query.nonce;
let echostr = req.query.echostr;
let flag = Wechat.validateSignature(signature, timestamp, nonce);
if (flag) return res.send(echostr);
else return res.send({success: false});
};
/* 更新access token */
exports.updateAccessToken = function (req, res, next) {
Wechat.updateAccessToken();
res.send('updateAccessToken');
};
/* 接收来自微信的消息推送 */
exports.processWechatEvent = function (req, res, next) {
let content = req.body.xml;
console.log('Event received. Event: %s', JSON.stringify(content));
res.send('success');
if(!content) return;
try {
let fromOpenId = content.FromUserName[0],
toOpenId = content.ToUserName[0],
createTime = content.CreateTime[0],
event = content.Event ? content.Event[0] : "",
eventKey = content.EventKey ? content.EventKey[0] : "",
msgType = content.MsgType[0],
msgId = content.MsgID ? content.MsgID[0] : "",
status = content.Status ? content.Status[0] : "",
ticket = content.Ticket ? content.Ticket[0] : null;
if(msgType === 'event') {
const WechatEvent = req.app.db.models.WechatEvent;
let wechatEvent = new WechatEvent({ event: content });
wechatEvent.save((err) => {
if(err) console.error(err, err.stack);
handleEvent(req, res, fromOpenId, event, eventKey, msgId, status, ticket);
});
}
} catch(e) {
console.error(e, e.stack);
}
};
function handleEvent(req, res, openId, name, key, msgId, status, ticket) {
const Subscriber = req.app.db.models.Subscriber;
const InvitationCard = req.app.db.models.InvitationCard;
name = String(name).toLowerCase();
if(name === 'scan') {
if(ticket) {
InvitationCard.findOne({ qrTicket: ticket }).populate('invitationTask').exec((err, cardDoc) => {
if(err) return console.error(err);
if(cardDoc && cardDoc.invitationTask.status === 'OPEN') {
if(cardDoc.openId === openId) {
return Wechat.sendText(openId, "你不能扫描自己的任务卡");
} else {
Subscriber.findOne({ openId, subscribe: true }).exec((err, subscriberDoc) => {
if(err) return console.error(err);
if(subscriberDoc) return Wechat.sendText(openId, "你已经关注,不能被邀请");
});
}
}
});
}
}
if(name === 'click') {
if(key.indexOf('invitationTask') === 0) {
let taskId = key.split('-')[1];
require('../../service/InvitationCard').sendCard(openId, taskId);
}
if(key.indexOf('message') === 0) {
let replyQueueId = key.split('-')[1];
let ReplyQueue = req.app.db.models.ReplyQueue;
let ReplyQueueLog = req.app.db.models.ReplyQueueLog;
function sendMessages(messageGroup) {
async.eachSeries(messageGroup, function(message, call | if(message.type === 'text') {
return Wechat.sendText(openId, message.content).then((data) => {
setTimeout(callback, 1000);
}).catch(callback);
}
if(message.type === 'image') {
return Wechat.sendImage(openId, message.mediaId).then((data) => {
setTimeout(callback, 5000);
}).catch(callback);
}
}, function(err) {
err && console.log(err);
});
}
ReplyQueueLog.findOne({ openId, replyQueue: replyQueueId}).exec((err, log) => {
if(log) {
let nextIndex = log.clickCount;
if(nextIndex > log.messageGroupsSnapshot.length-1) {
nextIndex = log.messageGroupsSnapshot.length-1;
} else {
ReplyQueueLog.findByIdAndUpdate(log._id, { clickCount: nextIndex + 1 }, { new: true }).exec();
}
sendMessages(log.messageGroupsSnapshot[nextIndex])
} else {
ReplyQueue.findById(replyQueueId).exec((err, replyQueue) => {
new ReplyQueueLog({ openId: openId, replyQueue: replyQueueId, messageGroupsSnapshot: replyQueue.messageGroups, clickCount: 1 }).save();
sendMessages(replyQueue.messageGroups[0]);
});
}
});
}
}
if(name === 'subscribe') {
const workflow = new EventEmitter();
let introducer = null;
let card = null;
// 检查扫码标记
workflow.on('checkTicket', () => {
debug('Event: checkTicket');
if(ticket) return workflow.emit('findNewUser'); // 在数据库中寻找该"新"用户
return workflow.emit('getSubscriberInfo'); // 获得"新"用户详情
});
// 在数据库中寻找该用户
workflow.on('findNewUser', () => {
debug('Event: findNewUser');
Subscriber.findOne({ openId }).exec((err, doc) => {
if(err) return workflow.emit('error', err); // 错误
if(!doc) return workflow.emit('getTaskAndCard'); // 查找邀请卡和任务配置
InvitationCard.findOne({ openId, qrTicket: ticket }).exec((err, selfScanedCardDoc) => {
if(err) return workflow.emit('error', err); // 错误
if(selfScanedCardDoc) return Wechat.sendText(openId, "你不能扫描自己的任务卡");
if(doc.subscribe) return Wechat.sendText(openId, "你已经关注,不能被邀请");
Wechat.sendText(openId, "你已经被邀请过,不能被再次邀请");
});
return workflow.emit('getSubscriberInfo'); // 获得"新"用户详情
});
});
workflow.on('getTaskAndCard', () => {
debug('Event: getTaskAndCard');
InvitationCard.findOne({ qrTicket: ticket }).populate('invitationTask').exec((err, cardDoc) => {
if(err) return workflow.emit('error', err); // 错误
if(!cardDoc) return workflow.emit('getSubscriberInfo'); // 没有此邀请卡,获得"新"用户详情
if(!cardDoc.invitationTask) return workflow.emit('getSubscriberInfo'); // 邀请卡任务不存在,获得"新"用户详情
if(cardDoc.invitationTask.status !== 'OPEN') return workflow.emit('getSubscriberInfo'); // 邀请卡任务已关闭,获得"新"用户详情
card = cardDoc.toJSON();
Subscriber.findOne({ openId: cardDoc.openId }).exec((err, introducerDoc) => {
if(err) return workflow.emit('error', err); // 错误
if(!introducerDoc) return workflow.emit('getSubscriberInfo'); // 没有此邀请人,获得"新"用户详情
introducer = introducerDoc.toJSON();
return workflow.emit('invitedCountPlus'); // 增加邀请量
});
});
});
workflow.on('invitedCountPlus', () => {
debug('Event: invitedCountPlus');
InvitationCard.findOneAndUpdate({ qrTicket: ticket }, { $inc: { invitedCount: 1 }}, function(err, doc) {
if(err) return workflow.emit('error', err); // 错误
console.log(`[WechatController] Add Invitation: ${ticket}`);
workflow.emit('getSubscriberInfo');
});
});
workflow.on('getSubscriberInfo', () => {
debug('Event: getSubscriberInfo');
Wechat.getSubscriberInfo(openId).then((info) => {
let newData = {
openId: info.openid,
groupId: info.groupid,
unionId: info.unionid,
subscribe: info.subscribe ? true : false,
subscribeTime: new Date(info.subscribe_time * 1000),
nickname: info.nickname,
remark: info.remark,
gender: info.sex,
headImgUrl: info.headimgurl,
city: info.city,
province: info.province,
country: info.country,
language: info.language
};
if(introducer) {
newData.introducer = introducer._id;
if(card.invitationTask.invitedFeedback) {
let invitedCount = card.invitedCount + 1;
let remainCount = card.invitationTask.threshold - invitedCount;
let invitedFeedback = card.invitationTask.invitedFeedback;
if(remainCount > 0) {
invitedFeedback = invitedFeedback.replace(/###/g, info.nickname)
invitedFeedback = invitedFeedback.replace(/#invited#/g, invitedCount + '');
invitedFeedback = invitedFeedback.replace(/#remain#/g, remainCount + '')
Wechat.sendText(introducer.openId, invitedFeedback);
}
}
}
Subscriber.findOneAndUpdate({ openId }, newData, { upsert: true }, function(err, doc){
if(err) return workflow.emit('error', err); // 错误
console.log(`[WechatController] New Subscriber: ${openId}`);
});
}).catch((err) => {
if(err) return workflow | back) {
| identifier_name |
index.js | = req.query.nonce;
let echostr = req.query.echostr;
let flag = Wechat.validateSignature(signature, timestamp, nonce);
if (flag) return res.send(echostr);
else return res.send({success: false});
};
/* 更新access token */
exports.updateAccessToken = function (req, res, next) {
Wechat.updateAccessToken();
res.send('updateAccessToken');
};
/* 接收来自微信的消息推送 */
exports.processWechatEvent = function (req, res, next) {
let content = req.body.xml;
console.log('Event received. Event: %s', JSON.stringify(content));
res.send('success');
if(!content) return;
try {
let fromOpenId = content.FromUserName[0],
toOpenId = content.ToUserName[0],
createTime = content.CreateTime[0],
event = content.Event ? content.Event[0] : "",
eventKey = content.EventKey ? content.EventKey[0] : "",
msgType = content.MsgType[0],
msgId = content.MsgID ? content.MsgID[0] : "",
status = content.Status ? content.Status[0] : "",
ticket = content.Ticket ? content.Ticket[0] : null;
if(msgType === 'event') {
const WechatEvent = req.app.db.models.WechatEvent;
let wechatEvent = new WechatEvent({ event: content });
wechatEvent.save((err) => {
if(err) console.error(err, err.stack);
handleEvent(req, res, fromOpenId, event, eventKey, msgId, status, ticket);
});
}
} catch(e) {
console.error(e, e.stack);
}
};
function handleEvent(req, res, openId, name, key, msgId, status, ticket) {
const Subscriber = req.app.db.models.Subscriber;
const InvitationCard = req.app.db.models.InvitationCard;
name = String(name).toLowerCase();
if(name === 'scan') {
if(ticket) {
InvitationCard.findOne({ qrTicket: ticket }).populate('invitationTask').exec((err, cardDoc) => {
if(err) return console.error(err);
if(cardDoc && cardDoc.invitationTask.status === 'OPEN') {
if(cardDoc.openId === openId) {
return Wechat.sendText(openId, "你不能扫描自己的任务卡");
} else {
Subscriber.findOne({ openId, subscribe: true }).exec((err, subscriberDoc) => {
if(err) return console.error(err);
if(subscriberDoc) return Wechat.sendText(openId, "你已经关注,不能被邀请");
});
}
}
});
}
}
if(name === 'click') {
if(key.indexOf('invitationTask') === 0) {
let taskId = key.split('-')[1];
require('../../service/InvitationCard').sendCard(openId, taskId);
}
if(key.indexOf('message') === 0) {
let replyQueueId = key.split('-')[1];
let ReplyQueue = req.app.db.models.ReplyQueue;
let ReplyQueueLog = req.app.db.models.ReplyQueueLog;
function sendMessages(messageGroup) {
async.eachSeries(messageGroup, function(message, callback) {
if(message.type === 'text') {
return Wechat.sendText(openId, message.content).then((data) => {
setTimeout(callback, 1000);
}).catch(callback);
}
if(message.type === 'image') {
return Wechat.sendImage(openId, message.mediaId).then((data) => {
setTimeout(callback, 5000);
}).catch(callback);
}
}, function(err) {
err && console.log(err);
});
}
ReplyQueueLog.findOne({ openId, replyQueue: replyQueueId}).exec((err, log) => {
if(log) {
let nextIndex = log.clickCount;
if(nextIndex > log.messageGroupsSnapshot.length-1) {
nextIndex = log.messageGroupsSnapshot.length-1;
} else {
ReplyQueueLog.findByIdAndUpdate(log._id, { clickCount: nextIndex + 1 }, { new: true }).exec();
}
sendMessages(log.messageGroupsSnapshot[nextIndex])
} else {
ReplyQueue.findById(replyQueueId).exec((err, replyQueue) => {
new ReplyQueueLog({ openId: openId, replyQueue: replyQueueId, messageGroupsSnapshot: replyQueue.messageGroups, clickCount: 1 }).save();
sendMessages(replyQueue.messageGroups[0]);
});
}
});
}
}
if(name === 'subscribe') {
const workflow = new EventEmitter();
let introducer = null;
let card = null; | });
return workflow.emit('getSubscriberInfo'); // 获得"新"用户详情
});
});
workflow.on('getTaskAndCard', () => {
debug('Event: getTaskAndCard');
InvitationCard.findOne({ qrTicket: ticket }).populate('invitationTask').exec((err, cardDoc) => {
if(err) return workflow.emit('error', err); // 错误
if(!cardDoc) return workflow.emit('getSubscriberInfo'); // 没有此邀请卡,获得"新"用户详情
if(!cardDoc.invitationTask) return workflow.emit('getSubscriberInfo'); // 邀请卡任务不存在,获得"新"用户详情
if(cardDoc.invitationTask.status !== 'OPEN') return workflow.emit('getSubscriberInfo'); // 邀请卡任务已关闭,获得"新"用户详情
card = cardDoc.toJSON();
Subscriber.findOne({ openId: cardDoc.openId }).exec((err, introducerDoc) => {
if(err) return workflow.emit('error', err); // 错误
if(!introducerDoc) return workflow.emit('getSubscriberInfo'); // 没有此邀请人,获得"新"用户详情
introducer = introducerDoc.toJSON();
return workflow.emit('invitedCountPlus'); // 增加邀请量
});
});
});
workflow.on('invitedCountPlus', () => {
debug('Event: invitedCountPlus');
InvitationCard.findOneAndUpdate({ qrTicket: ticket }, { $inc: { invitedCount: 1 }}, function(err, doc) {
if(err) return workflow.emit('error', err); // 错误
console.log(`[WechatController] Add Invitation: ${ticket}`);
workflow.emit('getSubscriberInfo');
});
});
workflow.on('getSubscriberInfo', () => {
debug('Event: getSubscriberInfo');
Wechat.getSubscriberInfo(openId).then((info) => {
let newData = {
openId: info.openid,
groupId: info.groupid,
unionId: info.unionid,
subscribe: info.subscribe ? true : false,
subscribeTime: new Date(info.subscribe_time * 1000),
nickname: info.nickname,
remark: info.remark,
gender: info.sex,
headImgUrl: info.headimgurl,
city: info.city,
province: info.province,
country: info.country,
language: info.language
};
if(introducer) {
newData.introducer = introducer._id;
if(card.invitationTask.invitedFeedback) {
let invitedCount = card.invitedCount + 1;
let remainCount = card.invitationTask.threshold - invitedCount;
let invitedFeedback = card.invitationTask.invitedFeedback;
if(remainCount > 0) {
invitedFeedback = invitedFeedback.replace(/###/g, info.nickname)
invitedFeedback = invitedFeedback.replace(/#invited#/g, invitedCount + '');
invitedFeedback = invitedFeedback.replace(/#remain#/g, remainCount + '')
Wechat.sendText(introducer.openId, invitedFeedback);
}
}
}
Subscriber.findOneAndUpdate({ openId }, newData, { upsert: true }, function(err, doc){
if(err) return workflow.emit('error', err); // 错误
console.log(`[WechatController] New Subscriber: ${openId}`);
});
}).catch((err) => {
if(err) return workflow.emit |
// 检查扫码标记
workflow.on('checkTicket', () => {
debug('Event: checkTicket');
if(ticket) return workflow.emit('findNewUser'); // 在数据库中寻找该"新"用户
return workflow.emit('getSubscriberInfo'); // 获得"新"用户详情
});
// 在数据库中寻找该用户
workflow.on('findNewUser', () => {
debug('Event: findNewUser');
Subscriber.findOne({ openId }).exec((err, doc) => {
if(err) return workflow.emit('error', err); // 错误
if(!doc) return workflow.emit('getTaskAndCard'); // 查找邀请卡和任务配置
InvitationCard.findOne({ openId, qrTicket: ticket }).exec((err, selfScanedCardDoc) => {
if(err) return workflow.emit('error', err); // 错误
if(selfScanedCardDoc) return Wechat.sendText(openId, "你不能扫描自己的任务卡");
if(doc.subscribe) return Wechat.sendText(openId, "你已经关注,不能被邀请");
Wechat.sendText(openId, "你已经被邀请过,不能被再次邀请"); | conditional_block |
index.js | nonce = req.query.nonce;
let echostr = req.query.echostr;
let flag = Wechat.validateSignature(signature, timestamp, nonce);
if (flag) return res.send(echostr);
else return res.send({success: false});
};
/* 更新access token */
exports.updateAccessToken = function (req, res, next) {
Wechat.updateAccessToken();
res.send('updateAccessToken');
};
/* 接收来自微信的消息推送 */
exports.processWechatEvent = function (req, res, next) {
let content = req.body.xml;
console.log('Event received. Event: %s', JSON.stringify(content));
res.send('success');
if(!content) return;
try {
let fromOpenId = content.FromUserName[0],
toOpenId = content.ToUserName[0],
createTime = content.CreateTime[0],
event = content.Event ? content.Event[0] : "",
eventKey = content.EventKey ? content.EventKey[0] : "",
msgType = content.MsgType[0],
msgId = content.MsgID ? content.MsgID[0] : "",
status = content.Status ? content.Status[0] : "",
ticket = content.Ticket ? content.Ticket[0] : null;
if(msgType === 'event') {
const WechatEvent = req.app.db.models.WechatEvent;
let wechatEvent = new WechatEvent({ event: content });
wechatEvent.save((err) => {
if(err) console.error(err, err.stack);
handleEvent(req, res, fromOpenId, event, eventKey, msgId, status, ticket);
});
}
} catch(e) {
console.error(e, e.stack);
}
};
function handleEvent(req, res, openId, name, key, msgId, status, ticket) {
const Subscriber = req.app.db.models.Subscriber;
const InvitationCard = req.app.db.models.InvitationCard;
name = String(name).toLowerCase();
if(name === 'scan') {
if(ticket) {
InvitationCard.findOne({ qrTicket: ticket }).populate('invitationTask').exec((err, cardDoc) => { | } else {
Subscriber.findOne({ openId, subscribe: true }).exec((err, subscriberDoc) => {
if(err) return console.error(err);
if(subscriberDoc) return Wechat.sendText(openId, "你已经关注,不能被邀请");
});
}
}
});
}
}
if(name === 'click') {
if(key.indexOf('invitationTask') === 0) {
let taskId = key.split('-')[1];
require('../../service/InvitationCard').sendCard(openId, taskId);
}
if(key.indexOf('message') === 0) {
let replyQueueId = key.split('-')[1];
let ReplyQueue = req.app.db.models.ReplyQueue;
let ReplyQueueLog = req.app.db.models.ReplyQueueLog;
function sendMessages(messageGroup) {
async.eachSeries(messageGroup, function(message, callback) {
if(message.type === 'text') {
return Wechat.sendText(openId, message.content).then((data) => {
setTimeout(callback, 1000);
}).catch(callback);
}
if(message.type === 'image') {
return Wechat.sendImage(openId, message.mediaId).then((data) => {
setTimeout(callback, 5000);
}).catch(callback);
}
}, function(err) {
err && console.log(err);
});
}
ReplyQueueLog.findOne({ openId, replyQueue: replyQueueId}).exec((err, log) => {
if(log) {
let nextIndex = log.clickCount;
if(nextIndex > log.messageGroupsSnapshot.length-1) {
nextIndex = log.messageGroupsSnapshot.length-1;
} else {
ReplyQueueLog.findByIdAndUpdate(log._id, { clickCount: nextIndex + 1 }, { new: true }).exec();
}
sendMessages(log.messageGroupsSnapshot[nextIndex])
} else {
ReplyQueue.findById(replyQueueId).exec((err, replyQueue) => {
new ReplyQueueLog({ openId: openId, replyQueue: replyQueueId, messageGroupsSnapshot: replyQueue.messageGroups, clickCount: 1 }).save();
sendMessages(replyQueue.messageGroups[0]);
});
}
});
}
}
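// New-follower workflow: checkTicket -> findNewUser -> getTaskAndCard -> invitedCountPlus -> getSubscriberInfo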
if(name === 'subscribe') {
const workflow = new EventEmitter();
let introducer = null;
let card = null;
// check for a scan ticket
workflow.on('checkTicket', () => {
debug('Event: checkTicket');
if(ticket) return workflow.emit('findNewUser'); // look up this "new" user in the database
return workflow.emit('getSubscriberInfo'); // fetch the "new" user's profile
});
// look up this user in the database
workflow.on('findNewUser', () => {
debug('Event: findNewUser');
Subscriber.findOne({ openId }).exec((err, doc) => {
if(err) return workflow.emit('error', err); // error
if(!doc) return workflow.emit('getTaskAndCard'); // look up the invitation card and task config
InvitationCard.findOne({ openId, qrTicket: ticket }).exec((err, selfScanedCardDoc) => {
if(err) return workflow.emit('error', err); // error
if(selfScanedCardDoc) return Wechat.sendText(openId, "你不能扫描自己的任务卡");
if(doc.subscribe) return Wechat.sendText(openId, "你已经关注,不能被邀请");
Wechat.sendText(openId, "你已经被邀请过,不能被再次邀请");
});
return workflow.emit('getSubscriberInfo'); // fetch the "new" user's profile
});
});
workflow.on('getTaskAndCard', () => {
debug('Event: getTaskAndCard');
InvitationCard.findOne({ qrTicket: ticket }).populate('invitationTask').exec((err, cardDoc) => {
if(err) return workflow.emit('error', err); // error
if(!cardDoc) return workflow.emit('getSubscriberInfo'); // no such invitation card, fetch the "new" user's profile
if(!cardDoc.invitationTask) return workflow.emit('getSubscriberInfo'); // invitation task does not exist, fetch the "new" user's profile
if(cardDoc.invitationTask.status !== 'OPEN') return workflow.emit('getSubscriberInfo'); // invitation task is closed, fetch the "new" user's profile
card = cardDoc.toJSON();
Subscriber.findOne({ openId: cardDoc.openId }).exec((err, introducerDoc) => {
if(err) return workflow.emit('error', err); // error
if(!introducerDoc) return workflow.emit('getSubscriberInfo'); // no such introducer, fetch the "new" user's profile
introducer = introducerDoc.toJSON();
return workflow.emit('invitedCountPlus'); // increment the invitation count
});
});
});
workflow.on('invitedCountPlus', () => {
debug('Event: invitedCountPlus');
InvitationCard.findOneAndUpdate({ qrTicket: ticket }, { $inc: { invitedCount: 1 }}, function(err, doc) {
if(err) return workflow.emit('error', err); // error
console.log(`[WechatController] Add Invitation: ${ticket}`);
workflow.emit('getSubscriberInfo');
});
});
workflow.on('getSubscriberInfo', () => {
debug('Event: getSubscriberInfo');
Wechat.getSubscriberInfo(openId).then((info) => {
let newData = {
openId: info.openid,
groupId: info.groupid,
unionId: info.unionid,
subscribe: info.subscribe ? true : false,
subscribeTime: new Date(info.subscribe_time * 1000),
nickname: info.nickname,
remark: info.remark,
gender: info.sex,
headImgUrl: info.headimgurl,
city: info.city,
province: info.province,
country: info.country,
language: info.language
};
if(introducer) {
newData.introducer = introducer._id;
if(card.invitationTask.invitedFeedback) {
let invitedCount = card.invitedCount + 1;
let remainCount = card.invitationTask.threshold - invitedCount;
let invitedFeedback = card.invitationTask.invitedFeedback;
if(remainCount > 0) {
invitedFeedback = invitedFeedback.replace(/###/g, info.nickname)
invitedFeedback = invitedFeedback.replace(/#invited#/g, invitedCount + '');
invitedFeedback = invitedFeedback.replace(/#remain#/g, remainCount + '')
Wechat.sendText(introducer.openId, invitedFeedback);
}
}
}
Subscriber.findOneAndUpdate({ openId }, newData, { upsert: true }, function(err, doc){
if(err) return workflow.emit('error', err); // error
console.log(`[WechatController] New Subscriber: ${openId}`);
});
}).catch((err) => {
if(err) return workflow.emit | if(err) return console.error(err);
if(cardDoc && cardDoc.invitationTask.status === 'OPEN') {
if(cardDoc.openId === openId) {
return Wechat.sendText(openId, "你不能扫描自己的任务卡"); | random_line_split |
thread_share.rs | //thread_share.rs
//Copyright 2015 David Huddle
use std::thread;
use std::sync::{Arc,mpsc};
extern crate time;
/// Takes a vector and modifies it on a different thread
pub fn do_amazing_things(data:Vec<i32>)->Vec<i32> |
/// Takes a vec and breaks it up to do calculations
pub fn do_calc(data: Vec<i32>)->Vec<i32>{
let mut package = vec![data];
let start = time::precise_time_ns();
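// split the data in half twice, giving four chunks to process in parallel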
for _ in 0..2 {
package = break_vec(package);
}
let stop = time::precise_time_ns();
println!("split time: {}", stop - start);
let count = package.len();
let (tx, rx) = mpsc::channel();
for vec in package {
let tx = tx.clone();
let data_t = vec;
thread::spawn(move || {
let mut ret = Vec::new();
for x in data_t {
ret.push(x * 7);
}
tx.send(ret);
});
}
let mut ret:Vec<i32> = Vec::new();
for _ in 0..count {
let mut source = rx.recv().ok().expect("Could not receive answer");
ret.append(&mut source);
}
ret
}
/// Takes the vectors inside a vector and splits them in half
/// No checking is done for rounding errors
fn break_vec(data: Vec<Vec<i32>>)->Vec<Vec<i32>>{
let mut ret: Vec<Vec<i32>> = Vec::new();
for mut vec in data {
let size = vec.len()/2;
let vec1 = vec.split_off(size);
ret.push(vec);
ret.push(vec1);
}
ret
}
/// Takes some data, makes four copies, and modifies each copy in a thread
///
///# Examples
pub fn shared_data_example(){
//never do it this way; thread::sleep is only there to ensure all threads complete
//clearly not a good idea
let numbers: Vec<_> = (5..15i32).collect();
let shared_numbers = Arc::new(numbers);
for thread_no in 0..3 {
println!("threadno: {}", thread_no);
let child_numbers = shared_numbers.clone();
thread::spawn(move || {
let local_numbers = &child_numbers[..];
// Work with the local numbers
for x in local_numbers {
let x = x+1;
println!("threadno: {} mod data: {}", thread_no, x);
}
});
}
thread::sleep_ms(1000);
}
/// Simple example of multi provider single consumer
pub fn simple_mpsc()->Vec<i32>{
let (tx, rx) = mpsc::channel();
for _ in 0..10 {
let tx = tx.clone();
thread::spawn(move || {
let answer = 42;
tx.send(answer);
});
}
let mut ret:Vec<i32> = Vec::new();
for _ in 0..10 {
ret.push(rx.recv().ok().expect("Could not receive answer"));
}
ret
}
/// Simple example of spawning a thread and rejoining
/// Also handles a panic during thread execution for graceful recovery
pub fn thread_handle(){
use std::thread;
let handle = thread::spawn(move || {
panic!("oops!");
});
let result = handle.join();
assert!(result.is_err());
}
| {
let (tx, rx) = mpsc::channel();
let tx = tx.clone();
thread::spawn(move || {
let mut ret = Vec::new();
for x in data {
ret.push(x * 7);
}
tx.send(ret);
});
rx.recv().ok().expect("Could not receive answer")
} | identifier_body |
thread_share.rs | //thread_share.rs
//Copyright 2015 David Huddle
use std::thread;
use std::sync::{Arc,mpsc};
extern crate time;
/// Takes a vector and modifies it on a different thread
pub fn do_amazing_things(data:Vec<i32>)->Vec<i32>{
let (tx, rx) = mpsc::channel();
let tx = tx.clone();
thread::spawn(move || {
let mut ret = Vec::new();
for x in data {
ret.push(x * 7);
}
tx.send(ret);
});
rx.recv().ok().expect("Could not receive answer")
}
/// Takes a vec and breaks it up to do calculations
pub fn do_calc(data: Vec<i32>)->Vec<i32>{
let mut package = vec![data];
let start = time::precise_time_ns();
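// split the data in half twice, giving four chunks to process in parallel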
for _ in 0..2 {
package = break_vec(package);
}
let stop = time::precise_time_ns();
println!("split time: {}", stop - start);
let count = package.len();
let (tx, rx) = mpsc::channel();
for vec in package {
let tx = tx.clone();
let data_t = vec;
thread::spawn(move || {
let mut ret = Vec::new();
for x in data_t {
ret.push(x * 7);
}
tx.send(ret);
});
}
let mut ret:Vec<i32> = Vec::new();
for _ in 0..count {
let mut source = rx.recv().ok().expect("Could not receive answer");
ret.append(&mut source);
}
ret
}
/// Takes the vectors inside a vector and splits them in half
/// No checking is done for rounding errors
fn break_vec(data: Vec<Vec<i32>>)->Vec<Vec<i32>>{
let mut ret: Vec<Vec<i32>> = Vec::new();
for mut vec in data {
let size = vec.len()/2;
let vec1 = vec.split_off(size);
ret.push(vec);
ret.push(vec1);
}
ret
}
/// Takes some data, makes four copies, and modifies each copy in a thread
///
///# Examples
pub fn shared_data_example(){
//never do it this way; thread::sleep is only there to ensure all threads complete
//clearly not a good idea
let numbers: Vec<_> = (5..15i32).collect();
let shared_numbers = Arc::new(numbers);
for thread_no in 0..3 {
println!("threadno: {}", thread_no);
let child_numbers = shared_numbers.clone();
thread::spawn(move || {
let local_numbers = &child_numbers[..];
// Work with the local numbers
for x in local_numbers {
let x = x+1;
println!("threadno: {} mod data: {}", thread_no, x);
}
});
}
thread::sleep_ms(1000); | for _ in 0..10 {
let tx = tx.clone();
thread::spawn(move || {
let answer = 42;
tx.send(answer);
});
}
let mut ret:Vec<i32> = Vec::new();
for _ in 0..10 {
ret.push(rx.recv().ok().expect("Could not receive answer"));
}
ret
}
/// Simple example of spawning a thread and rejoining
/// Also handles a panic during thread execution for graceful recovery
pub fn thread_handle(){
use std::thread;
let handle = thread::spawn(move || {
panic!("oops!");
});
let result = handle.join();
assert!(result.is_err());
} | }
/// Simple example of multi provider single consumer
pub fn simple_mpsc()->Vec<i32>{
let (tx, rx) = mpsc::channel(); | random_line_split |
thread_share.rs | //thread_share.rs
//Copyright 2015 David Huddle
use std::thread;
use std::sync::{Arc,mpsc};
extern crate time;
/// Takes a vector and modifies it on a different thread
pub fn do_amazing_things(data:Vec<i32>)->Vec<i32>{
let (tx, rx) = mpsc::channel();
let tx = tx.clone();
thread::spawn(move || {
let mut ret = Vec::new();
for x in data {
ret.push(x * 7);
}
tx.send(ret);
});
rx.recv().ok().expect("Could not receive answer")
}
/// Takes a vec and breaks it up to do calculations
pub fn do_calc(data: Vec<i32>)->Vec<i32>{
let mut package = vec![data];
let start = time::precise_time_ns();
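// split the data in half twice, giving four chunks to process in parallel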
for _ in 0..2 {
package = break_vec(package);
}
let stop = time::precise_time_ns();
println!("split time: {}", stop - start);
let count = package.len();
let (tx, rx) = mpsc::channel();
for vec in package {
let tx = tx.clone();
let data_t = vec;
thread::spawn(move || {
let mut ret = Vec::new();
for x in data_t {
ret.push(x * 7);
}
tx.send(ret);
});
}
let mut ret:Vec<i32> = Vec::new();
for _ in 0..count {
let mut source = rx.recv().ok().expect("Could not receive answer");
ret.append(&mut source);
}
ret
}
/// Takes the vectors inside a vector and splits them in half
/// No checking is done for rounding errors
fn break_vec(data: Vec<Vec<i32>>)->Vec<Vec<i32>>{
let mut ret: Vec<Vec<i32>> = Vec::new();
for mut vec in data {
let size = vec.len()/2;
let vec1 = vec.split_off(size);
ret.push(vec);
ret.push(vec1);
}
ret
}
/// Takes some data, makes four copies, and modifies each copy in a thread
///
///# Examples
pub fn shared_data_example(){
//never do it this way; thread::sleep is only there to ensure all threads complete
//clearly not a good idea
let numbers: Vec<_> = (5..15i32).collect();
let shared_numbers = Arc::new(numbers);
for thread_no in 0..3 {
println!("threadno: {}", thread_no);
let child_numbers = shared_numbers.clone();
thread::spawn(move || {
let local_numbers = &child_numbers[..];
// Work with the local numbers
for x in local_numbers {
let x = x+1;
println!("threadno: {} mod data: {}", thread_no, x);
}
});
}
thread::sleep_ms(1000);
}
/// Simple example of multi provider single consumer
pub fn simple_mpsc()->Vec<i32>{
let (tx, rx) = mpsc::channel();
for _ in 0..10 {
let tx = tx.clone();
thread::spawn(move || {
let answer = 42;
tx.send(answer);
});
}
let mut ret:Vec<i32> = Vec::new();
for _ in 0..10 {
ret.push(rx.recv().ok().expect("Could not receive answer"));
}
ret
}
/// Simple example of spawning a thread and rejoining
/// Also handles a panic during thread execution for graceful recovery
pub fn | (){
use std::thread;
let handle = thread::spawn(move || {
panic!("oops!");
});
let result = handle.join();
assert!(result.is_err());
}
| thread_handle | identifier_name |
mem_replace.rs | // run-rustfix
#![allow(unused_imports)]
#![warn(
clippy::all,
clippy::style,
clippy::mem_replace_option_with_none,
clippy::mem_replace_with_default
)]
use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList, VecDeque};
use std::mem;
fn replace_option_with_none() {
let mut an_option = Some(1);
let _ = mem::replace(&mut an_option, None);
let an_option = &mut Some(1);
let _ = mem::replace(an_option, None);
}
fn | () {
let mut s = String::from("foo");
let _ = std::mem::replace(&mut s, String::default());
let s = &mut String::from("foo");
let _ = std::mem::replace(s, String::default());
let _ = std::mem::replace(s, Default::default());
let mut v = vec![123];
let _ = std::mem::replace(&mut v, Vec::default());
let _ = std::mem::replace(&mut v, Default::default());
let _ = std::mem::replace(&mut v, Vec::new());
let _ = std::mem::replace(&mut v, vec![]);
let mut hash_map: HashMap<i32, i32> = HashMap::new();
let _ = std::mem::replace(&mut hash_map, HashMap::new());
let mut btree_map: BTreeMap<i32, i32> = BTreeMap::new();
let _ = std::mem::replace(&mut btree_map, BTreeMap::new());
let mut vd: VecDeque<i32> = VecDeque::new();
let _ = std::mem::replace(&mut vd, VecDeque::new());
let mut hash_set: HashSet<&str> = HashSet::new();
let _ = std::mem::replace(&mut hash_set, HashSet::new());
let mut btree_set: BTreeSet<&str> = BTreeSet::new();
let _ = std::mem::replace(&mut btree_set, BTreeSet::new());
let mut list: LinkedList<i32> = LinkedList::new();
let _ = std::mem::replace(&mut list, LinkedList::new());
let mut binary_heap: BinaryHeap<i32> = BinaryHeap::new();
let _ = std::mem::replace(&mut binary_heap, BinaryHeap::new());
let mut tuple = (vec![1, 2], BinaryHeap::<i32>::new());
let _ = std::mem::replace(&mut tuple, (vec![], BinaryHeap::new()));
let mut refstr = "hello";
let _ = std::mem::replace(&mut refstr, "");
let mut slice: &[i32] = &[1, 2, 3];
let _ = std::mem::replace(&mut slice, &[]);
}
// lint is disabled for primitives because in this case `take`
// has no clear benefit over `replace` and sometimes is harder to read
fn dont_lint_primitive() {
let mut pbool = true;
let _ = std::mem::replace(&mut pbool, false);
let mut pint = 5;
let _ = std::mem::replace(&mut pint, 0);
}
fn main() {
replace_option_with_none();
replace_with_default();
dont_lint_primitive();
}
| replace_with_default | identifier_name |
mem_replace.rs | // run-rustfix
#![allow(unused_imports)]
#![warn(
clippy::all,
clippy::style,
clippy::mem_replace_option_with_none,
clippy::mem_replace_with_default
)]
use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList, VecDeque};
use std::mem;
fn replace_option_with_none() |
fn replace_with_default() {
let mut s = String::from("foo");
let _ = std::mem::replace(&mut s, String::default());
let s = &mut String::from("foo");
let _ = std::mem::replace(s, String::default());
let _ = std::mem::replace(s, Default::default());
let mut v = vec![123];
let _ = std::mem::replace(&mut v, Vec::default());
let _ = std::mem::replace(&mut v, Default::default());
let _ = std::mem::replace(&mut v, Vec::new());
let _ = std::mem::replace(&mut v, vec![]);
let mut hash_map: HashMap<i32, i32> = HashMap::new();
let _ = std::mem::replace(&mut hash_map, HashMap::new());
let mut btree_map: BTreeMap<i32, i32> = BTreeMap::new();
let _ = std::mem::replace(&mut btree_map, BTreeMap::new());
let mut vd: VecDeque<i32> = VecDeque::new();
let _ = std::mem::replace(&mut vd, VecDeque::new());
let mut hash_set: HashSet<&str> = HashSet::new();
let _ = std::mem::replace(&mut hash_set, HashSet::new());
let mut btree_set: BTreeSet<&str> = BTreeSet::new();
let _ = std::mem::replace(&mut btree_set, BTreeSet::new());
let mut list: LinkedList<i32> = LinkedList::new();
let _ = std::mem::replace(&mut list, LinkedList::new());
let mut binary_heap: BinaryHeap<i32> = BinaryHeap::new();
let _ = std::mem::replace(&mut binary_heap, BinaryHeap::new());
let mut tuple = (vec![1, 2], BinaryHeap::<i32>::new());
let _ = std::mem::replace(&mut tuple, (vec![], BinaryHeap::new()));
let mut refstr = "hello";
let _ = std::mem::replace(&mut refstr, "");
let mut slice: &[i32] = &[1, 2, 3];
let _ = std::mem::replace(&mut slice, &[]);
}
// lint is disabled for primitives because in this case `take`
// has no clear benefit over `replace` and sometimes is harder to read
fn dont_lint_primitive() {
let mut pbool = true;
let _ = std::mem::replace(&mut pbool, false);
let mut pint = 5;
let _ = std::mem::replace(&mut pint, 0);
}
fn main() {
replace_option_with_none();
replace_with_default();
dont_lint_primitive();
}
| {
let mut an_option = Some(1);
let _ = mem::replace(&mut an_option, None);
let an_option = &mut Some(1);
let _ = mem::replace(an_option, None);
} | identifier_body |
mem_replace.rs | // run-rustfix
#![allow(unused_imports)]
#![warn(
clippy::all,
clippy::style,
clippy::mem_replace_option_with_none,
clippy::mem_replace_with_default
)]
use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList, VecDeque};
use std::mem;
fn replace_option_with_none() { |
fn replace_with_default() {
let mut s = String::from("foo");
let _ = std::mem::replace(&mut s, String::default());
let s = &mut String::from("foo");
let _ = std::mem::replace(s, String::default());
let _ = std::mem::replace(s, Default::default());
let mut v = vec![123];
let _ = std::mem::replace(&mut v, Vec::default());
let _ = std::mem::replace(&mut v, Default::default());
let _ = std::mem::replace(&mut v, Vec::new());
let _ = std::mem::replace(&mut v, vec![]);
let mut hash_map: HashMap<i32, i32> = HashMap::new();
let _ = std::mem::replace(&mut hash_map, HashMap::new());
let mut btree_map: BTreeMap<i32, i32> = BTreeMap::new();
let _ = std::mem::replace(&mut btree_map, BTreeMap::new());
let mut vd: VecDeque<i32> = VecDeque::new();
let _ = std::mem::replace(&mut vd, VecDeque::new());
let mut hash_set: HashSet<&str> = HashSet::new();
let _ = std::mem::replace(&mut hash_set, HashSet::new());
let mut btree_set: BTreeSet<&str> = BTreeSet::new();
let _ = std::mem::replace(&mut btree_set, BTreeSet::new());
let mut list: LinkedList<i32> = LinkedList::new();
let _ = std::mem::replace(&mut list, LinkedList::new());
let mut binary_heap: BinaryHeap<i32> = BinaryHeap::new();
let _ = std::mem::replace(&mut binary_heap, BinaryHeap::new());
let mut tuple = (vec![1, 2], BinaryHeap::<i32>::new());
let _ = std::mem::replace(&mut tuple, (vec![], BinaryHeap::new()));
let mut refstr = "hello";
let _ = std::mem::replace(&mut refstr, "");
let mut slice: &[i32] = &[1, 2, 3];
let _ = std::mem::replace(&mut slice, &[]);
}
// lint is disabled for primitives because in this case `take`
// has no clear benefit over `replace` and sometimes is harder to read
fn dont_lint_primitive() {
let mut pbool = true;
let _ = std::mem::replace(&mut pbool, false);
let mut pint = 5;
let _ = std::mem::replace(&mut pint, 0);
}
fn main() {
replace_option_with_none();
replace_with_default();
dont_lint_primitive();
} | let mut an_option = Some(1);
let _ = mem::replace(&mut an_option, None);
let an_option = &mut Some(1);
let _ = mem::replace(an_option, None);
} | random_line_split |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! This module contains shared types and messages for use by devtools/script.
//! The traits are here instead of in script so that the devtools crate can be
//! modified independently of the rest of Servo.
#![crate_name = "style_traits"]
#![crate_type = "rlib"]
#![feature(custom_derive)]
#![feature(plugin)]
#![plugin(serde_macros)]
#![plugin(plugins)]
#![deny(unsafe_code)]
#[macro_use] | extern crate cssparser;
extern crate euclid;
extern crate rustc_serialize;
extern crate serde;
extern crate util;
#[macro_use]
pub mod values;
pub mod viewport;
use cssparser::{Parser, SourcePosition};
pub trait ParseErrorReporter {
fn report_error(&self, input: &mut Parser, position: SourcePosition, message: &str);
fn clone(&self) -> Box<ParseErrorReporter + Send + Sync>;
} | random_line_split |
|
graph.rs | / self.links.len() as f32;
let mut v = 0.0;
for link in &self.links {
let p1 = self.nodes[link.from as usize].pos;
let p2 = self.nodes[link.to as usize].pos;
v += (Self::pos_distance(&p1, &p2) - mean).powi(2);
}
let variance = ((v as f32) / (self.links.len() as f32)).sqrt();
(mean, variance)
}
*/
pub fn get_node_degree(&self, id: ID) -> u32 {
self.get_neighbors(id).len() as u32
}
pub fn get_avg_node_degree(&self) -> f32 {
let mut n = 0;
for id in 0..self.node_count {
n += self.get_node_degree(id as u32);
}
(n as f32) / (self.node_count as f32)
}
pub fn get_mean_clustering_coefficient(&self) -> f32 {
let mut cc = 0.0f32;
for id in 0..self.node_count {
cc += self.get_local_clustering_coefficient(id as u32);
}
cc / (self.node_count as f32)
}
// Get neighbor count mean and variance
pub fn get_mean_link_count(&self) -> (f32, f32) {
let mut degrees = Vec::new();
let mut v = 0.0;
let mut c = 0;
let len = self.node_count as u32;
// calculate mean
for id in 0..len {
let degree = self.get_node_degree(id);
c += degree;
degrees.push(degree);
}
// calculate variance
let mean = c as f32 / len as f32;
for degree in degrees {
v += (degree as f32 - mean).powi(2);
}
let variance = ((v as f32) / (len as f32)).sqrt();
(mean, variance)
}
/*
pub fn link_distances(&self) -> (f32, f32, f32) {
let mut d2_min = infinity;
let mut d2_max = -infinity;
let mut d2_sum = 0.0;
for link in &self.links {
let to = self.nodes[link.to].gpos;
let from = self.nodes[link.from].gpos;
let d2 = from * to;
if d2 < d2_min {
d2_min = d2;
}
if d2 > d2_max {
d2_max = d2;
}
d2_sum += d2;
}
(d2_min.sqrt(), d2_mean.sqrt(), d2_max.sqrt())
}
//linear mapping
pub fn adjust_link_quality(&mut self, min: f32, max: f32) {
for link in &mut self.links {
let from = self.nodes[link.from as usize].pos;
let to = self.nodes[link.to as usize].pos;
let distance = Self::pos_distance(&from, &to);
if distance <= min {
link.quality = u16::MIN;
} else if distance >= max {
link.quality = u16::MAX;
} else {
link.quality = (u16::MAX as f32 * (distance - min) / (max - min)) as u16;
}
}
}
*/
pub fn has_link(&self, from: ID, to: ID) -> bool {
if let Some(_) = self.link_idx(from, to) {
true
} else {
false
}
}
fn has_any_link(&self, from: ID, to: ID) -> bool {
self.has_link(from, to) || self.has_link(to, from)
}
/*
* Calculate connections between neighbors of a given node
* divided by maximum possible connections between those neighbors.
* Method by Watts and Strogatz.
*/
pub fn get_local_clustering_coefficient(&self, id: ID) -> f32 {
//TODO: also count the connections from neighbors to node?
let ns = self.get_neighbors(id);
if ns.len() <= 1 {
0.0
} else {
// count number of connections between neighbors
let mut k = 0;
for a in ns {
for b in ns {
if a.to != b.to {
k += self.has_link(a.to, b.to) as u32;
}
}
}
(k as f32) / (ns.len() * (ns.len() - 1)) as f32
}
}
fn del_link(&mut self, a: ID, b: ID) {
self.del_links(&vec![a, b]);
}
pub fn del_links(&mut self, links: &Vec<ID>) {
if (links.len() % 2) != 0 {
panic!("del_links: Uneven elements for link list");
}
fn any(link: &Link, a: ID, b: ID) -> bool {
(link.from == a && link.to == b) || (link.from == b && link.to == a)
}
self.links.retain(|link| {
for s in links.chunks(2) {
if any(&link, s[0], s[1]) {
return false;
}
}
true
});
}
pub fn is_bidirectional(&self) -> bool {
for link in &self.links {
if !self.has_link(link.to, link.from) {
return false;
}
}
true
}
pub fn is_valid(&self) -> bool {
let len = self.node_count as ID;
for (i, link) in self.links.iter().enumerate() {
if link.to >= len || link.from >= len {
return false;
}
if i > 0 {
let prev = &self.links[i-1];
// check for order and duplicate links
if !(link.from > prev.from || (link.from == prev.from && link.to > prev.to)) {
return false;
}
}
}
true
}
pub fn remove_node(&mut self, id: ID) {
if self.node_count == 0 {
return;
}
self.node_count -= 1;
// adjust index
for link in &mut self.links {
if link.to > id {
link.to -= 1;
}
if link.from > id {
link.from -= 1;
}
}
// remove links
vec_filter(&mut self.links, |ref link| link.from != id && link.to != id);
// sort
self.links.sort_unstable_by(|a, b| a.cmp(b.from, b.to));
}
pub fn remove_nodes(&mut self, nodes: &Vec<ID>) {
for id in nodes {
self.remove_node(*id);
}
}
fn link_idx(&self, from: ID, to: ID) -> Option<usize> {
match self.links.binary_search_by(|link| link.cmp(from, to)) {
Ok(idx) => {
Some(idx)
},
Err(_) => {
None
}
}
}
pub fn get_link(&self, from: ID, to: ID) -> Option<Link> {
if let Some(idx) = self.link_idx(from, to) {
Some(self.links[idx].clone())
} else {
None
}
}
pub fn add_link(&mut self, from: ID, to: ID, tq: u16) {
if from != to {
match self.links.binary_search_by(|link| link.cmp(from, to)) {
Ok(idx) => {
self.links[idx].quality = tq;
},
Err(idx) => {
self.links.insert(idx, Link::new(from, to, tq));
}
}
}
}
pub fn get_neighbors(&self, id: ID) -> &[Link] {
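// links are kept sorted by (from, to): binary search finds one matching link, then scan outwards to cover the whole contiguous range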
match self.links.binary_search_by(|link| link.from.cmp(&id)) {
Ok(idx) => {
let mut start = idx;
let mut end = idx;
for i in (0..idx).rev() {
if self.links[i].from == id {
start = i;
}
}
for i in idx..self.links.len() {
if self.links[i].from == id {
end = i;
}
}
&self.links[start..end+1]
},
Err(idx) => {
&self.links[0..0]
}
}
}
pub fn clear_links(&mut self) {
self.links.clear();
}
pub fn is_directed(&self) -> bool {
for link in &self.links {
if self.link_idx(link.to, link.from).is_none() | {
return false;
} | conditional_block |
|
graph.rs | }
cc / (self.node_count as f32)
}
// Get neighbor count mean and variance
pub fn get_mean_link_count(&self) -> (f32, f32) {
let mut degrees = Vec::new();
let mut v = 0.0;
let mut c = 0;
let len = self.node_count as u32;
// calculate mean
for id in 0..len {
let degree = self.get_node_degree(id);
c += degree;
degrees.push(degree);
}
// calculate variance
let mean = c as f32 / len as f32;
for degree in degrees {
v += (degree as f32 - mean).powi(2);
}
let variance = ((v as f32) / (len as f32)).sqrt();
(mean, variance)
}
/*
pub fn link_distances(&self) -> (f32, f32, f32) {
let mut d2_min = infinity;
let mut d2_max = -infinity;
let mut d2_sum = 0.0;
for link in &self.links {
let to = self.nodes[link.to].gpos;
let from = self.nodes[link.from].gpos;
let d2 = from * to;
if d2 < d2_min {
d2_min = d2;
}
if d2 > d2_max {
d2_max = d2;
}
d2_sum += d2;
}
(d2_min.sqrt(), d2_mean.sqrt(), d2_max.sqrt())
}
//linear mapping
pub fn adjust_link_quality(&mut self, min: f32, max: f32) {
for link in &mut self.links {
let from = self.nodes[link.from as usize].pos;
let to = self.nodes[link.to as usize].pos;
let distance = Self::pos_distance(&from, &to);
if distance <= min {
link.quality = u16::MIN;
} else if distance >= max {
link.quality = u16::MAX;
} else {
link.quality = (u16::MAX as f32 * (distance - min) / (max - min)) as u16;
}
}
}
*/
pub fn has_link(&self, from: ID, to: ID) -> bool {
if let Some(_) = self.link_idx(from, to) {
true
} else {
false
}
}
fn has_any_link(&self, from: ID, to: ID) -> bool {
self.has_link(from, to) || self.has_link(to, from)
}
/*
* Calculate connections between neighbors of a given node
* divided by maximum possible connections between those neighbors.
* Method by Watts and Strogatz.
*/
pub fn get_local_clustering_coefficient(&self, id: ID) -> f32 {
//TODO: also count the connections from neighbors to node?
let ns = self.get_neighbors(id);
if ns.len() <= 1 {
0.0
} else {
// count number of connections between neighbors
let mut k = 0;
for a in ns {
for b in ns {
if a.to != b.to {
k += self.has_link(a.to, b.to) as u32;
}
}
}
(k as f32) / (ns.len() * (ns.len() - 1)) as f32
}
}
fn del_link(&mut self, a: ID, b: ID) {
self.del_links(&vec![a, b]);
}
pub fn del_links(&mut self, links: &Vec<ID>) {
if (links.len() % 2) != 0 {
panic!("del_links: Uneven elements for link list");
}
fn any(link: &Link, a: ID, b: ID) -> bool {
(link.from == a && link.to == b) || (link.from == b && link.to == a)
}
self.links.retain(|link| {
for s in links.chunks(2) {
if any(&link, s[0], s[1]) {
return false;
}
}
true
});
}
pub fn is_bidirectional(&self) -> bool {
for link in &self.links {
if !self.has_link(link.to, link.from) {
return false;
}
}
true
}
pub fn is_valid(&self) -> bool {
let len = self.node_count as ID;
for (i, link) in self.links.iter().enumerate() {
if link.to >= len || link.from >= len {
return false;
}
if i > 0 {
let prev = &self.links[i-1];
// check for order and duplicate links
if !(link.from > prev.from || (link.from == prev.from && link.to > prev.to)) {
return false;
}
}
}
true
}
pub fn remove_node(&mut self, id: ID) {
if self.node_count == 0 {
return;
}
self.node_count -= 1;
// adjust index
for link in &mut self.links {
if link.to > id {
link.to -= 1;
}
if link.from > id {
link.from -= 1;
}
}
// remove links
vec_filter(&mut self.links, |ref link| link.from != id && link.to != id);
// sort
self.links.sort_unstable_by(|a, b| a.cmp(b.from, b.to));
}
pub fn remove_nodes(&mut self, nodes: &Vec<ID>) {
for id in nodes {
self.remove_node(*id);
}
}
fn link_idx(&self, from: ID, to: ID) -> Option<usize> {
match self.links.binary_search_by(|link| link.cmp(from, to)) {
Ok(idx) => {
Some(idx)
},
Err(_) => {
None
}
}
}
pub fn get_link(&self, from: ID, to: ID) -> Option<Link> {
if let Some(idx) = self.link_idx(from, to) {
Some(self.links[idx].clone())
} else {
None
}
}
pub fn add_link(&mut self, from: ID, to: ID, tq: u16) {
if from != to {
match self.links.binary_search_by(|link| link.cmp(from, to)) {
Ok(idx) => {
self.links[idx].quality = tq;
},
Err(idx) => {
self.links.insert(idx, Link::new(from, to, tq));
}
}
}
}
pub fn get_neighbors(&self, id: ID) -> &[Link] {
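// links are kept sorted by (from, to): binary search finds one matching link, then scan outwards to cover the whole contiguous range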
match self.links.binary_search_by(|link| link.from.cmp(&id)) {
Ok(idx) => {
let mut start = idx;
let mut end = idx;
for i in (0..idx).rev() {
if self.links[i].from == id {
start = i;
}
}
for i in idx..self.links.len() {
if self.links[i].from == id {
end = i;
}
}
&self.links[start..end+1]
},
Err(idx) => {
&self.links[0..0]
}
}
}
pub fn clear_links(&mut self) {
self.links.clear();
}
pub fn is_directed(&self) -> bool {
for link in &self.links {
if self.link_idx(link.to, link.from).is_none() {
return false;
}
}
true
}
pub fn remove_unconnected_nodes(&mut self) {
let mut remove = Vec::new();
for id in 0..self.node_count as ID {
if self.get_node_degree(id) == 0 {
remove.push(id);
}
}
self.remove_nodes(&remove);
}
pub fn node_count(&self) -> usize {
self.node_count
}
pub fn link_count(&self) -> usize {
self.links.len()
}
pub fn link_cost_sum(&self) -> f32 {
self.links.iter().fold(0.0, |acc, link| acc + link.cost() as f32)
}
pub fn spanning_tree(&self) -> Graph {
Self::minimum_spanning_tree_impl(&self.links, self.node_count)
}
pub fn minimum_spanning_tree(&self) -> Graph {
// sort links by cost
let links = {
let mut links = self.links.clone();
links.sort_unstable_by(|a, b| a.cost().cmp(&b.cost()));
links
};
Self::minimum_spanning_tree_impl(&links, self.node_count)
}
// Implementation of the Kruskal minimum spanning tree algorithm
fn | minimum_spanning_tree_impl | identifier_name |
|
graph.rs | degree = self.get_node_degree(id);
c += degree;
degrees.push(degree);
}
// calculate variance
let mean = c as f32 / len as f32;
for degree in degrees {
v += (degree as f32 - mean).powi(2);
}
let variance = ((v as f32) / (len as f32)).sqrt();
(mean, variance)
}
/*
pub fn link_distances(&self) -> (f32, f32, f32) {
let mut d2_min = infinity;
let mut d2_max = -infinity;
let mut d2_sum = 0.0;
for link in &self.links {
let to = self.nodes[link.to].gpos;
let from = self.nodes[link.from].gpos;
let d2 = from * to;
if d2 < d2_min {
d2_min = d2;
}
if d2 > d2_max {
d2_max = d2;
}
d2_sum += d2;
}
(d2_min.sqrt(), d2_mean.sqrt(), d2_max.sqrt())
}
//linear mapping
pub fn adjust_link_quality(&mut self, min: f32, max: f32) {
for link in &mut self.links {
let from = self.nodes[link.from as usize].pos;
let to = self.nodes[link.to as usize].pos;
let distance = Self::pos_distance(&from, &to);
if distance <= min {
link.quality = u16::MIN;
} else if distance >= max {
link.quality = u16::MAX;
} else {
link.quality = (u16::MAX as f32 * (distance - min) / (max - min)) as u16;
}
}
}
*/
pub fn has_link(&self, from: ID, to: ID) -> bool {
if let Some(_) = self.link_idx(from, to) {
true
} else {
false
}
}
fn has_any_link(&self, from: ID, to: ID) -> bool {
self.has_link(from, to) || self.has_link(to, from)
}
/*
* Calculate connections between neighbors of a given node
* divided by maximum possible connections between those neighbors.
* Method by Watts and Strogatz.
*/
pub fn get_local_clustering_coefficient(&self, id: ID) -> f32 {
//TODO: also count the connections from neighbors to node?
let ns = self.get_neighbors(id);
if ns.len() <= 1 {
0.0
} else {
// count number of connections between neighbors
let mut k = 0;
for a in ns {
for b in ns {
if a.to != b.to {
k += self.has_link(a.to, b.to) as u32;
}
}
}
(k as f32) / (ns.len() * (ns.len() - 1)) as f32
}
}
fn del_link(&mut self, a: ID, b: ID) {
self.del_links(&vec![a, b]);
}
pub fn del_links(&mut self, links: &Vec<ID>) {
if (links.len() % 2) != 0 {
panic!("del_links: Uneven elements for link list");
}
fn any(link: &Link, a: ID, b: ID) -> bool {
(link.from == a && link.to == b) || (link.from == b && link.to == a)
}
self.links.retain(|link| {
for s in links.chunks(2) {
if any(&link, s[0], s[1]) {
return false;
}
}
true
});
}
pub fn is_bidirectional(&self) -> bool {
for link in &self.links {
if !self.has_link(link.to, link.from) {
return false;
}
}
true
}
pub fn is_valid(&self) -> bool {
let len = self.node_count as ID;
for (i, link) in self.links.iter().enumerate() {
if link.to >= len || link.from >= len {
return false;
}
if i > 0 {
let prev = &self.links[i-1];
// check for order and duplicate links
if !(link.from > prev.from || (link.from == prev.from && link.to > prev.to)) {
return false;
}
}
}
true
}
pub fn remove_node(&mut self, id: ID) {
if self.node_count == 0 {
return;
}
self.node_count -= 1;
// adjust index
for link in &mut self.links {
if link.to > id {
link.to -= 1;
}
if link.from > id {
link.from -= 1;
}
}
// remove links
vec_filter(&mut self.links, |ref link| link.from != id && link.to != id);
// sort
self.links.sort_unstable_by(|a, b| a.cmp(b.from, b.to));
}
pub fn remove_nodes(&mut self, nodes: &Vec<ID>) {
for id in nodes {
self.remove_node(*id);
}
}
fn link_idx(&self, from: ID, to: ID) -> Option<usize> {
match self.links.binary_search_by(|link| link.cmp(from, to)) {
Ok(idx) => {
Some(idx)
},
Err(_) => {
None
}
}
}
pub fn get_link(&self, from: ID, to: ID) -> Option<Link> {
if let Some(idx) = self.link_idx(from, to) {
Some(self.links[idx].clone())
} else {
None
}
}
pub fn add_link(&mut self, from: ID, to: ID, tq: u16) {
if from != to {
match self.links.binary_search_by(|link| link.cmp(from, to)) {
Ok(idx) => {
self.links[idx].quality = tq;
},
Err(idx) => {
self.links.insert(idx, Link::new(from, to, tq));
}
}
}
}
pub fn get_neighbors(&self, id: ID) -> &[Link] {
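// links are kept sorted by (from, to): binary search finds one matching link, then scan outwards to cover the whole contiguous range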
match self.links.binary_search_by(|link| link.from.cmp(&id)) {
Ok(idx) => {
let mut start = idx;
let mut end = idx;
for i in (0..idx).rev() {
if self.links[i].from == id {
start = i;
}
}
for i in idx..self.links.len() {
if self.links[i].from == id {
end = i;
}
}
&self.links[start..end+1]
},
Err(idx) => {
&self.links[0..0]
}
}
}
pub fn clear_links(&mut self) {
self.links.clear();
}
pub fn is_directed(&self) -> bool {
for link in &self.links {
if self.link_idx(link.to, link.from).is_none() {
return false;
}
}
true
}
pub fn remove_unconnected_nodes(&mut self) {
let mut remove = Vec::new();
for id in 0..self.node_count as ID {
if self.get_node_degree(id) == 0 {
remove.push(id);
}
}
self.remove_nodes(&remove);
}
pub fn node_count(&self) -> usize {
self.node_count
}
pub fn link_count(&self) -> usize {
self.links.len()
}
pub fn link_cost_sum(&self) -> f32 {
self.links.iter().fold(0.0, |acc, link| acc + link.cost() as f32)
}
pub fn spanning_tree(&self) -> Graph {
Self::minimum_spanning_tree_impl(&self.links, self.node_count)
}
pub fn minimum_spanning_tree(&self) -> Graph {
// sort links by cost
let links = {
let mut links = self.links.clone();
links.sort_unstable_by(|a, b| a.cost().cmp(&b.cost()));
links
};
Self::minimum_spanning_tree_impl(&links, self.node_count)
}
// Implementation of the Kruskal minimum spanning tree algorithm
fn minimum_spanning_tree_impl(links: &[Link], node_count: usize) -> Graph {
let mut roots = Vec::with_capacity(node_count);
let mut mst = Vec::new();
// initial root of every node is itself
for i in 0..node_count {
roots.push(i as ID);
} |
// find root of node
fn root(roots: &mut [ID], i: ID) -> usize {
let mut i = i as usize; | random_line_split |
|
graph.rs | ;
for link in &self.links {
let p1 = self.nodes[link.from as usize].pos;
let p2 = self.nodes[link.to as usize].pos;
d += Self::pos_distance(&p1, &p2);
}
let mean = d / self.links.len() as f32;
let mut v = 0.0;
for link in &self.links {
let p1 = self.nodes[link.from as usize].pos;
let p2 = self.nodes[link.to as usize].pos;
v += (Self::pos_distance(&p1, &p2) - mean).powi(2);
}
let variance = ((v as f32) / (self.links.len() as f32)).sqrt();
(mean, variance)
}
*/
pub fn get_node_degree(&self, id: ID) -> u32 {
self.get_neighbors(id).len() as u32
}
pub fn get_avg_node_degree(&self) -> f32 {
let mut n = 0;
for id in 0..self.node_count {
n += self.get_node_degree(id as u32);
}
(n as f32) / (self.node_count as f32)
}
pub fn get_mean_clustering_coefficient(&self) -> f32 {
let mut cc = 0.0f32;
for id in 0..self.node_count {
cc += self.get_local_clustering_coefficient(id as u32);
}
cc / (self.node_count as f32)
}
// Get neighbor count mean and variance
pub fn get_mean_link_count(&self) -> (f32, f32) {
let mut degrees = Vec::new();
let mut v = 0.0;
let mut c = 0;
let len = self.node_count as u32;
// calculate mean
for id in 0..len {
let degree = self.get_node_degree(id);
c += degree;
degrees.push(degree);
}
// calculate variance
let mean = c as f32 / len as f32;
for degree in degrees {
v += (degree as f32 - mean).powi(2);
}
let variance = ((v as f32) / (len as f32)).sqrt();
(mean, variance)
}
/*
pub fn link_distances(&self) -> (f32, f32, f32) {
let mut d2_min = infinity;
let mut d2_max = -infinity;
let mut d2_sum = 0.0;
for link in &self.links {
let to = self.nodes[link.to].gpos;
let from = self.nodes[link.from].gpos;
let d2 = from * to;
if d2 < d2_min {
d2_min = d2;
}
if d2 > d2_max {
d2_max = d2;
}
d2_sum += d2;
}
(d2_min.sqrt(), d2_mean.sqrt(), d2_max.sqrt())
}
//linear mapping
pub fn adjust_link_quality(&mut self, min: f32, max: f32) {
for link in &mut self.links {
let from = self.nodes[link.from as usize].pos;
let to = self.nodes[link.to as usize].pos;
let distance = Self::pos_distance(&from, &to);
if distance <= min {
link.quality = u16::MIN;
} else if distance >= max {
link.quality = u16::MAX;
} else {
link.quality = (u16::MAX as f32 * (distance - min) / (max - min)) as u16;
}
}
}
*/
pub fn has_link(&self, from: ID, to: ID) -> bool {
if let Some(_) = self.link_idx(from, to) {
true
} else {
false
}
}
fn has_any_link(&self, from: ID, to: ID) -> bool {
self.has_link(from, to) || self.has_link(to, from)
}
/*
* Calculate connections between neighbors of a given node
* divided by maximum possible connections between those neighbors.
* Method by Watts and Strogatz.
*/
pub fn get_local_clustering_coefficient(&self, id: ID) -> f32 {
//TODO: also count the connections from neighbors to node?
let ns = self.get_neighbors(id);
if ns.len() <= 1 {
0.0
} else {
// count number of connections between neighbors
let mut k = 0;
for a in ns {
for b in ns {
if a.to != b.to {
k += self.has_link(a.to, b.to) as u32;
}
}
}
(k as f32) / (ns.len() * (ns.len() - 1)) as f32
}
}
fn del_link(&mut self, a: ID, b: ID) {
self.del_links(&vec![a, b]);
}
pub fn del_links(&mut self, links: &Vec<ID>) {
if (links.len() % 2) != 0 {
panic!("del_links: Uneven elements for link list");
}
fn any(link: &Link, a: ID, b: ID) -> bool {
(link.from == a && link.to == b) || (link.from == b && link.to == a)
}
self.links.retain(|link| {
for s in links.chunks(2) {
if any(&link, s[0], s[1]) {
return false;
}
}
true
});
}
pub fn is_bidirectional(&self) -> bool {
for link in &self.links {
if !self.has_link(link.to, link.from) {
return false;
}
}
true
}
pub fn is_valid(&self) -> bool {
let len = self.node_count as ID;
for (i, link) in self.links.iter().enumerate() {
if link.to >= len || link.from >= len {
return false;
}
if i > 0 {
let prev = &self.links[i-1];
// check for order and duplicate links
if !(link.from > prev.from || (link.from == prev.from && link.to > prev.to)) {
return false;
}
}
}
true
}
pub fn remove_node(&mut self, id: ID) {
if self.node_count == 0 {
return;
}
self.node_count -= 1;
// adjust index
for link in &mut self.links {
if link.to > id {
link.to -= 1;
}
if link.from > id {
link.from -= 1;
}
}
// remove links
vec_filter(&mut self.links, |ref link| link.from != id && link.to != id);
// sort
self.links.sort_unstable_by(|a, b| a.cmp(b.from, b.to));
}
pub fn remove_nodes(&mut self, nodes: &Vec<ID>) {
for id in nodes {
self.remove_node(*id);
}
}
fn link_idx(&self, from: ID, to: ID) -> Option<usize> {
match self.links.binary_search_by(|link| link.cmp(from, to)) {
Ok(idx) => {
Some(idx)
},
Err(_) => {
None
}
}
}
pub fn get_link(&self, from: ID, to: ID) -> Option<Link> {
if let Some(idx) = self.link_idx(from, to) {
Some(self.links[idx].clone())
} else {
None
}
}
pub fn add_link(&mut self, from: ID, to: ID, tq: u16) {
if from != to {
match self.links.binary_search_by(|link| link.cmp(from, to)) {
Ok(idx) => {
self.links[idx].quality = tq;
},
Err(idx) => {
self.links.insert(idx, Link::new(from, to, tq));
}
}
}
}
pub fn get_neighbors(&self, id: ID) -> &[Link] | {
match self.links.binary_search_by(|link| link.from.cmp(&id)) {
Ok(idx) => {
let mut start = idx;
let mut end = idx;
for i in (0..idx).rev() {
if self.links[i].from == id {
start = i;
}
}
for i in idx..self.links.len() {
if self.links[i].from == id {
end = i;
}
}
&self.links[start..end+1]
},
Err(idx) => {
&self.links[0..0]
} | identifier_body |
|
classes_70.js | var searchData=
[
['paperfigures',['PaperFigures',['../class_paper_figures.html',1,'']]],
['parameter',['Parameter',['../class_parameter.html',1,'']]],
['pathway',['Pathway',['../class_pathway.html',1,'']]],
['pdfutil',['PdfUtil',['../class_pdf_util.html',1,'']]], | ['population',['Population',['../class_population.html',1,'']]],
['posterfigures',['PosterFigures',['../class_poster_figures.html',1,'']]],
['process',['Process',['../class_process.html',1,'']]],
['processmetaboliteusage',['ProcessMetaboliteUsage',['../class_process_metabolite_usage.html',1,'']]],
['protein',['Protein',['../class_protein.html',1,'']]],
['proteincomplex',['ProteinComplex',['../class_protein_complex.html',1,'']]],
['proteinmonomer',['ProteinMonomer',['../class_protein_monomer.html',1,'']]]
]; | ['physicalobject',['PhysicalObject',['../class_physical_object.html',1,'']]],
['polymer',['Polymer',['../class_polymer.html',1,'']]], | random_line_split |
_deprecated_schema_parser.py | """Parse the biothings schema"""
from .config import BIOTHINGS_SCHEMA_URL, PREFIX_TO_REMOVE
from .utils.dataload import load_json_or_yaml
from .utils.common import remove_prefix
class SchemaParser():
def __init__(self):
self.schema_json = remove_prefix(load_json_or_yaml(BIOTHINGS_SCHEMA_URL),
PREFIX_TO_REMOVE)
self.properties = {}
self.ids = []
self.clses = []
self.process_schema()
def | (self):
for rec in self.schema_json['@graph']:
if "rdfs:subPropertyOf" in rec and rec["rdfs:subPropertyOf"]["@id"] == "http://schema.org/identifier":
self.ids.append(rec["@id"])
elif rec["@type"] == "rdf:Property":
self.properties[rec["@id"]] = {"inverse_property": None}
if "schema:inverseOf" in rec:
self.properties[rec["@id"]]["inverse_property"] = rec["schema:inverseOf"]["@id"]
elif rec["@type"] == "rdfs:Class":
self.clses.append(rec["@id"])
| process_schema | identifier_name |
_deprecated_schema_parser.py | """Parse the biothings schema"""
from .config import BIOTHINGS_SCHEMA_URL, PREFIX_TO_REMOVE
from .utils.dataload import load_json_or_yaml
from .utils.common import remove_prefix
class SchemaParser():
def __init__(self):
self.schema_json = remove_prefix(load_json_or_yaml(BIOTHINGS_SCHEMA_URL),
PREFIX_TO_REMOVE)
self.properties = {}
self.ids = []
self.clses = []
self.process_schema()
def process_schema(self):
for rec in self.schema_json['@graph']:
if "rdfs:subPropertyOf" in rec and rec["rdfs:subPropertyOf"]["@id"] == "http://schema.org/identifier":
self.ids.append(rec["@id"])
elif rec["@type"] == "rdf:Property":
self.properties[rec["@id"]] = {"inverse_property": None}
if "schema:inverseOf" in rec: | self.properties[rec["@id"]]["inverse_property"] = rec["schema:inverseOf"]["@id"]
elif rec["@type"] == "rdfs:Class":
self.clses.append(rec["@id"]) | random_line_split |
|
_deprecated_schema_parser.py | """Parse the biothings schema"""
from .config import BIOTHINGS_SCHEMA_URL, PREFIX_TO_REMOVE
from .utils.dataload import load_json_or_yaml
from .utils.common import remove_prefix
class SchemaParser():
def __init__(self):
self.schema_json = remove_prefix(load_json_or_yaml(BIOTHINGS_SCHEMA_URL),
PREFIX_TO_REMOVE)
self.properties = {}
self.ids = []
self.clses = []
self.process_schema()
def process_schema(self):
| for rec in self.schema_json['@graph']:
if "rdfs:subPropertyOf" in rec and rec["rdfs:subPropertyOf"]["@id"] == "http://schema.org/identifier":
self.ids.append(rec["@id"])
elif rec["@type"] == "rdf:Property":
self.properties[rec["@id"]] = {"inverse_property": None}
if "schema:inverseOf" in rec:
self.properties[rec["@id"]]["inverse_property"] = rec["schema:inverseOf"]["@id"]
elif rec["@type"] == "rdfs:Class":
self.clses.append(rec["@id"]) | identifier_body |
|
_deprecated_schema_parser.py | """Parse the biothings schema"""
from .config import BIOTHINGS_SCHEMA_URL, PREFIX_TO_REMOVE
from .utils.dataload import load_json_or_yaml
from .utils.common import remove_prefix
class SchemaParser():
def __init__(self):
self.schema_json = remove_prefix(load_json_or_yaml(BIOTHINGS_SCHEMA_URL),
PREFIX_TO_REMOVE)
self.properties = {}
self.ids = []
self.clses = []
self.process_schema()
def process_schema(self):
for rec in self.schema_json['@graph']:
if "rdfs:subPropertyOf" in rec and rec["rdfs:subPropertyOf"]["@id"] == "http://schema.org/identifier":
|
elif rec["@type"] == "rdf:Property":
self.properties[rec["@id"]] = {"inverse_property": None}
if "schema:inverseOf" in rec:
self.properties[rec["@id"]]["inverse_property"] = rec["schema:inverseOf"]["@id"]
elif rec["@type"] == "rdfs:Class":
self.clses.append(rec["@id"])
| self.ids.append(rec["@id"]) | conditional_block |
transliterate.ts | import {
IntervalArray,
OptionReplaceArray,
OptionReplaceCombined,
OptionReplaceObject,
OptionsTransliterate,
} from '../types';
import { charmap, Charmap } from '../../data/charmap';
import {
deepClone,
escapeRegExp,
findStrOccurrences,
inRange,
hasChinese,
regexpReplaceCustom,
hasPunctuationOrSpace,
} from './utils';
export const defaultOptions: OptionsTransliterate = {
ignore: [],
replace: [],
replaceAfter: [],
trim: false,
unknown: '',
fixChineseSpacing: true,
};
export class Transliterate {
get options(): OptionsTransliterate {
return deepClone({ ...defaultOptions, ...this.confOptions });
}
constructor(
protected confOptions: OptionsTransliterate = deepClone(defaultOptions),
protected map: Charmap = charmap,
) {}
/**
* Set default config
* @param options
*/
public config(
options?: OptionsTransliterate,
reset = false,
): OptionsTransliterate {
if (reset) {
this.confOptions = {};
}
if (options && typeof options === 'object') {
this.confOptions = deepClone(options);
}
return this.confOptions;
}
/**
* Replace the source string using the code map
* @param str
* @param ignoreRanges
* @param unknown
*/
public codeMapReplace(
str: string,
ignoreRanges: IntervalArray = [],
opt: OptionsTransliterate,
): string {
let index = 0;
let result = '';
const strContainsChinese = opt.fixChineseSpacing && hasChinese(str);
let lastCharHasChinese = false;
for (let i = 0; i < str.length; i++) {
// Get current character, taking surrogates in consideration
const char =
/[\uD800-\uDBFF]/.test(str[i]) && /[\uDC00-\uDFFF]/.test(str[i + 1])
? str[i] + str[i + 1]
: str[i];
let s: string;
let ignoreFixingChinese = false;
switch (true) {
// current character is in ignored list
case inRange(index, ignoreRanges):
// could be UTF-32 with high and low surrogates
case char.length === 2 && inRange(index + 1, ignoreRanges):
s = char;
// if it's the first character of an ignored string, then leave ignoreFixingChinese to true
if (
!ignoreRanges.find(
(range) => range[1] >= index && range[0] === index,
)
) {
ignoreFixingChinese = true;
}
break;
default:
s = this.map[char] || opt.unknown || '';
}
// fix Chinese spacing issue
if (strContainsChinese) {
if (
lastCharHasChinese &&
!ignoreFixingChinese &&
!hasPunctuationOrSpace(s)
) |
lastCharHasChinese = !!s && hasChinese(char);
}
result += s;
index += char.length;
// If it's UTF-32 then skip next character
i += char.length - 1;
}
return result;
}
/**
* Convert the object version of the 'replace' option into tuple array one
* @param option replace option to be either an object or tuple array
* @return return the paired array version of replace option
*/
public formatReplaceOption(
option: OptionReplaceCombined,
): OptionReplaceArray {
if (option instanceof Array) {
// return a new copy of the array
return deepClone(option);
}
// convert object option to array one
const replaceArr: OptionReplaceArray = [];
for (const key in option as OptionReplaceObject) {
/* istanbul ignore else */
if (Object.prototype.hasOwnProperty.call(option, key)) {
replaceArr.push([key, option[key]]);
}
}
return replaceArr;
}
/**
* Search and replace a list of strings(regexps) and return the result string
* @param source Source string
* @param searches Search-replace string(regexp) pairs
*/
public replaceString(
source: string,
searches: OptionReplaceArray,
ignore: string[] = [],
): string {
const clonedSearches = deepClone(searches);
let result = source;
for (let i = 0; i < clonedSearches.length; i++) {
const item = clonedSearches[i];
switch (true) {
case item[0] instanceof RegExp:
item[0] = RegExp(
item[0].source,
`${item[0].flags.replace('g', '')}g`,
);
break;
case typeof item[0] === 'string' && item[0].length > 0:
item[0] = RegExp(escapeRegExp(item[0]), 'g');
break;
default:
item[0] = /[^\s\S]/; // Prevent ReDos attack
}
result = regexpReplaceCustom(result, item[0], item[1], ignore);
}
return result;
}
/**
* Set charmap data
* @param {Charmap} [data]
* @param {boolean} [reset=false]
* @memberof Transliterate
*/
public setData(data?: Charmap, reset = false): Charmap {
if (reset) {
this.map = deepClone(charmap);
}
if (data && typeof data === 'object' && Object.keys(data).length) {
this.map = deepClone(this.map);
for (const from in data) {
/* istanbul ignore else */
if (
Object.prototype.hasOwnProperty.call(data, from) &&
from.length < 3 &&
from <= '\udbff\udfff'
) {
this.map[from] = data[from];
}
}
}
return this.map;
}
/**
* Main transliterate function
* @param source The string which is being transliterated
* @param options Options object
*/
public transliterate(source: string, options?: OptionsTransliterate): string {
options = typeof options === 'object' ? options : {};
const opt: OptionsTransliterate = deepClone({
...this.options,
...options,
});
// force convert to string
let str = typeof source === 'string' ? source : String(source);
const replaceOption: OptionReplaceArray = this.formatReplaceOption(
opt.replace as OptionReplaceCombined,
);
if (replaceOption.length) {
str = this.replaceString(str, replaceOption, opt.ignore);
}
// ignore
const ignoreRanges: IntervalArray =
opt.ignore && opt.ignore.length > 0
? findStrOccurrences(str, opt.ignore)
: [];
str = this.codeMapReplace(str, ignoreRanges, opt);
// trim spaces at the beginning and ending of the string
if (opt.trim) {
str = str.trim();
}
const replaceAfterOption: OptionReplaceArray = this.formatReplaceOption(
opt.replaceAfter as OptionReplaceCombined,
);
if (replaceAfterOption.length) {
str = this.replaceString(str, replaceAfterOption);
}
return str;
}
}
| {
s = ' ' + s;
} | conditional_block |
transliterate.ts | import {
IntervalArray,
OptionReplaceArray,
OptionReplaceCombined,
OptionReplaceObject,
OptionsTransliterate,
} from '../types';
import { charmap, Charmap } from '../../data/charmap';
import {
deepClone,
escapeRegExp,
findStrOccurrences,
inRange,
hasChinese,
regexpReplaceCustom,
hasPunctuationOrSpace,
} from './utils';
export const defaultOptions: OptionsTransliterate = {
ignore: [],
replace: [],
replaceAfter: [],
trim: false,
unknown: '',
fixChineseSpacing: true,
};
export class Transliterate {
get options(): OptionsTransliterate {
return deepClone({ ...defaultOptions, ...this.confOptions });
}
constructor(
protected confOptions: OptionsTransliterate = deepClone(defaultOptions),
protected map: Charmap = charmap,
) {}
/**
* Set default config
* @param options
*/
public config(
options?: OptionsTransliterate,
reset = false,
): OptionsTransliterate |
/**
* Replace the source string using the code map
* @param str
* @param ignoreRanges
* @param unknown
*/
public codeMapReplace(
str: string,
ignoreRanges: IntervalArray = [],
opt: OptionsTransliterate,
): string {
let index = 0;
let result = '';
const strContainsChinese = opt.fixChineseSpacing && hasChinese(str);
let lastCharHasChinese = false;
for (let i = 0; i < str.length; i++) {
// Get current character, taking surrogates in consideration
const char =
/[\uD800-\uDBFF]/.test(str[i]) && /[\uDC00-\uDFFF]/.test(str[i + 1])
? str[i] + str[i + 1]
: str[i];
let s: string;
let ignoreFixingChinese = false;
switch (true) {
// current character is in ignored list
case inRange(index, ignoreRanges):
// could be UTF-32 with high and low surrogates
case char.length === 2 && inRange(index + 1, ignoreRanges):
s = char;
// if it's the first character of an ignored string, then leave ignoreFixingChinese to true
if (
!ignoreRanges.find(
(range) => range[1] >= index && range[0] === index,
)
) {
ignoreFixingChinese = true;
}
break;
default:
s = this.map[char] || opt.unknown || '';
}
// fix Chinese spacing issue
if (strContainsChinese) {
if (
lastCharHasChinese &&
!ignoreFixingChinese &&
!hasPunctuationOrSpace(s)
) {
s = ' ' + s;
}
lastCharHasChinese = !!s && hasChinese(char);
}
result += s;
index += char.length;
// If it's UTF-32 then skip next character
i += char.length - 1;
}
return result;
}
/**
* Convert the object version of the 'replace' option into tuple array one
* @param option replace option to be either an object or tuple array
* @return return the paired array version of replace option
*/
public formatReplaceOption(
option: OptionReplaceCombined,
): OptionReplaceArray {
if (option instanceof Array) {
// return a new copy of the array
return deepClone(option);
}
// convert object option to array one
const replaceArr: OptionReplaceArray = [];
for (const key in option as OptionReplaceObject) {
/* istanbul ignore else */
if (Object.prototype.hasOwnProperty.call(option, key)) {
replaceArr.push([key, option[key]]);
}
}
return replaceArr;
}
/**
* Search and replace a list of strings(regexps) and return the result string
* @param source Source string
* @param searches Search-replace string(regexp) pairs
*/
public replaceString(
source: string,
searches: OptionReplaceArray,
ignore: string[] = [],
): string {
const clonedSearches = deepClone(searches);
let result = source;
for (let i = 0; i < clonedSearches.length; i++) {
const item = clonedSearches[i];
switch (true) {
case item[0] instanceof RegExp:
item[0] = RegExp(
item[0].source,
`${item[0].flags.replace('g', '')}g`,
);
break;
case typeof item[0] === 'string' && item[0].length > 0:
item[0] = RegExp(escapeRegExp(item[0]), 'g');
break;
default:
item[0] = /[^\s\S]/; // Prevent ReDos attack
}
result = regexpReplaceCustom(result, item[0], item[1], ignore);
}
return result;
}
/**
* Set charmap data
* @param {Charmap} [data]
* @param {boolean} [reset=false]
* @memberof Transliterate
*/
public setData(data?: Charmap, reset = false): Charmap {
if (reset) {
this.map = deepClone(charmap);
}
if (data && typeof data === 'object' && Object.keys(data).length) {
this.map = deepClone(this.map);
for (const from in data) {
/* istanbul ignore else */
if (
Object.prototype.hasOwnProperty.call(data, from) &&
from.length < 3 &&
from <= '\udbff\udfff'
) {
this.map[from] = data[from];
}
}
}
return this.map;
}
/**
* Main transliterate function
* @param source The string which is being transliterated
* @param options Options object
*/
public transliterate(source: string, options?: OptionsTransliterate): string {
options = typeof options === 'object' ? options : {};
const opt: OptionsTransliterate = deepClone({
...this.options,
...options,
});
// force convert to string
let str = typeof source === 'string' ? source : String(source);
const replaceOption: OptionReplaceArray = this.formatReplaceOption(
opt.replace as OptionReplaceCombined,
);
if (replaceOption.length) {
str = this.replaceString(str, replaceOption, opt.ignore);
}
// ignore
const ignoreRanges: IntervalArray =
opt.ignore && opt.ignore.length > 0
? findStrOccurrences(str, opt.ignore)
: [];
str = this.codeMapReplace(str, ignoreRanges, opt);
// trim spaces at the beginning and ending of the string
if (opt.trim) {
str = str.trim();
}
const replaceAfterOption: OptionReplaceArray = this.formatReplaceOption(
opt.replaceAfter as OptionReplaceCombined,
);
if (replaceAfterOption.length) {
str = this.replaceString(str, replaceAfterOption);
}
return str;
}
}
| {
if (reset) {
this.confOptions = {};
}
if (options && typeof options === 'object') {
this.confOptions = deepClone(options);
}
return this.confOptions;
} | identifier_body |
transliterate.ts | import {
IntervalArray,
OptionReplaceArray,
OptionReplaceCombined,
OptionReplaceObject,
OptionsTransliterate,
} from '../types';
import { charmap, Charmap } from '../../data/charmap';
import {
deepClone,
escapeRegExp,
findStrOccurrences,
inRange,
hasChinese,
regexpReplaceCustom,
hasPunctuationOrSpace,
} from './utils';
export const defaultOptions: OptionsTransliterate = {
ignore: [],
replace: [],
replaceAfter: [],
trim: false,
unknown: '',
fixChineseSpacing: true,
};
export class Transliterate {
get options(): OptionsTransliterate {
return deepClone({ ...defaultOptions, ...this.confOptions });
}
constructor(
protected confOptions: OptionsTransliterate = deepClone(defaultOptions),
protected map: Charmap = charmap,
) {}
/**
* Set default config
* @param options
*/
public config(
options?: OptionsTransliterate,
reset = false,
): OptionsTransliterate {
if (reset) {
this.confOptions = {};
}
if (options && typeof options === 'object') {
this.confOptions = deepClone(options);
}
return this.confOptions;
}
/**
* Replace the source string using the code map
* @param str
* @param ignoreRanges
   * @param opt
*/
public codeMapReplace(
str: string,
ignoreRanges: IntervalArray = [],
opt: OptionsTransliterate,
): string {
let index = 0;
let result = '';
const strContainsChinese = opt.fixChineseSpacing && hasChinese(str);
let lastCharHasChinese = false;
for (let i = 0; i < str.length; i++) {
// Get current character, taking surrogates in consideration
const char =
/[\uD800-\uDBFF]/.test(str[i]) && /[\uDC00-\uDFFF]/.test(str[i + 1])
? str[i] + str[i + 1]
: str[i];
let s: string;
let ignoreFixingChinese = false;
switch (true) {
// current character is in ignored list
case inRange(index, ignoreRanges):
        // could be a surrogate pair (a code point outside the BMP stored as two UTF-16 code units)
case char.length === 2 && inRange(index + 1, ignoreRanges):
s = char;
// if it's the first character of an ignored string, then leave ignoreFixingChinese to true
if (
!ignoreRanges.find(
(range) => range[1] >= index && range[0] === index,
)
) {
ignoreFixingChinese = true;
}
break;
default:
s = this.map[char] || opt.unknown || '';
}
// fix Chinese spacing issue
if (strContainsChinese) {
if (
lastCharHasChinese &&
!ignoreFixingChinese &&
!hasPunctuationOrSpace(s)
) {
s = ' ' + s;
}
lastCharHasChinese = !!s && hasChinese(char);
}
result += s;
index += char.length;
      // If it was a surrogate pair, skip the second (low surrogate) code unit
i += char.length - 1;
}
return result;
}
/**
* Convert the object version of the 'replace' option into tuple array one
* @param option replace option to be either an object or tuple array
   * @return the paired array version of the replace option
*/
public formatReplaceOption(
option: OptionReplaceCombined,
): OptionReplaceArray {
if (option instanceof Array) {
// return a new copy of the array
return deepClone(option);
}
// convert object option to array one
const replaceArr: OptionReplaceArray = [];
for (const key in option as OptionReplaceObject) {
/* istanbul ignore else */
if (Object.prototype.hasOwnProperty.call(option, key)) {
replaceArr.push([key, option[key]]);
}
}
return replaceArr; | /**
* Search and replace a list of strings(regexps) and return the result string
* @param source Source string
* @param searches Search-replace string(regexp) pairs
*/
public replaceString(
source: string,
searches: OptionReplaceArray,
ignore: string[] = [],
): string {
const clonedSearches = deepClone(searches);
let result = source;
for (let i = 0; i < clonedSearches.length; i++) {
const item = clonedSearches[i];
switch (true) {
case item[0] instanceof RegExp:
item[0] = RegExp(
item[0].source,
`${item[0].flags.replace('g', '')}g`,
);
break;
case typeof item[0] === 'string' && item[0].length > 0:
item[0] = RegExp(escapeRegExp(item[0]), 'g');
break;
default:
item[0] = /[^\s\S]/; // Prevent ReDos attack
}
result = regexpReplaceCustom(result, item[0], item[1], ignore);
}
return result;
}
/**
* Set charmap data
* @param {Charmap} [data]
* @param {boolean} [reset=false]
* @memberof Transliterate
*/
public setData(data?: Charmap, reset = false): Charmap {
if (reset) {
this.map = deepClone(charmap);
}
if (data && typeof data === 'object' && Object.keys(data).length) {
this.map = deepClone(this.map);
for (const from in data) {
/* istanbul ignore else */
if (
Object.prototype.hasOwnProperty.call(data, from) &&
from.length < 3 &&
from <= '\udbff\udfff'
) {
this.map[from] = data[from];
}
}
}
return this.map;
}
/**
* Main transliterate function
* @param source The string which is being transliterated
* @param options Options object
*/
public transliterate(source: string, options?: OptionsTransliterate): string {
options = typeof options === 'object' ? options : {};
const opt: OptionsTransliterate = deepClone({
...this.options,
...options,
});
// force convert to string
let str = typeof source === 'string' ? source : String(source);
const replaceOption: OptionReplaceArray = this.formatReplaceOption(
opt.replace as OptionReplaceCombined,
);
if (replaceOption.length) {
str = this.replaceString(str, replaceOption, opt.ignore);
}
// ignore
const ignoreRanges: IntervalArray =
opt.ignore && opt.ignore.length > 0
? findStrOccurrences(str, opt.ignore)
: [];
str = this.codeMapReplace(str, ignoreRanges, opt);
// trim spaces at the beginning and ending of the string
if (opt.trim) {
str = str.trim();
}
const replaceAfterOption: OptionReplaceArray = this.formatReplaceOption(
opt.replaceAfter as OptionReplaceCombined,
);
if (replaceAfterOption.length) {
str = this.replaceString(str, replaceAfterOption);
}
return str;
}
} | }
| random_line_split |
transliterate.ts | import {
IntervalArray,
OptionReplaceArray,
OptionReplaceCombined,
OptionReplaceObject,
OptionsTransliterate,
} from '../types';
import { charmap, Charmap } from '../../data/charmap';
import {
deepClone,
escapeRegExp,
findStrOccurrences,
inRange,
hasChinese,
regexpReplaceCustom,
hasPunctuationOrSpace,
} from './utils';
export const defaultOptions: OptionsTransliterate = {
ignore: [],
replace: [],
replaceAfter: [],
trim: false,
unknown: '',
fixChineseSpacing: true,
};
export class | {
get options(): OptionsTransliterate {
return deepClone({ ...defaultOptions, ...this.confOptions });
}
constructor(
protected confOptions: OptionsTransliterate = deepClone(defaultOptions),
protected map: Charmap = charmap,
) {}
/**
* Set default config
* @param options
*/
public config(
options?: OptionsTransliterate,
reset = false,
): OptionsTransliterate {
if (reset) {
this.confOptions = {};
}
if (options && typeof options === 'object') {
this.confOptions = deepClone(options);
}
return this.confOptions;
}
/**
* Replace the source string using the code map
* @param str
* @param ignoreRanges
   * @param opt
*/
public codeMapReplace(
str: string,
ignoreRanges: IntervalArray = [],
opt: OptionsTransliterate,
): string {
let index = 0;
let result = '';
const strContainsChinese = opt.fixChineseSpacing && hasChinese(str);
let lastCharHasChinese = false;
for (let i = 0; i < str.length; i++) {
// Get current character, taking surrogates in consideration
const char =
/[\uD800-\uDBFF]/.test(str[i]) && /[\uDC00-\uDFFF]/.test(str[i + 1])
? str[i] + str[i + 1]
: str[i];
let s: string;
let ignoreFixingChinese = false;
switch (true) {
// current character is in ignored list
case inRange(index, ignoreRanges):
        // could be a surrogate pair (a code point outside the BMP stored as two UTF-16 code units)
case char.length === 2 && inRange(index + 1, ignoreRanges):
s = char;
// if it's the first character of an ignored string, then leave ignoreFixingChinese to true
if (
!ignoreRanges.find(
(range) => range[1] >= index && range[0] === index,
)
) {
ignoreFixingChinese = true;
}
break;
default:
s = this.map[char] || opt.unknown || '';
}
// fix Chinese spacing issue
if (strContainsChinese) {
if (
lastCharHasChinese &&
!ignoreFixingChinese &&
!hasPunctuationOrSpace(s)
) {
s = ' ' + s;
}
lastCharHasChinese = !!s && hasChinese(char);
}
result += s;
index += char.length;
      // If it was a surrogate pair, skip the second (low surrogate) code unit
i += char.length - 1;
}
return result;
}
/**
* Convert the object version of the 'replace' option into tuple array one
* @param option replace option to be either an object or tuple array
   * @return the paired array version of the replace option
*/
public formatReplaceOption(
option: OptionReplaceCombined,
): OptionReplaceArray {
if (option instanceof Array) {
// return a new copy of the array
return deepClone(option);
}
// convert object option to array one
const replaceArr: OptionReplaceArray = [];
for (const key in option as OptionReplaceObject) {
/* istanbul ignore else */
if (Object.prototype.hasOwnProperty.call(option, key)) {
replaceArr.push([key, option[key]]);
}
}
return replaceArr;
}
/**
* Search and replace a list of strings(regexps) and return the result string
* @param source Source string
* @param searches Search-replace string(regexp) pairs
*/
public replaceString(
source: string,
searches: OptionReplaceArray,
ignore: string[] = [],
): string {
const clonedSearches = deepClone(searches);
let result = source;
for (let i = 0; i < clonedSearches.length; i++) {
const item = clonedSearches[i];
switch (true) {
case item[0] instanceof RegExp:
item[0] = RegExp(
item[0].source,
`${item[0].flags.replace('g', '')}g`,
);
break;
case typeof item[0] === 'string' && item[0].length > 0:
item[0] = RegExp(escapeRegExp(item[0]), 'g');
break;
default:
item[0] = /[^\s\S]/; // Prevent ReDos attack
}
result = regexpReplaceCustom(result, item[0], item[1], ignore);
}
return result;
}
/**
* Set charmap data
* @param {Charmap} [data]
* @param {boolean} [reset=false]
* @memberof Transliterate
*/
public setData(data?: Charmap, reset = false): Charmap {
if (reset) {
this.map = deepClone(charmap);
}
if (data && typeof data === 'object' && Object.keys(data).length) {
this.map = deepClone(this.map);
for (const from in data) {
/* istanbul ignore else */
if (
Object.prototype.hasOwnProperty.call(data, from) &&
from.length < 3 &&
from <= '\udbff\udfff'
) {
this.map[from] = data[from];
}
}
}
return this.map;
}
/**
* Main transliterate function
* @param source The string which is being transliterated
* @param options Options object
*/
public transliterate(source: string, options?: OptionsTransliterate): string {
options = typeof options === 'object' ? options : {};
const opt: OptionsTransliterate = deepClone({
...this.options,
...options,
});
// force convert to string
let str = typeof source === 'string' ? source : String(source);
const replaceOption: OptionReplaceArray = this.formatReplaceOption(
opt.replace as OptionReplaceCombined,
);
if (replaceOption.length) {
str = this.replaceString(str, replaceOption, opt.ignore);
}
// ignore
const ignoreRanges: IntervalArray =
opt.ignore && opt.ignore.length > 0
? findStrOccurrences(str, opt.ignore)
: [];
str = this.codeMapReplace(str, ignoreRanges, opt);
// trim spaces at the beginning and ending of the string
if (opt.trim) {
str = str.trim();
}
const replaceAfterOption: OptionReplaceArray = this.formatReplaceOption(
opt.replaceAfter as OptionReplaceCombined,
);
if (replaceAfterOption.length) {
str = this.replaceString(str, replaceAfterOption);
}
return str;
}
}
| Transliterate | identifier_name |
TestHelper.js | 'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.getCellMetadata = getCellMetadata;
var _initCellMetadata = require('./initCellMetadata');
var _initCellMetadata2 = _interopRequireDefault(_initCellMetadata);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Default cell sizes and offsets for use in below tests
function | () {
var cellSizes = [10, // 0: 0..0 (min)
20, // 1: 0..10
15, // 2: 0..30
10, // 3: 5..45
15, // 4: 20..55
30, // 5: 50..70
20, // 6: 70..100
10, // 7: 80..110
30 // 8: 110..110 (max)
];
return (0, _initCellMetadata2.default)({
cellCount: cellSizes.length,
size: function size(_ref) {
var index = _ref.index;
return cellSizes[index];
}
});
} | getCellMetadata | identifier_name |
TestHelper.js | 'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.getCellMetadata = getCellMetadata;
var _initCellMetadata = require('./initCellMetadata');
var _initCellMetadata2 = _interopRequireDefault(_initCellMetadata);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Default cell sizes and offsets for use in below tests
function getCellMetadata() {
var cellSizes = [10, // 0: 0..0 (min)
20, // 1: 0..10 | 20, // 6: 70..100
10, // 7: 80..110
30 // 8: 110..110 (max)
];
return (0, _initCellMetadata2.default)({
cellCount: cellSizes.length,
size: function size(_ref) {
var index = _ref.index;
return cellSizes[index];
}
});
} | 15, // 2: 0..30
10, // 3: 5..45
15, // 4: 20..55
30, // 5: 50..70 | random_line_split |
TestHelper.js | 'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.getCellMetadata = getCellMetadata;
var _initCellMetadata = require('./initCellMetadata');
var _initCellMetadata2 = _interopRequireDefault(_initCellMetadata);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Default cell sizes and offsets for use in below tests
function getCellMetadata() | {
var cellSizes = [10, // 0: 0..0 (min)
20, // 1: 0..10
15, // 2: 0..30
10, // 3: 5..45
15, // 4: 20..55
30, // 5: 50..70
20, // 6: 70..100
10, // 7: 80..110
30 // 8: 110..110 (max)
];
return (0, _initCellMetadata2.default)({
cellCount: cellSizes.length,
size: function size(_ref) {
var index = _ref.index;
return cellSizes[index];
}
});
} | identifier_body |
|
test.py | from supervisorerrormiddleware import SupervisorErrorMiddleware
import os
import sys
import paste.fixture
class DummyOutput:
def __init__(self):
self._buffer = []
def write(self, data):
self._buffer.append(data)
def flush(self):
self._buffer = []
def bad_app(environ, start_response):
if environ['PATH_INFO'] != '/good':
raise Exception("Bad Kitty")
else:
start_response("200 OK", [('Content-type', 'text/html')])
return ["Good Kitty"]
def test_without_supervisor():
old_stdout = sys.stdout
try:
sys.stdout = DummyOutput()
app = bad_app
app = SupervisorErrorMiddleware(app)
app = paste.fixture.TestApp(app)
failed = False
try: | app.get("/")
except:
failed = True
assert failed
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
assert not "Bad Kitty" in output
assert not "GET" in output
response = app.get("/good")
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
response.mustcontain("Good Kitty")
assert not "Bad Kitty" in output
assert not "GET" in output
finally:
sys.stdout = old_stdout
def test_with_supervisor():
#Why is there output when stdout is redirected? Because
#paste.fixture.TestApp gets around the redirection.
old_stdout = sys.stdout
try:
os.environ['SUPERVISOR_ENABLED'] = "1" #fake supervisor
sys.stdout = DummyOutput()
app = bad_app
app = SupervisorErrorMiddleware(app)
app = paste.fixture.TestApp(app)
failed = False
try:
app.get("/")
except:
failed = True
assert failed
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
assert "Bad Kitty" in output
assert "GET" in output
response = app.get("/good")
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
response.mustcontain("Good Kitty")
assert not "Bad Kitty" in output
assert not "GET" in output
finally:
sys.stdout = old_stdout
del os.environ['SUPERVISOR_ENABLED'] | random_line_split |
|
test.py | from supervisorerrormiddleware import SupervisorErrorMiddleware
import os
import sys
import paste.fixture
class DummyOutput:
|
def bad_app(environ, start_response):
if environ['PATH_INFO'] != '/good':
raise Exception("Bad Kitty")
else:
start_response("200 OK", [('Content-type', 'text/html')])
return ["Good Kitty"]
def test_without_supervisor():
old_stdout = sys.stdout
try:
sys.stdout = DummyOutput()
app = bad_app
app = SupervisorErrorMiddleware(app)
app = paste.fixture.TestApp(app)
failed = False
try:
app.get("/")
except:
failed = True
assert failed
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
assert not "Bad Kitty" in output
assert not "GET" in output
response = app.get("/good")
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
response.mustcontain("Good Kitty")
assert not "Bad Kitty" in output
assert not "GET" in output
finally:
sys.stdout = old_stdout
def test_with_supervisor():
#Why is there output when stdout is redirected? Because
#paste.fixture.TestApp gets around the redirection.
old_stdout = sys.stdout
try:
os.environ['SUPERVISOR_ENABLED'] = "1" #fake supervisor
sys.stdout = DummyOutput()
app = bad_app
app = SupervisorErrorMiddleware(app)
app = paste.fixture.TestApp(app)
failed = False
try:
app.get("/")
except:
failed = True
assert failed
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
assert "Bad Kitty" in output
assert "GET" in output
response = app.get("/good")
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
response.mustcontain("Good Kitty")
assert not "Bad Kitty" in output
assert not "GET" in output
finally:
sys.stdout = old_stdout
del os.environ['SUPERVISOR_ENABLED']
| def __init__(self):
self._buffer = []
def write(self, data):
self._buffer.append(data)
def flush(self):
self._buffer = [] | identifier_body |
test.py | from supervisorerrormiddleware import SupervisorErrorMiddleware
import os
import sys
import paste.fixture
class DummyOutput:
def __init__(self):
self._buffer = []
def write(self, data):
self._buffer.append(data)
def flush(self):
self._buffer = []
def bad_app(environ, start_response):
if environ['PATH_INFO'] != '/good':
raise Exception("Bad Kitty")
else:
|
def test_without_supervisor():
old_stdout = sys.stdout
try:
sys.stdout = DummyOutput()
app = bad_app
app = SupervisorErrorMiddleware(app)
app = paste.fixture.TestApp(app)
failed = False
try:
app.get("/")
except:
failed = True
assert failed
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
assert not "Bad Kitty" in output
assert not "GET" in output
response = app.get("/good")
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
response.mustcontain("Good Kitty")
assert not "Bad Kitty" in output
assert not "GET" in output
finally:
sys.stdout = old_stdout
def test_with_supervisor():
#Why is there output when stdout is redirected? Because
#paste.fixture.TestApp gets around the redirection.
old_stdout = sys.stdout
try:
os.environ['SUPERVISOR_ENABLED'] = "1" #fake supervisor
sys.stdout = DummyOutput()
app = bad_app
app = SupervisorErrorMiddleware(app)
app = paste.fixture.TestApp(app)
failed = False
try:
app.get("/")
except:
failed = True
assert failed
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
assert "Bad Kitty" in output
assert "GET" in output
response = app.get("/good")
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
response.mustcontain("Good Kitty")
assert not "Bad Kitty" in output
assert not "GET" in output
finally:
sys.stdout = old_stdout
del os.environ['SUPERVISOR_ENABLED']
| start_response("200 OK", [('Content-type', 'text/html')])
return ["Good Kitty"] | conditional_block |
test.py | from supervisorerrormiddleware import SupervisorErrorMiddleware
import os
import sys
import paste.fixture
class DummyOutput:
def __init__(self):
self._buffer = []
def write(self, data):
self._buffer.append(data)
def flush(self):
self._buffer = []
def | (environ, start_response):
if environ['PATH_INFO'] != '/good':
raise Exception("Bad Kitty")
else:
start_response("200 OK", [('Content-type', 'text/html')])
return ["Good Kitty"]
def test_without_supervisor():
old_stdout = sys.stdout
try:
sys.stdout = DummyOutput()
app = bad_app
app = SupervisorErrorMiddleware(app)
app = paste.fixture.TestApp(app)
failed = False
try:
app.get("/")
except:
failed = True
assert failed
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
assert not "Bad Kitty" in output
assert not "GET" in output
response = app.get("/good")
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
response.mustcontain("Good Kitty")
assert not "Bad Kitty" in output
assert not "GET" in output
finally:
sys.stdout = old_stdout
def test_with_supervisor():
#Why is there output when stdout is redirected? Because
#paste.fixture.TestApp gets around the redirection.
old_stdout = sys.stdout
try:
os.environ['SUPERVISOR_ENABLED'] = "1" #fake supervisor
sys.stdout = DummyOutput()
app = bad_app
app = SupervisorErrorMiddleware(app)
app = paste.fixture.TestApp(app)
failed = False
try:
app.get("/")
except:
failed = True
assert failed
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
assert "Bad Kitty" in output
assert "GET" in output
response = app.get("/good")
output = "".join(sys.stdout._buffer)
sys.stdout.flush()
response.mustcontain("Good Kitty")
assert not "Bad Kitty" in output
assert not "GET" in output
finally:
sys.stdout = old_stdout
del os.environ['SUPERVISOR_ENABLED']
| bad_app | identifier_name |
applicationGatewaySslPolicy.js | /*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
var util = require('util');
/**
* @class
* Initializes a new instance of the ApplicationGatewaySslPolicy class.
* @constructor
* Application gateway SSL policy
*
* @member {array} [disabledSslProtocols] SSL protocols to be disabled on
* Application Gateway
*
*/
function | () {
}
/**
* Defines the metadata of ApplicationGatewaySslPolicy
*
* @returns {object} metadata of ApplicationGatewaySslPolicy
*
*/
ApplicationGatewaySslPolicy.prototype.mapper = function () {
return {
required: false,
serializedName: 'ApplicationGatewaySslPolicy',
type: {
name: 'Composite',
className: 'ApplicationGatewaySslPolicy',
modelProperties: {
disabledSslProtocols: {
required: false,
serializedName: 'disabledSslProtocols',
type: {
name: 'Sequence',
element: {
required: false,
serializedName: 'StringElementType',
type: {
name: 'String'
}
}
}
}
}
}
};
};
module.exports = ApplicationGatewaySslPolicy;
| ApplicationGatewaySslPolicy | identifier_name |
applicationGatewaySslPolicy.js | /*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
var util = require('util');
/**
* @class
* Initializes a new instance of the ApplicationGatewaySslPolicy class.
* @constructor
* Application gateway SSL policy
*
* @member {array} [disabledSslProtocols] SSL protocols to be disabled on
* Application Gateway
*
*/
function ApplicationGatewaySslPolicy() {
}
/**
* Defines the metadata of ApplicationGatewaySslPolicy
*
* @returns {object} metadata of ApplicationGatewaySslPolicy
*
*/
ApplicationGatewaySslPolicy.prototype.mapper = function () {
return {
required: false,
serializedName: 'ApplicationGatewaySslPolicy',
type: {
name: 'Composite', | serializedName: 'disabledSslProtocols',
type: {
name: 'Sequence',
element: {
required: false,
serializedName: 'StringElementType',
type: {
name: 'String'
}
}
}
}
}
}
};
};
module.exports = ApplicationGatewaySslPolicy; | className: 'ApplicationGatewaySslPolicy',
modelProperties: {
disabledSslProtocols: {
required: false, | random_line_split |
applicationGatewaySslPolicy.js | /*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
var util = require('util');
/**
* @class
* Initializes a new instance of the ApplicationGatewaySslPolicy class.
* @constructor
* Application gateway SSL policy
*
* @member {array} [disabledSslProtocols] SSL protocols to be disabled on
* Application Gateway
*
*/
function ApplicationGatewaySslPolicy() |
/**
* Defines the metadata of ApplicationGatewaySslPolicy
*
* @returns {object} metadata of ApplicationGatewaySslPolicy
*
*/
ApplicationGatewaySslPolicy.prototype.mapper = function () {
return {
required: false,
serializedName: 'ApplicationGatewaySslPolicy',
type: {
name: 'Composite',
className: 'ApplicationGatewaySslPolicy',
modelProperties: {
disabledSslProtocols: {
required: false,
serializedName: 'disabledSslProtocols',
type: {
name: 'Sequence',
element: {
required: false,
serializedName: 'StringElementType',
type: {
name: 'String'
}
}
}
}
}
}
};
};
module.exports = ApplicationGatewaySslPolicy;
| {
} | identifier_body |
dry.js | 'use strict';
/* Controllers */
var app = angular.module('ng-dashboard.controllers.dry', []);
app.controller('DryController', ['$scope', 'DryFactory', function ($scope, DryFactory) {
DryFactory.query({}, function (data) {
$scope.numberOfWarnings = data.numberOfWarnings;
$scope.numberOfHighPriorityWarnings = data.numberOfHighPriorityWarnings;
$scope.numberOfNormalPriorityWarnings = data.numberOfNormalPriorityWarnings;
$scope.numberOfLowPriorityWarnings = data.numberOfLowPriorityWarnings;
$scope.numberOfNewWarnings = data.numberOfNewWarnings - data.numberOfFixedWarnings;
$scope.diffLow = data.numberOfNewLowPriorityWarnings;
$scope.diffNormal = data.numberOfNewNormalPriorityWarnings;
$scope.diffHigh = data.numberOfNewHighPriorityWarnings;
setupDryChart(data);
if((data.numberOfFixedWarnings - data.numberOfNewWarnings) > 0){
$scope.less = true;
} else if((data.numberOfFixedWarnings - data.numberOfNewWarnings) < 0) {
$scope.more = true;
} else {
$scope.same = true;
}
$scope.done = true;
fn_computeSize();
}, function (error) {
$scope.error = true;
})
}]);
function | (data) {
var chartData = [
{
value: data.numberOfHighPriorityWarnings,
color:"#d9534f"
},
{
value : data.numberOfNormalPriorityWarnings,
color : "#f0ad4e"
},
{
value : data.numberOfLowPriorityWarnings,
color : "#5bc0de"
}
]
var ctx = document.getElementById("dry-chart").getContext("2d");
var dryChart = new Chart(ctx).Doughnut(chartData);
} | setupDryChart | identifier_name |
dry.js | 'use strict';
/* Controllers */
var app = angular.module('ng-dashboard.controllers.dry', []);
app.controller('DryController', ['$scope', 'DryFactory', function ($scope, DryFactory) {
DryFactory.query({}, function (data) {
$scope.numberOfWarnings = data.numberOfWarnings;
$scope.numberOfHighPriorityWarnings = data.numberOfHighPriorityWarnings;
$scope.numberOfNormalPriorityWarnings = data.numberOfNormalPriorityWarnings;
$scope.numberOfLowPriorityWarnings = data.numberOfLowPriorityWarnings;
$scope.numberOfNewWarnings = data.numberOfNewWarnings - data.numberOfFixedWarnings;
$scope.diffLow = data.numberOfNewLowPriorityWarnings;
$scope.diffNormal = data.numberOfNewNormalPriorityWarnings;
$scope.diffHigh = data.numberOfNewHighPriorityWarnings;
setupDryChart(data);
if((data.numberOfFixedWarnings - data.numberOfNewWarnings) > 0){
$scope.less = true;
} else if((data.numberOfFixedWarnings - data.numberOfNewWarnings) < 0) {
$scope.more = true;
} else |
$scope.done = true;
fn_computeSize();
}, function (error) {
$scope.error = true;
})
}]);
function setupDryChart(data) {
var chartData = [
{
value: data.numberOfHighPriorityWarnings,
color:"#d9534f"
},
{
value : data.numberOfNormalPriorityWarnings,
color : "#f0ad4e"
},
{
value : data.numberOfLowPriorityWarnings,
color : "#5bc0de"
}
]
var ctx = document.getElementById("dry-chart").getContext("2d");
var dryChart = new Chart(ctx).Doughnut(chartData);
} | {
$scope.same = true;
} | conditional_block |
dry.js | 'use strict';
/* Controllers */
var app = angular.module('ng-dashboard.controllers.dry', []);
app.controller('DryController', ['$scope', 'DryFactory', function ($scope, DryFactory) {
DryFactory.query({}, function (data) {
$scope.numberOfWarnings = data.numberOfWarnings;
$scope.numberOfHighPriorityWarnings = data.numberOfHighPriorityWarnings;
$scope.numberOfNormalPriorityWarnings = data.numberOfNormalPriorityWarnings;
$scope.numberOfLowPriorityWarnings = data.numberOfLowPriorityWarnings;
$scope.numberOfNewWarnings = data.numberOfNewWarnings - data.numberOfFixedWarnings;
$scope.diffLow = data.numberOfNewLowPriorityWarnings;
$scope.diffNormal = data.numberOfNewNormalPriorityWarnings;
$scope.diffHigh = data.numberOfNewHighPriorityWarnings;
setupDryChart(data);
if((data.numberOfFixedWarnings - data.numberOfNewWarnings) > 0){
$scope.less = true;
} else if((data.numberOfFixedWarnings - data.numberOfNewWarnings) < 0) {
$scope.more = true;
} else {
$scope.same = true;
}
$scope.done = true;
fn_computeSize();
}, function (error) {
$scope.error = true;
})
}]);
function setupDryChart(data) {
var chartData = [
{ | },
{
value : data.numberOfNormalPriorityWarnings,
color : "#f0ad4e"
},
{
value : data.numberOfLowPriorityWarnings,
color : "#5bc0de"
}
]
var ctx = document.getElementById("dry-chart").getContext("2d");
var dryChart = new Chart(ctx).Doughnut(chartData);
} | value: data.numberOfHighPriorityWarnings,
color:"#d9534f" | random_line_split |
dry.js | 'use strict';
/* Controllers */
var app = angular.module('ng-dashboard.controllers.dry', []);
app.controller('DryController', ['$scope', 'DryFactory', function ($scope, DryFactory) {
DryFactory.query({}, function (data) {
$scope.numberOfWarnings = data.numberOfWarnings;
$scope.numberOfHighPriorityWarnings = data.numberOfHighPriorityWarnings;
$scope.numberOfNormalPriorityWarnings = data.numberOfNormalPriorityWarnings;
$scope.numberOfLowPriorityWarnings = data.numberOfLowPriorityWarnings;
$scope.numberOfNewWarnings = data.numberOfNewWarnings - data.numberOfFixedWarnings;
$scope.diffLow = data.numberOfNewLowPriorityWarnings;
$scope.diffNormal = data.numberOfNewNormalPriorityWarnings;
$scope.diffHigh = data.numberOfNewHighPriorityWarnings;
setupDryChart(data);
if((data.numberOfFixedWarnings - data.numberOfNewWarnings) > 0){
$scope.less = true;
} else if((data.numberOfFixedWarnings - data.numberOfNewWarnings) < 0) {
$scope.more = true;
} else {
$scope.same = true;
}
$scope.done = true;
fn_computeSize();
}, function (error) {
$scope.error = true;
})
}]);
function setupDryChart(data) | {
var chartData = [
{
value: data.numberOfHighPriorityWarnings,
color:"#d9534f"
},
{
value : data.numberOfNormalPriorityWarnings,
color : "#f0ad4e"
},
{
value : data.numberOfLowPriorityWarnings,
color : "#5bc0de"
}
]
var ctx = document.getElementById("dry-chart").getContext("2d");
var dryChart = new Chart(ctx).Doughnut(chartData);
} | identifier_body |
|
Absorber.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var ColorUtils_1 = require("./Utils/ColorUtils");
var Utils_1 = require("./Utils/Utils");
var Absorber = (function () {
function Absorber(container, options, position) {
var _a, _b;
this.container = container;
this.initialPosition = position;
this.options = options;
var size = options.size.value * container.retina.pixelRatio;
var random = typeof options.size.random === "boolean" ? options.size.random : options.size.random.enable;
var minSize = typeof options.size.random === "boolean" ? 1 : options.size.random.minimumValue;
if (random) {
size = Utils_1.Utils.randomInRange(minSize, size);
}
this.opacity = this.options.opacity;
this.size = size * container.retina.pixelRatio;
this.mass = size * options.size.density;
this.limit = options.size.limit;
var color = typeof options.color === "string" ? { value: options.color } : options.color;
this.color = (_a = ColorUtils_1.ColorUtils.colorToRgb(color)) !== null && _a !== void 0 ? _a : {
b: 0,
g: 0,
r: 0,
};
this.position = (_b = this.initialPosition) !== null && _b !== void 0 ? _b : this.calcPosition();
}
Absorber.prototype.attract = function (particle) {
var container = this.container;
var dx = this.position.x - (particle.position.x + particle.offset.x);
var dy = this.position.y - (particle.position.y + particle.offset.y);
var distance = Math.sqrt(Math.abs(dx * dx + dy * dy));
var angle = Math.atan2(dx, dy) * (180 / Math.PI);
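        // Inverse-square attraction: pull strength scales with the absorber's mass and falls off with the square of the distance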
var acceleration = this.mass / Math.pow(distance, 2);
if (distance < this.size + particle.size.value) {
var remove = false;
var sizeFactor = particle.size.value * 0.033;
if (this.size > particle.size.value && distance < this.size - particle.size.value) {
container.particles.remove(particle);
remove = true;
}
else {
particle.size.value -= sizeFactor;
particle.velocity.horizontal += Math.sin(angle * (Math.PI / 180)) * acceleration;
particle.velocity.vertical += Math.cos(angle * (Math.PI / 180)) * acceleration;
}
if (this.limit === undefined || this.size < this.limit) {
this.size += sizeFactor;
}
this.mass += sizeFactor * this.options.size.density;
return !remove;
}
else |
};
Absorber.prototype.resize = function () {
var initialPosition = this.initialPosition;
this.position = initialPosition && Utils_1.Utils.isPointInside(initialPosition, this.container.canvas.size) ?
initialPosition :
this.calcPosition();
};
Absorber.prototype.draw = function () {
var container = this.container;
container.canvas.drawAbsorber(this);
};
Absorber.prototype.calcPosition = function () {
var _a;
var container = this.container;
var percentPosition = (_a = this.options.position) !== null && _a !== void 0 ? _a : {
x: Math.random() * 100,
y: Math.random() * 100,
};
return {
x: percentPosition.x / 100 * container.canvas.size.width,
y: percentPosition.y / 100 * container.canvas.size.height,
};
};
return Absorber;
}());
exports.Absorber = Absorber;
| {
particle.velocity.horizontal += Math.sin(angle * (Math.PI / 180)) * acceleration;
particle.velocity.vertical += Math.cos(angle * (Math.PI / 180)) * acceleration;
return true;
} | conditional_block |
Absorber.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var ColorUtils_1 = require("./Utils/ColorUtils");
var Utils_1 = require("./Utils/Utils");
var Absorber = (function () {
function Absorber(container, options, position) | };
this.position = (_b = this.initialPosition) !== null && _b !== void 0 ? _b : this.calcPosition();
}
Absorber.prototype.attract = function (particle) {
var container = this.container;
var dx = this.position.x - (particle.position.x + particle.offset.x);
var dy = this.position.y - (particle.position.y + particle.offset.y);
var distance = Math.sqrt(Math.abs(dx * dx + dy * dy));
var angle = Math.atan2(dx, dy) * (180 / Math.PI);
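        // Inverse-square attraction: pull strength scales with the absorber's mass and falls off with the square of the distance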
var acceleration = this.mass / Math.pow(distance, 2);
if (distance < this.size + particle.size.value) {
var remove = false;
var sizeFactor = particle.size.value * 0.033;
if (this.size > particle.size.value && distance < this.size - particle.size.value) {
container.particles.remove(particle);
remove = true;
}
else {
particle.size.value -= sizeFactor;
particle.velocity.horizontal += Math.sin(angle * (Math.PI / 180)) * acceleration;
particle.velocity.vertical += Math.cos(angle * (Math.PI / 180)) * acceleration;
}
if (this.limit === undefined || this.size < this.limit) {
this.size += sizeFactor;
}
this.mass += sizeFactor * this.options.size.density;
return !remove;
}
else {
particle.velocity.horizontal += Math.sin(angle * (Math.PI / 180)) * acceleration;
particle.velocity.vertical += Math.cos(angle * (Math.PI / 180)) * acceleration;
return true;
}
};
Absorber.prototype.resize = function () {
var initialPosition = this.initialPosition;
this.position = initialPosition && Utils_1.Utils.isPointInside(initialPosition, this.container.canvas.size) ?
initialPosition :
this.calcPosition();
};
Absorber.prototype.draw = function () {
var container = this.container;
container.canvas.drawAbsorber(this);
};
Absorber.prototype.calcPosition = function () {
var _a;
var container = this.container;
var percentPosition = (_a = this.options.position) !== null && _a !== void 0 ? _a : {
x: Math.random() * 100,
y: Math.random() * 100,
};
return {
x: percentPosition.x / 100 * container.canvas.size.width,
y: percentPosition.y / 100 * container.canvas.size.height,
};
};
return Absorber;
}());
exports.Absorber = Absorber;
| {
var _a, _b;
this.container = container;
this.initialPosition = position;
this.options = options;
var size = options.size.value * container.retina.pixelRatio;
var random = typeof options.size.random === "boolean" ? options.size.random : options.size.random.enable;
var minSize = typeof options.size.random === "boolean" ? 1 : options.size.random.minimumValue;
if (random) {
size = Utils_1.Utils.randomInRange(minSize, size);
}
this.opacity = this.options.opacity;
this.size = size * container.retina.pixelRatio;
this.mass = size * options.size.density;
this.limit = options.size.limit;
var color = typeof options.color === "string" ? { value: options.color } : options.color;
this.color = (_a = ColorUtils_1.ColorUtils.colorToRgb(color)) !== null && _a !== void 0 ? _a : {
b: 0,
g: 0,
r: 0, | identifier_body |
Absorber.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var ColorUtils_1 = require("./Utils/ColorUtils");
var Utils_1 = require("./Utils/Utils");
var Absorber = (function () {
function | (container, options, position) {
var _a, _b;
this.container = container;
this.initialPosition = position;
this.options = options;
var size = options.size.value * container.retina.pixelRatio;
var random = typeof options.size.random === "boolean" ? options.size.random : options.size.random.enable;
var minSize = typeof options.size.random === "boolean" ? 1 : options.size.random.minimumValue;
if (random) {
size = Utils_1.Utils.randomInRange(minSize, size);
}
this.opacity = this.options.opacity;
this.size = size * container.retina.pixelRatio;
this.mass = size * options.size.density;
this.limit = options.size.limit;
var color = typeof options.color === "string" ? { value: options.color } : options.color;
this.color = (_a = ColorUtils_1.ColorUtils.colorToRgb(color)) !== null && _a !== void 0 ? _a : {
b: 0,
g: 0,
r: 0,
};
this.position = (_b = this.initialPosition) !== null && _b !== void 0 ? _b : this.calcPosition();
}
Absorber.prototype.attract = function (particle) {
var container = this.container;
var dx = this.position.x - (particle.position.x + particle.offset.x);
var dy = this.position.y - (particle.position.y + particle.offset.y);
var distance = Math.sqrt(Math.abs(dx * dx + dy * dy));
var angle = Math.atan2(dx, dy) * (180 / Math.PI);
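        // Inverse-square attraction: pull strength scales with the absorber's mass and falls off with the square of the distance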
var acceleration = this.mass / Math.pow(distance, 2);
if (distance < this.size + particle.size.value) {
var remove = false;
var sizeFactor = particle.size.value * 0.033;
if (this.size > particle.size.value && distance < this.size - particle.size.value) {
container.particles.remove(particle);
remove = true;
}
else {
particle.size.value -= sizeFactor;
particle.velocity.horizontal += Math.sin(angle * (Math.PI / 180)) * acceleration;
particle.velocity.vertical += Math.cos(angle * (Math.PI / 180)) * acceleration;
}
if (this.limit === undefined || this.size < this.limit) {
this.size += sizeFactor;
}
this.mass += sizeFactor * this.options.size.density;
return !remove;
}
else {
particle.velocity.horizontal += Math.sin(angle * (Math.PI / 180)) * acceleration;
particle.velocity.vertical += Math.cos(angle * (Math.PI / 180)) * acceleration;
return true;
}
};
Absorber.prototype.resize = function () {
var initialPosition = this.initialPosition;
this.position = initialPosition && Utils_1.Utils.isPointInside(initialPosition, this.container.canvas.size) ?
initialPosition :
this.calcPosition();
};
Absorber.prototype.draw = function () {
var container = this.container;
container.canvas.drawAbsorber(this);
};
Absorber.prototype.calcPosition = function () {
var _a;
var container = this.container;
var percentPosition = (_a = this.options.position) !== null && _a !== void 0 ? _a : {
x: Math.random() * 100,
y: Math.random() * 100,
};
return {
x: percentPosition.x / 100 * container.canvas.size.width,
y: percentPosition.y / 100 * container.canvas.size.height,
};
};
return Absorber;
}());
exports.Absorber = Absorber;
| Absorber | identifier_name |
Absorber.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var ColorUtils_1 = require("./Utils/ColorUtils");
var Utils_1 = require("./Utils/Utils");
var Absorber = (function () {
function Absorber(container, options, position) {
var _a, _b;
this.container = container;
this.initialPosition = position;
this.options = options;
var size = options.size.value * container.retina.pixelRatio;
var random = typeof options.size.random === "boolean" ? options.size.random : options.size.random.enable;
var minSize = typeof options.size.random === "boolean" ? 1 : options.size.random.minimumValue;
if (random) {
size = Utils_1.Utils.randomInRange(minSize, size);
}
this.opacity = this.options.opacity;
this.size = size * container.retina.pixelRatio;
this.mass = size * options.size.density;
this.limit = options.size.limit;
var color = typeof options.color === "string" ? { value: options.color } : options.color;
this.color = (_a = ColorUtils_1.ColorUtils.colorToRgb(color)) !== null && _a !== void 0 ? _a : {
b: 0,
g: 0,
r: 0,
};
this.position = (_b = this.initialPosition) !== null && _b !== void 0 ? _b : this.calcPosition();
}
Absorber.prototype.attract = function (particle) {
var container = this.container;
var dx = this.position.x - (particle.position.x + particle.offset.x);
var dy = this.position.y - (particle.position.y + particle.offset.y);
var distance = Math.sqrt(Math.abs(dx * dx + dy * dy));
var angle = Math.atan2(dx, dy) * (180 / Math.PI);
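        // Inverse-square attraction: pull strength scales with the absorber's mass and falls off with the square of the distance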
var acceleration = this.mass / Math.pow(distance, 2);
if (distance < this.size + particle.size.value) {
var remove = false;
var sizeFactor = particle.size.value * 0.033;
if (this.size > particle.size.value && distance < this.size - particle.size.value) {
container.particles.remove(particle);
remove = true;
}
else {
particle.size.value -= sizeFactor;
particle.velocity.horizontal += Math.sin(angle * (Math.PI / 180)) * acceleration;
particle.velocity.vertical += Math.cos(angle * (Math.PI / 180)) * acceleration;
}
if (this.limit === undefined || this.size < this.limit) {
this.size += sizeFactor;
}
this.mass += sizeFactor * this.options.size.density;
return !remove;
}
else {
particle.velocity.horizontal += Math.sin(angle * (Math.PI / 180)) * acceleration;
particle.velocity.vertical += Math.cos(angle * (Math.PI / 180)) * acceleration;
return true;
}
};
Absorber.prototype.resize = function () {
var initialPosition = this.initialPosition;
this.position = initialPosition && Utils_1.Utils.isPointInside(initialPosition, this.container.canvas.size) ?
initialPosition :
this.calcPosition();
};
Absorber.prototype.draw = function () {
var container = this.container;
container.canvas.drawAbsorber(this); | };
Absorber.prototype.calcPosition = function () {
var _a;
var container = this.container;
var percentPosition = (_a = this.options.position) !== null && _a !== void 0 ? _a : {
x: Math.random() * 100,
y: Math.random() * 100,
};
return {
x: percentPosition.x / 100 * container.canvas.size.width,
y: percentPosition.y / 100 * container.canvas.size.height,
};
};
return Absorber;
}());
exports.Absorber = Absorber; | random_line_split |
|
test_scale.py | #!/usr/bin/env python
# Copyright (C) 2006-2016 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
|
def testRegression(self):
inputSize = 1024
input = range(inputSize)
factor = 0.5
expected = [factor * n for n in input]
output = Scale(factor=factor, clipping=False)(input)
self.assertEqualVector(output, expected)
def testZero(self):
inputSize = 1024
input = [0] * inputSize
expected = input[:]
output = Scale()(input)
self.assertEqualVector(output, input)
def testEmpty(self):
input = []
expected = input[:]
output = Scale()(input)
self.assertEqualVector(output, input)
def testClipping(self):
inputSize = 1024
maxAbsValue= 10
factor = 1
input = [n + maxAbsValue for n in range(inputSize)]
expected = [maxAbsValue] * inputSize
output = Scale(factor=factor, clipping=True, maxAbsValue=maxAbsValue)(input)
self.assertEqualVector(output, expected)
def testInvalidParam(self):
self.assertConfigureFails(Scale(), { 'maxAbsValue': -1 })
suite = allTests(TestScale)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite) |
class TestScale(TestCase):
| random_line_split |
test_scale.py | #!/usr/bin/env python
# Copyright (C) 2006-2016 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
class TestScale(TestCase):
def testRegression(self):
inputSize = 1024
input = range(inputSize)
factor = 0.5
expected = [factor * n for n in input]
output = Scale(factor=factor, clipping=False)(input)
self.assertEqualVector(output, expected)
def testZero(self):
inputSize = 1024
input = [0] * inputSize
expected = input[:]
output = Scale()(input)
self.assertEqualVector(output, input)
def | (self):
input = []
expected = input[:]
output = Scale()(input)
self.assertEqualVector(output, input)
def testClipping(self):
inputSize = 1024
maxAbsValue= 10
factor = 1
input = [n + maxAbsValue for n in range(inputSize)]
expected = [maxAbsValue] * inputSize
output = Scale(factor=factor, clipping=True, maxAbsValue=maxAbsValue)(input)
self.assertEqualVector(output, expected)
def testInvalidParam(self):
self.assertConfigureFails(Scale(), { 'maxAbsValue': -1 })
suite = allTests(TestScale)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)
| testEmpty | identifier_name |
test_scale.py | #!/usr/bin/env python
# Copyright (C) 2006-2016 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
class TestScale(TestCase):
def testRegression(self):
inputSize = 1024
input = range(inputSize)
factor = 0.5
expected = [factor * n for n in input]
output = Scale(factor=factor, clipping=False)(input)
self.assertEqualVector(output, expected)
def testZero(self):
inputSize = 1024
input = [0] * inputSize
expected = input[:]
output = Scale()(input)
self.assertEqualVector(output, input)
def testEmpty(self):
input = []
expected = input[:]
output = Scale()(input)
self.assertEqualVector(output, input)
def testClipping(self):
inputSize = 1024
maxAbsValue= 10
factor = 1
input = [n + maxAbsValue for n in range(inputSize)]
expected = [maxAbsValue] * inputSize
output = Scale(factor=factor, clipping=True, maxAbsValue=maxAbsValue)(input)
self.assertEqualVector(output, expected)
def testInvalidParam(self):
|
suite = allTests(TestScale)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)
| self.assertConfigureFails(Scale(), { 'maxAbsValue': -1 }) | identifier_body |
test_scale.py | #!/usr/bin/env python
# Copyright (C) 2006-2016 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
class TestScale(TestCase):
def testRegression(self):
inputSize = 1024
input = range(inputSize)
factor = 0.5
expected = [factor * n for n in input]
output = Scale(factor=factor, clipping=False)(input)
self.assertEqualVector(output, expected)
def testZero(self):
inputSize = 1024
input = [0] * inputSize
expected = input[:]
output = Scale()(input)
self.assertEqualVector(output, input)
def testEmpty(self):
input = []
expected = input[:]
output = Scale()(input)
self.assertEqualVector(output, input)
def testClipping(self):
inputSize = 1024
maxAbsValue= 10
factor = 1
input = [n + maxAbsValue for n in range(inputSize)]
expected = [maxAbsValue] * inputSize
output = Scale(factor=factor, clipping=True, maxAbsValue=maxAbsValue)(input)
self.assertEqualVector(output, expected)
def testInvalidParam(self):
self.assertConfigureFails(Scale(), { 'maxAbsValue': -1 })
suite = allTests(TestScale)
if __name__ == '__main__':
| TextTestRunner(verbosity=2).run(suite) | conditional_block |
|
index.js | import classnames from 'classnames';
import PropTypes from 'prop-types';
import React, { Component } from 'react';
import telephonyStatuses from 'ringcentral-integration/enums/telephonyStatus';
import callDirections from 'ringcentral-integration/enums/callDirections';
import CloseIcon from '../../assets/images/CloseIcon.svg';
import { Button } from '../Button';
import LogNotification from '../LogNotificationV2';
import styles from './styles.scss';
import i18n from './i18n';
export default class NotificationSection extends Component {
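  // Close the notification automatically once the tracked call has a result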
componentWillUpdate(nextProps) {
const {
logNotification,
onCloseNotification,
currentNotificationIdentify,
} = nextProps;
if (currentNotificationIdentify) {
const { call = {} } = logNotification;
const { result } = call;
if (result) |
}
}
renderLogSection() {
const {
formatPhone,
currentLocale,
logNotification,
showNotiLogButton,
onCloseNotification,
onSaveNotification,
onExpandNotification,
onDiscardNotification,
currentNotificationIdentify,
currentSession,
onReject,
onHangup,
shrinkNotification,
} = this.props;
const { call } = logNotification;
const { result, telephonyStatus } = call;
const status = result || telephonyStatus;
let statusI18n = null;
const isIncomingCall =
status === telephonyStatuses.ringing &&
call.direction === callDirections.inbound;
if (isIncomingCall) {
statusI18n = i18n.getString('ringing', currentLocale);
} else {
statusI18n = i18n.getString('callConnected', currentLocale);
}
return (
<div className={classnames(styles.root)}>
<div className={styles.notificationModal}>
<div className={styles.modalHeader}>
<div className={styles.modalTitle}>{statusI18n}</div>
<div className={styles.modalCloseBtn}>
<Button dataSign="closeButton" onClick={onCloseNotification}>
<CloseIcon />
</Button>
</div>
</div>
<LogNotification
showEndButton
showLogButton={showNotiLogButton}
currentLocale={currentLocale}
formatPhone={formatPhone}
currentLog={logNotification}
isExpand={logNotification.notificationIsExpand}
onSave={onSaveNotification}
onExpand={onExpandNotification}
onDiscard={onDiscardNotification}
onReject={() => onReject(currentNotificationIdentify)}
onHangup={() => onHangup(currentNotificationIdentify)}
currentSession={currentSession}
shrinkNotification={shrinkNotification}
/>
</div>
</div>
);
}
render() {
return this.renderLogSection();
}
}
NotificationSection.propTypes = {
currentLocale: PropTypes.string.isRequired,
formatPhone: PropTypes.func.isRequired,
// - Notification
logNotification: PropTypes.object,
onCloseNotification: PropTypes.func,
onDiscardNotification: PropTypes.func,
onSaveNotification: PropTypes.func,
onExpandNotification: PropTypes.func,
showNotiLogButton: PropTypes.bool,
currentNotificationIdentify: PropTypes.string,
currentSession: PropTypes.object,
onReject: PropTypes.func.isRequired,
onHangup: PropTypes.func.isRequired,
shrinkNotification: PropTypes.func,
};
NotificationSection.defaultProps = {
// Notification
logNotification: undefined,
onCloseNotification: undefined,
onDiscardNotification: undefined,
onSaveNotification: undefined,
onExpandNotification: undefined,
showNotiLogButton: true,
currentNotificationIdentify: '',
currentSession: undefined,
shrinkNotification: undefined,
};
| {
onCloseNotification();
} | conditional_block |
index.js | import classnames from 'classnames';
import PropTypes from 'prop-types';
import React, { Component } from 'react';
import telephonyStatuses from 'ringcentral-integration/enums/telephonyStatus';
import callDirections from 'ringcentral-integration/enums/callDirections';
import CloseIcon from '../../assets/images/CloseIcon.svg';
import { Button } from '../Button';
import LogNotification from '../LogNotificationV2';
import styles from './styles.scss';
import i18n from './i18n';
export default class NotificationSection extends Component {
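  // Close the notification automatically once the tracked call has a result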
componentWillUpdate(nextProps) |
renderLogSection() {
const {
formatPhone,
currentLocale,
logNotification,
showNotiLogButton,
onCloseNotification,
onSaveNotification,
onExpandNotification,
onDiscardNotification,
currentNotificationIdentify,
currentSession,
onReject,
onHangup,
shrinkNotification,
} = this.props;
const { call } = logNotification;
const { result, telephonyStatus } = call;
const status = result || telephonyStatus;
let statusI18n = null;
const isIncomingCall =
status === telephonyStatuses.ringing &&
call.direction === callDirections.inbound;
if (isIncomingCall) {
statusI18n = i18n.getString('ringing', currentLocale);
} else {
statusI18n = i18n.getString('callConnected', currentLocale);
}
return (
<div className={classnames(styles.root)}>
<div className={styles.notificationModal}>
<div className={styles.modalHeader}>
<div className={styles.modalTitle}>{statusI18n}</div>
<div className={styles.modalCloseBtn}>
<Button dataSign="closeButton" onClick={onCloseNotification}>
<CloseIcon />
</Button>
</div>
</div>
<LogNotification
showEndButton
showLogButton={showNotiLogButton}
currentLocale={currentLocale}
formatPhone={formatPhone}
currentLog={logNotification}
isExpand={logNotification.notificationIsExpand}
onSave={onSaveNotification}
onExpand={onExpandNotification}
onDiscard={onDiscardNotification}
onReject={() => onReject(currentNotificationIdentify)}
onHangup={() => onHangup(currentNotificationIdentify)}
currentSession={currentSession}
shrinkNotification={shrinkNotification}
/>
</div>
</div>
);
}
render() {
return this.renderLogSection();
}
}
NotificationSection.propTypes = {
currentLocale: PropTypes.string.isRequired,
formatPhone: PropTypes.func.isRequired,
// - Notification
logNotification: PropTypes.object,
onCloseNotification: PropTypes.func,
onDiscardNotification: PropTypes.func,
onSaveNotification: PropTypes.func,
onExpandNotification: PropTypes.func,
showNotiLogButton: PropTypes.bool,
currentNotificationIdentify: PropTypes.string,
currentSession: PropTypes.object,
onReject: PropTypes.func.isRequired,
onHangup: PropTypes.func.isRequired,
shrinkNotification: PropTypes.func,
};
NotificationSection.defaultProps = {
// Notification
logNotification: undefined,
onCloseNotification: undefined,
onDiscardNotification: undefined,
onSaveNotification: undefined,
onExpandNotification: undefined,
showNotiLogButton: true,
currentNotificationIdentify: '',
currentSession: undefined,
shrinkNotification: undefined,
};
| {
const {
logNotification,
onCloseNotification,
currentNotificationIdentify,
} = nextProps;
if (currentNotificationIdentify) {
const { call = {} } = logNotification;
const { result } = call;
if (result) {
onCloseNotification();
}
}
} | identifier_body |
index.js | import classnames from 'classnames';
import PropTypes from 'prop-types';
import React, { Component } from 'react';
import telephonyStatuses from 'ringcentral-integration/enums/telephonyStatus';
import callDirections from 'ringcentral-integration/enums/callDirections';
import CloseIcon from '../../assets/images/CloseIcon.svg';
import { Button } from '../Button';
import LogNotification from '../LogNotificationV2';
import styles from './styles.scss';
import i18n from './i18n';
export default class NotificationSection extends Component {
componentWillUpdate(nextProps) {
const {
logNotification,
onCloseNotification,
currentNotificationIdentify,
} = nextProps;
if (currentNotificationIdentify) {
const { call = {} } = logNotification;
const { result } = call;
if (result) {
onCloseNotification();
}
}
}
renderLogSection() {
const {
formatPhone,
currentLocale,
logNotification,
showNotiLogButton,
onCloseNotification,
onSaveNotification,
onExpandNotification,
onDiscardNotification,
currentNotificationIdentify,
currentSession,
onReject,
onHangup,
shrinkNotification,
} = this.props;
const { call } = logNotification;
const { result, telephonyStatus } = call;
const status = result || telephonyStatus;
let statusI18n = null;
const isIncomingCall =
status === telephonyStatuses.ringing &&
call.direction === callDirections.inbound;
if (isIncomingCall) { | <div className={classnames(styles.root)}>
<div className={styles.notificationModal}>
<div className={styles.modalHeader}>
<div className={styles.modalTitle}>{statusI18n}</div>
<div className={styles.modalCloseBtn}>
<Button dataSign="closeButton" onClick={onCloseNotification}>
<CloseIcon />
</Button>
</div>
</div>
<LogNotification
showEndButton
showLogButton={showNotiLogButton}
currentLocale={currentLocale}
formatPhone={formatPhone}
currentLog={logNotification}
isExpand={logNotification.notificationIsExpand}
onSave={onSaveNotification}
onExpand={onExpandNotification}
onDiscard={onDiscardNotification}
onReject={() => onReject(currentNotificationIdentify)}
onHangup={() => onHangup(currentNotificationIdentify)}
currentSession={currentSession}
shrinkNotification={shrinkNotification}
/>
</div>
</div>
);
}
render() {
return this.renderLogSection();
}
}
NotificationSection.propTypes = {
currentLocale: PropTypes.string.isRequired,
formatPhone: PropTypes.func.isRequired,
// - Notification
logNotification: PropTypes.object,
onCloseNotification: PropTypes.func,
onDiscardNotification: PropTypes.func,
onSaveNotification: PropTypes.func,
onExpandNotification: PropTypes.func,
showNotiLogButton: PropTypes.bool,
currentNotificationIdentify: PropTypes.string,
currentSession: PropTypes.object,
onReject: PropTypes.func.isRequired,
onHangup: PropTypes.func.isRequired,
shrinkNotification: PropTypes.func,
};
NotificationSection.defaultProps = {
// Notification
logNotification: undefined,
onCloseNotification: undefined,
onDiscardNotification: undefined,
onSaveNotification: undefined,
onExpandNotification: undefined,
showNotiLogButton: true,
currentNotificationIdentify: '',
currentSession: undefined,
shrinkNotification: undefined,
}; | statusI18n = i18n.getString('ringing', currentLocale);
} else {
statusI18n = i18n.getString('callConnected', currentLocale);
}
return ( | random_line_split |
index.js | import classnames from 'classnames';
import PropTypes from 'prop-types';
import React, { Component } from 'react';
import telephonyStatuses from 'ringcentral-integration/enums/telephonyStatus';
import callDirections from 'ringcentral-integration/enums/callDirections';
import CloseIcon from '../../assets/images/CloseIcon.svg';
import { Button } from '../Button';
import LogNotification from '../LogNotificationV2';
import styles from './styles.scss';
import i18n from './i18n';
export default class NotificationSection extends Component {
componentWillUpdate(nextProps) {
const {
logNotification,
onCloseNotification,
currentNotificationIdentify,
} = nextProps;
if (currentNotificationIdentify) {
const { call = {} } = logNotification;
const { result } = call;
if (result) {
onCloseNotification();
}
}
}
| () {
const {
formatPhone,
currentLocale,
logNotification,
showNotiLogButton,
onCloseNotification,
onSaveNotification,
onExpandNotification,
onDiscardNotification,
currentNotificationIdentify,
currentSession,
onReject,
onHangup,
shrinkNotification,
} = this.props;
const { call } = logNotification;
const { result, telephonyStatus } = call;
const status = result || telephonyStatus;
let statusI18n = null;
const isIncomingCall =
status === telephonyStatuses.ringing &&
call.direction === callDirections.inbound;
if (isIncomingCall) {
statusI18n = i18n.getString('ringing', currentLocale);
} else {
statusI18n = i18n.getString('callConnected', currentLocale);
}
return (
<div className={classnames(styles.root)}>
<div className={styles.notificationModal}>
<div className={styles.modalHeader}>
<div className={styles.modalTitle}>{statusI18n}</div>
<div className={styles.modalCloseBtn}>
<Button dataSign="closeButton" onClick={onCloseNotification}>
<CloseIcon />
</Button>
</div>
</div>
<LogNotification
showEndButton
showLogButton={showNotiLogButton}
currentLocale={currentLocale}
formatPhone={formatPhone}
currentLog={logNotification}
isExpand={logNotification.notificationIsExpand}
onSave={onSaveNotification}
onExpand={onExpandNotification}
onDiscard={onDiscardNotification}
onReject={() => onReject(currentNotificationIdentify)}
onHangup={() => onHangup(currentNotificationIdentify)}
currentSession={currentSession}
shrinkNotification={shrinkNotification}
/>
</div>
</div>
);
}
render() {
return this.renderLogSection();
}
}
NotificationSection.propTypes = {
currentLocale: PropTypes.string.isRequired,
formatPhone: PropTypes.func.isRequired,
// - Notification
logNotification: PropTypes.object,
onCloseNotification: PropTypes.func,
onDiscardNotification: PropTypes.func,
onSaveNotification: PropTypes.func,
onExpandNotification: PropTypes.func,
showNotiLogButton: PropTypes.bool,
currentNotificationIdentify: PropTypes.string,
currentSession: PropTypes.object,
onReject: PropTypes.func.isRequired,
onHangup: PropTypes.func.isRequired,
shrinkNotification: PropTypes.func,
};
NotificationSection.defaultProps = {
// Notification
logNotification: undefined,
onCloseNotification: undefined,
onDiscardNotification: undefined,
onSaveNotification: undefined,
onExpandNotification: undefined,
showNotiLogButton: true,
currentNotificationIdentify: '',
currentSession: undefined,
shrinkNotification: undefined,
};
| renderLogSection | identifier_name |
CumulusCI.py | import logging
from cumulusci.cli.config import CliConfig
from cumulusci.core.config import TaskConfig
from cumulusci.core.exceptions import TaskNotFoundError
from cumulusci.core.exceptions import TaskOptionsError
from cumulusci.core.tasks import CURRENT_TASK
from cumulusci.core.utils import import_class
from cumulusci.tasks.robotframework.robotframework import Robot
from robot.api import logger
from robot.libraries.BuiltIn import BuiltIn
from simple_salesforce import Salesforce
class CumulusCI(object):
""" Library for accessing CumulusCI for the local git project
This library allows Robot Framework tests to access credentials to a
Salesforce org created by CumulusCI, including Scratch Orgs. It also
exposes the core logic of CumulusCI including interactions with the
    Salesforce APIs and project-specific configuration, including custom
and customized tasks and flows.
Initialization requires a single argument, the org name for the target
CumulusCI org. If running your tests via cci's robot task (recommended),
you can initialize the library in your tests taking advantage of the
variable set by the robot task:
| ``*** Settings ***``
|
| Library cumulusci.robotframework.CumulusCI ${ORG}
"""
ROBOT_LIBRARY_SCOPE = "GLOBAL"
def __init__(self, org_name=None):
if not org_name:
org_name = 'dev'
self.org_name = org_name
self._project_config = None
self._org = None
self._sf = None
self._tooling = None
# Turn off info logging of all http requests
logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARN)
@property
def project_config(self):
if self._project_config is None:
if CURRENT_TASK and isinstance(CURRENT_TASK, Robot):
# If CumulusCI is running a task, use that task's config
return CURRENT_TASK.project_config
else:
logger.console('Initializing CumulusCI config\n')
self._project_config = CliConfig().project_config
return self._project_config
def set_project_config(self, project_config):
logger.console('\n')
self._project_config = project_config
@property
def keychain(self):
return self.project_config.keychain
@property
def org(self):
if self._org is None:
if CURRENT_TASK and isinstance(CURRENT_TASK, Robot):
# If CumulusCI is running a task, use that task's org
return CURRENT_TASK.org_config
else:
self._org = self.keychain.get_org(self.org_name)
return self._org
@property
def sf(self):
if self._sf is None:
|
return self._sf
@property
def tooling(self):
if self._tooling is None:
self._tooling = self._init_api('tooling/')
return self._tooling
def set_login_url(self):
""" Sets the LOGIN_URL variable in the suite scope which will
automatically log into the target Salesforce org.
Typically, this is run during Suite Setup
"""
BuiltIn().set_suite_variable('${LOGIN_URL}', self.org.start_url)
def get_org_info(self):
""" Returns a dictionary of the org information for the current target
Salesforce org
"""
return self.org.config
def login_url(self, org=None):
""" Returns the login url which will automatically log into the target
Salesforce org. By default, the org_name passed to the library
constructor is used but this can be overridden with the org option
to log into a different org.
"""
if org is None:
org = self.org
else:
org = self.keychain.get_org(org)
return org.start_url
def run_task(self, task_name, **options):
""" Runs a named CumulusCI task for the current project with optional
support for overriding task options via kwargs.
Examples:
| =Keyword= | =task_name= | =task_options= | =comment= |
| Run Task | deploy | | Run deploy with standard options |
| Run Task | deploy | path=path/to/some/metadata | Run deploy with custom path |
"""
task_config = self.project_config.get_task(task_name)
class_path = task_config.class_path
logger.console('\n')
task_class, task_config = self._init_task(class_path, options, task_config)
return self._run_task(task_class, task_config)
def run_task_class(self, class_path, **options):
""" Runs a CumulusCI task class with task options via kwargs.
Use this keyword to run logic from CumulusCI tasks which have not
been configured in the project's cumulusci.yml file. This is
        most useful in cases where a test needs task logic that is unique
        to the test and thus not worth making into a named task for the
        project.
Examples:
| =Keyword= | =task_class= | =task_options= |
| Run Task Class | cumulusci.task.utils.DownloadZip | url=http://test.com/test.zip dir=test_zip |
"""
logger.console('\n')
task_class, task_config = self._init_task(class_path, options, TaskConfig())
return self._run_task(task_class, task_config)
def _init_api(self, base_url=None):
api_version = self.project_config.project__package__api_version
rv = Salesforce(
instance=self.org.instance_url.replace('https://', ''),
session_id=self.org.access_token,
version=api_version,
)
if base_url is not None:
rv.base_url += base_url
return rv
def _init_task(self, class_path, options, task_config):
task_class = import_class(class_path)
task_config = self._parse_task_options(options, task_class, task_config)
return task_class, task_config
def _parse_task_options(self, options, task_class, task_config):
if 'options' not in task_config.config:
task_config.config['options'] = {}
# Parse options and add to task config
if options:
for name, value in options.items():
# Validate the option
if name not in task_class.task_options:
raise TaskOptionsError(
'Option "{}" is not available for task {}'.format(
name,
                            task_class.__name__,
),
)
# Override the option in the task config
task_config.config['options'][name] = value
return task_config
def _run_task(self, task_class, task_config):
exception = None
task = task_class(self.project_config,
task_config, org_config=self.org)
task()
return task.return_values
| self._sf = self._init_api() | conditional_block |
CumulusCI.py | import logging
from cumulusci.cli.config import CliConfig
from cumulusci.core.config import TaskConfig
from cumulusci.core.exceptions import TaskNotFoundError
from cumulusci.core.exceptions import TaskOptionsError
from cumulusci.core.tasks import CURRENT_TASK
from cumulusci.core.utils import import_class
from cumulusci.tasks.robotframework.robotframework import Robot
from robot.api import logger
from robot.libraries.BuiltIn import BuiltIn
from simple_salesforce import Salesforce
class CumulusCI(object):
""" Library for accessing CumulusCI for the local git project
This library allows Robot Framework tests to access credentials to a
Salesforce org created by CumulusCI, including Scratch Orgs. It also
exposes the core logic of CumulusCI including interactions with the
    Salesforce APIs and project-specific configuration, including custom
and customized tasks and flows.
Initialization requires a single argument, the org name for the target
CumulusCI org. If running your tests via cci's robot task (recommended),
you can initialize the library in your tests taking advantage of the
variable set by the robot task:
| ``*** Settings ***``
|
| Library cumulusci.robotframework.CumulusCI ${ORG}
"""
ROBOT_LIBRARY_SCOPE = "GLOBAL"
def __init__(self, org_name=None):
if not org_name:
org_name = 'dev'
self.org_name = org_name
self._project_config = None
self._org = None
self._sf = None
self._tooling = None
# Turn off info logging of all http requests
logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARN)
@property
def project_config(self):
if self._project_config is None:
if CURRENT_TASK and isinstance(CURRENT_TASK, Robot):
# If CumulusCI is running a task, use that task's config
return CURRENT_TASK.project_config
else:
logger.console('Initializing CumulusCI config\n')
self._project_config = CliConfig().project_config
return self._project_config
def set_project_config(self, project_config):
logger.console('\n')
self._project_config = project_config
@property
def keychain(self):
return self.project_config.keychain
@property
def org(self):
if self._org is None:
if CURRENT_TASK and isinstance(CURRENT_TASK, Robot):
# If CumulusCI is running a task, use that task's org
return CURRENT_TASK.org_config
else:
self._org = self.keychain.get_org(self.org_name)
return self._org
@property
def sf(self):
if self._sf is None:
self._sf = self._init_api()
return self._sf
@property
def tooling(self):
if self._tooling is None:
self._tooling = self._init_api('tooling/')
return self._tooling
def set_login_url(self):
""" Sets the LOGIN_URL variable in the suite scope which will
automatically log into the target Salesforce org.
Typically, this is run during Suite Setup
"""
BuiltIn().set_suite_variable('${LOGIN_URL}', self.org.start_url)
def get_org_info(self):
""" Returns a dictionary of the org information for the current target
Salesforce org
"""
return self.org.config
def login_url(self, org=None):
|
def run_task(self, task_name, **options):
""" Runs a named CumulusCI task for the current project with optional
support for overriding task options via kwargs.
Examples:
| =Keyword= | =task_name= | =task_options= | =comment= |
| Run Task | deploy | | Run deploy with standard options |
| Run Task | deploy | path=path/to/some/metadata | Run deploy with custom path |
"""
task_config = self.project_config.get_task(task_name)
class_path = task_config.class_path
logger.console('\n')
task_class, task_config = self._init_task(class_path, options, task_config)
return self._run_task(task_class, task_config)
def run_task_class(self, class_path, **options):
""" Runs a CumulusCI task class with task options via kwargs.
Use this keyword to run logic from CumulusCI tasks which have not
been configured in the project's cumulusci.yml file. This is
        most useful in cases where a test needs task logic that is unique
        to the test and thus not worth making into a named task for the
        project.
Examples:
| =Keyword= | =task_class= | =task_options= |
| Run Task Class | cumulusci.task.utils.DownloadZip | url=http://test.com/test.zip dir=test_zip |
"""
logger.console('\n')
task_class, task_config = self._init_task(class_path, options, TaskConfig())
return self._run_task(task_class, task_config)
def _init_api(self, base_url=None):
api_version = self.project_config.project__package__api_version
rv = Salesforce(
instance=self.org.instance_url.replace('https://', ''),
session_id=self.org.access_token,
version=api_version,
)
if base_url is not None:
rv.base_url += base_url
return rv
def _init_task(self, class_path, options, task_config):
task_class = import_class(class_path)
task_config = self._parse_task_options(options, task_class, task_config)
return task_class, task_config
def _parse_task_options(self, options, task_class, task_config):
if 'options' not in task_config.config:
task_config.config['options'] = {}
# Parse options and add to task config
if options:
for name, value in options.items():
# Validate the option
if name not in task_class.task_options:
raise TaskOptionsError(
'Option "{}" is not available for task {}'.format(
name,
                            task_class.__name__,
),
)
# Override the option in the task config
task_config.config['options'][name] = value
return task_config
def _run_task(self, task_class, task_config):
exception = None
task = task_class(self.project_config,
task_config, org_config=self.org)
task()
return task.return_values
| """ Returns the login url which will automatically log into the target
Salesforce org. By default, the org_name passed to the library
constructor is used but this can be overridden with the org option
to log into a different org.
"""
if org is None:
org = self.org
else:
org = self.keychain.get_org(org)
return org.start_url | identifier_body |
CumulusCI.py | import logging
from cumulusci.cli.config import CliConfig
from cumulusci.core.config import TaskConfig
from cumulusci.core.exceptions import TaskNotFoundError
from cumulusci.core.exceptions import TaskOptionsError
from cumulusci.core.tasks import CURRENT_TASK
from cumulusci.core.utils import import_class
from cumulusci.tasks.robotframework.robotframework import Robot
from robot.api import logger
from robot.libraries.BuiltIn import BuiltIn
from simple_salesforce import Salesforce
class CumulusCI(object):
""" Library for accessing CumulusCI for the local git project
This library allows Robot Framework tests to access credentials to a
Salesforce org created by CumulusCI, including Scratch Orgs. It also
exposes the core logic of CumulusCI including interactions with the
    Salesforce APIs and project-specific configuration, including custom
and customized tasks and flows.
Initialization requires a single argument, the org name for the target
CumulusCI org. If running your tests via cci's robot task (recommended),
you can initialize the library in your tests taking advantage of the
variable set by the robot task:
| ``*** Settings ***``
|
| Library cumulusci.robotframework.CumulusCI ${ORG}
"""
ROBOT_LIBRARY_SCOPE = "GLOBAL"
def __init__(self, org_name=None):
if not org_name:
org_name = 'dev'
self.org_name = org_name
self._project_config = None
self._org = None
self._sf = None
self._tooling = None
# Turn off info logging of all http requests
logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARN)
@property
def project_config(self):
if self._project_config is None:
if CURRENT_TASK and isinstance(CURRENT_TASK, Robot):
# If CumulusCI is running a task, use that task's config
return CURRENT_TASK.project_config
else:
logger.console('Initializing CumulusCI config\n')
self._project_config = CliConfig().project_config
return self._project_config
def set_project_config(self, project_config):
logger.console('\n')
self._project_config = project_config
@property
def keychain(self):
return self.project_config.keychain
@property
def org(self):
if self._org is None:
if CURRENT_TASK and isinstance(CURRENT_TASK, Robot):
# If CumulusCI is running a task, use that task's org
return CURRENT_TASK.org_config
else:
self._org = self.keychain.get_org(self.org_name)
return self._org
@property
def sf(self):
if self._sf is None:
self._sf = self._init_api()
return self._sf
@property
def tooling(self):
if self._tooling is None:
self._tooling = self._init_api('tooling/')
return self._tooling
def set_login_url(self):
""" Sets the LOGIN_URL variable in the suite scope which will
automatically log into the target Salesforce org.
Typically, this is run during Suite Setup
"""
BuiltIn().set_suite_variable('${LOGIN_URL}', self.org.start_url)
def get_org_info(self):
""" Returns a dictionary of the org information for the current target
Salesforce org
"""
return self.org.config
def login_url(self, org=None):
""" Returns the login url which will automatically log into the target
Salesforce org. By default, the org_name passed to the library
constructor is used but this can be overridden with the org option
to log into a different org.
"""
if org is None:
org = self.org
else:
org = self.keychain.get_org(org)
return org.start_url
def run_task(self, task_name, **options):
""" Runs a named CumulusCI task for the current project with optional
support for overriding task options via kwargs.
Examples:
| =Keyword= | =task_name= | =task_options= | =comment= |
| Run Task | deploy | | Run deploy with standard options |
| Run Task | deploy | path=path/to/some/metadata | Run deploy with custom path |
"""
task_config = self.project_config.get_task(task_name)
class_path = task_config.class_path
logger.console('\n')
task_class, task_config = self._init_task(class_path, options, task_config)
return self._run_task(task_class, task_config)
def run_task_class(self, class_path, **options):
""" Runs a CumulusCI task class with task options via kwargs.
Use this keyword to run logic from CumulusCI tasks which have not
been configured in the project's cumulusci.yml file. This is
        most useful in cases where a test needs task logic that is unique
        to the test and thus not worth making into a named task for the
        project.
Examples:
| =Keyword= | =task_class= | =task_options= |
| Run Task Class | cumulusci.task.utils.DownloadZip | url=http://test.com/test.zip dir=test_zip |
"""
logger.console('\n')
task_class, task_config = self._init_task(class_path, options, TaskConfig())
return self._run_task(task_class, task_config)
def _init_api(self, base_url=None):
api_version = self.project_config.project__package__api_version
| if base_url is not None:
rv.base_url += base_url
return rv
def _init_task(self, class_path, options, task_config):
task_class = import_class(class_path)
task_config = self._parse_task_options(options, task_class, task_config)
return task_class, task_config
def _parse_task_options(self, options, task_class, task_config):
if 'options' not in task_config.config:
task_config.config['options'] = {}
# Parse options and add to task config
if options:
for name, value in options.items():
# Validate the option
if name not in task_class.task_options:
raise TaskOptionsError(
'Option "{}" is not available for task {}'.format(
name,
                            task_class.__name__,
),
)
# Override the option in the task config
task_config.config['options'][name] = value
return task_config
def _run_task(self, task_class, task_config):
exception = None
task = task_class(self.project_config,
task_config, org_config=self.org)
task()
return task.return_values | rv = Salesforce(
instance=self.org.instance_url.replace('https://', ''),
session_id=self.org.access_token,
version=api_version,
) | random_line_split |
CumulusCI.py | import logging
from cumulusci.cli.config import CliConfig
from cumulusci.core.config import TaskConfig
from cumulusci.core.exceptions import TaskNotFoundError
from cumulusci.core.exceptions import TaskOptionsError
from cumulusci.core.tasks import CURRENT_TASK
from cumulusci.core.utils import import_class
from cumulusci.tasks.robotframework.robotframework import Robot
from robot.api import logger
from robot.libraries.BuiltIn import BuiltIn
from simple_salesforce import Salesforce
class CumulusCI(object):
""" Library for accessing CumulusCI for the local git project
This library allows Robot Framework tests to access credentials to a
Salesforce org created by CumulusCI, including Scratch Orgs. It also
exposes the core logic of CumulusCI including interactions with the
    Salesforce APIs and project-specific configuration, including custom
and customized tasks and flows.
Initialization requires a single argument, the org name for the target
CumulusCI org. If running your tests via cci's robot task (recommended),
you can initialize the library in your tests taking advantage of the
variable set by the robot task:
| ``*** Settings ***``
|
| Library cumulusci.robotframework.CumulusCI ${ORG}
"""
ROBOT_LIBRARY_SCOPE = "GLOBAL"
def __init__(self, org_name=None):
if not org_name:
org_name = 'dev'
self.org_name = org_name
self._project_config = None
self._org = None
self._sf = None
self._tooling = None
# Turn off info logging of all http requests
logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARN)
@property
def project_config(self):
if self._project_config is None:
if CURRENT_TASK and isinstance(CURRENT_TASK, Robot):
# If CumulusCI is running a task, use that task's config
return CURRENT_TASK.project_config
else:
logger.console('Initializing CumulusCI config\n')
self._project_config = CliConfig().project_config
return self._project_config
def set_project_config(self, project_config):
logger.console('\n')
self._project_config = project_config
@property
def keychain(self):
return self.project_config.keychain
@property
def | (self):
if self._org is None:
if CURRENT_TASK and isinstance(CURRENT_TASK, Robot):
# If CumulusCI is running a task, use that task's org
return CURRENT_TASK.org_config
else:
self._org = self.keychain.get_org(self.org_name)
return self._org
@property
def sf(self):
if self._sf is None:
self._sf = self._init_api()
return self._sf
@property
def tooling(self):
if self._tooling is None:
self._tooling = self._init_api('tooling/')
return self._tooling
def set_login_url(self):
""" Sets the LOGIN_URL variable in the suite scope which will
automatically log into the target Salesforce org.
Typically, this is run during Suite Setup
"""
BuiltIn().set_suite_variable('${LOGIN_URL}', self.org.start_url)
def get_org_info(self):
""" Returns a dictionary of the org information for the current target
Salesforce org
"""
return self.org.config
def login_url(self, org=None):
""" Returns the login url which will automatically log into the target
Salesforce org. By default, the org_name passed to the library
constructor is used but this can be overridden with the org option
to log into a different org.
"""
if org is None:
org = self.org
else:
org = self.keychain.get_org(org)
return org.start_url
def run_task(self, task_name, **options):
""" Runs a named CumulusCI task for the current project with optional
support for overriding task options via kwargs.
Examples:
| =Keyword= | =task_name= | =task_options= | =comment= |
| Run Task | deploy | | Run deploy with standard options |
| Run Task | deploy | path=path/to/some/metadata | Run deploy with custom path |
"""
task_config = self.project_config.get_task(task_name)
class_path = task_config.class_path
logger.console('\n')
task_class, task_config = self._init_task(class_path, options, task_config)
return self._run_task(task_class, task_config)
def run_task_class(self, class_path, **options):
""" Runs a CumulusCI task class with task options via kwargs.
Use this keyword to run logic from CumulusCI tasks which have not
been configured in the project's cumulusci.yml file. This is
        most useful in cases where a test needs task logic that is unique
        to the test and thus not worth making into a named task for the
        project.
Examples:
| =Keyword= | =task_class= | =task_options= |
| Run Task Class | cumulusci.task.utils.DownloadZip | url=http://test.com/test.zip dir=test_zip |
"""
logger.console('\n')
task_class, task_config = self._init_task(class_path, options, TaskConfig())
return self._run_task(task_class, task_config)
def _init_api(self, base_url=None):
api_version = self.project_config.project__package__api_version
rv = Salesforce(
instance=self.org.instance_url.replace('https://', ''),
session_id=self.org.access_token,
version=api_version,
)
if base_url is not None:
rv.base_url += base_url
return rv
def _init_task(self, class_path, options, task_config):
task_class = import_class(class_path)
task_config = self._parse_task_options(options, task_class, task_config)
return task_class, task_config
def _parse_task_options(self, options, task_class, task_config):
if 'options' not in task_config.config:
task_config.config['options'] = {}
# Parse options and add to task config
if options:
for name, value in options.items():
# Validate the option
if name not in task_class.task_options:
raise TaskOptionsError(
'Option "{}" is not available for task {}'.format(
name,
                            task_class.__name__,
),
)
# Override the option in the task config
task_config.config['options'][name] = value
return task_config
def _run_task(self, task_class, task_config):
exception = None
task = task_class(self.project_config,
task_config, org_config=self.org)
task()
return task.return_values
| org | identifier_name |
draw_texture_program_info.ts | /**
* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import {Rotation} from './types';
export function vertexShaderSource(
flipHorizontal: boolean, flipVertical: boolean, rotation: Rotation) {
const horizontalScale = flipHorizontal ? -1 : 1;
const verticalScale = flipVertical ? -1 : 1;
const rotateAngle = rotation === 0 ? '0.' : rotation * (Math.PI / 180);
return `#version 300 es
precision highp float;
in vec2 position;
in vec2 texCoords;
out vec2 uv;
vec2 rotate(vec2 uvCoods, vec2 pivot, float rotation) {
float cosa = cos(rotation);
float sina = sin(rotation);
uvCoods -= pivot;
return vec2(
cosa * uvCoods.x - sina * uvCoods.y,
cosa * uvCoods.y + sina * uvCoods.x
) + pivot;
}
void main() {
uv = rotate(texCoords, vec2(0.5), ${rotateAngle});
// Invert geometry to match the image orientation from the camera.
gl_Position = vec4(position * vec2(${horizontalScale}., ${
verticalScale}. * -1.), 0, 1);
}`;
}
export function fragmentShaderSource() {
return `#version 300 es
precision highp float;
uniform sampler2D inputTexture;
in vec2 uv;
out vec4 fragColor;
void main() {
vec4 texSample = texture(inputTexture, uv);
fragColor = texSample;
}`;
}
export function vertices() |
export function texCoords() {
return new Float32Array([
// clang-format off
0, 0,
0, 1,
1, 1,
1, 1,
0, 0,
1, 0,
// clang-format on
]);
}
| {
return new Float32Array([
// clang-format off
-1, -1,
-1, 1,
1, 1,
1, 1,
-1, -1,
1, -1,
// clang-format on
]);
} | identifier_body |
draw_texture_program_info.ts | /**
* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import {Rotation} from './types';
export function vertexShaderSource(
flipHorizontal: boolean, flipVertical: boolean, rotation: Rotation) {
const horizontalScale = flipHorizontal ? -1 : 1;
const verticalScale = flipVertical ? -1 : 1;
const rotateAngle = rotation === 0 ? '0.' : rotation * (Math.PI / 180);
return `#version 300 es
precision highp float;
in vec2 position;
in vec2 texCoords;
out vec2 uv;
vec2 rotate(vec2 uvCoods, vec2 pivot, float rotation) {
float cosa = cos(rotation);
float sina = sin(rotation);
uvCoods -= pivot;
return vec2(
cosa * uvCoods.x - sina * uvCoods.y,
cosa * uvCoods.y + sina * uvCoods.x
) + pivot;
}
void main() {
uv = rotate(texCoords, vec2(0.5), ${rotateAngle});
// Invert geometry to match the image orientation from the camera.
gl_Position = vec4(position * vec2(${horizontalScale}., ${
verticalScale}. * -1.), 0, 1);
}`;
}
export function fragmentShaderSource() {
return `#version 300 es
precision highp float;
uniform sampler2D inputTexture;
in vec2 uv;
out vec4 fragColor;
void main() {
vec4 texSample = texture(inputTexture, uv);
fragColor = texSample;
}`;
}
export function | () {
return new Float32Array([
// clang-format off
-1, -1,
-1, 1,
1, 1,
1, 1,
-1, -1,
1, -1,
// clang-format on
]);
}
export function texCoords() {
return new Float32Array([
// clang-format off
0, 0,
0, 1,
1, 1,
1, 1,
0, 0,
1, 0,
// clang-format on
]);
}
| vertices | identifier_name |
draw_texture_program_info.ts | /**
* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import {Rotation} from './types';
export function vertexShaderSource(
flipHorizontal: boolean, flipVertical: boolean, rotation: Rotation) {
const horizontalScale = flipHorizontal ? -1 : 1;
const verticalScale = flipVertical ? -1 : 1;
const rotateAngle = rotation === 0 ? '0.' : rotation * (Math.PI / 180);
return `#version 300 es
precision highp float;
in vec2 position;
in vec2 texCoords;
out vec2 uv;
vec2 rotate(vec2 uvCoods, vec2 pivot, float rotation) {
float cosa = cos(rotation);
float sina = sin(rotation);
uvCoods -= pivot;
return vec2(
cosa * uvCoods.x - sina * uvCoods.y,
cosa * uvCoods.y + sina * uvCoods.x
) + pivot;
}
void main() {
uv = rotate(texCoords, vec2(0.5), ${rotateAngle});
| verticalScale}. * -1.), 0, 1);
}`;
}
export function fragmentShaderSource() {
return `#version 300 es
precision highp float;
uniform sampler2D inputTexture;
in vec2 uv;
out vec4 fragColor;
void main() {
vec4 texSample = texture(inputTexture, uv);
fragColor = texSample;
}`;
}
export function vertices() {
return new Float32Array([
// clang-format off
-1, -1,
-1, 1,
1, 1,
1, 1,
-1, -1,
1, -1,
// clang-format on
]);
}
export function texCoords() {
return new Float32Array([
// clang-format off
0, 0,
0, 1,
1, 1,
1, 1,
0, 0,
1, 0,
// clang-format on
]);
} | // Invert geometry to match the image orientation from the camera.
gl_Position = vec4(position * vec2(${horizontalScale}., ${ | random_line_split |
reftest.rs | // Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate extra;
extern crate png;
extern crate std;
extern crate test;
use std::io;
use std::io::{File, Reader, Process};
use std::io::process::ExitStatus;
use std::os;
use std::str;
use test::{DynTestName, DynTestFn, TestDesc, TestOpts, TestDescAndFn};
use test::run_tests_console;
fn main() {
let args = os::args();
let mut parts = args.tail().split(|e| "--" == e.as_slice());
let files = parts.next().unwrap(); // .split() is never empty
let servo_args = parts.next().unwrap_or(&[]);
if files.len() == 0 {
fail!("error: at least one reftest list must be given");
}
let tests = parse_lists(files, servo_args);
let test_opts = TestOpts {
filter: None,
run_ignored: false,
logfile: None,
run_tests: true,
run_benchmarks: false,
ratchet_noise_percent: None,
ratchet_metrics: None,
save_metrics: None,
test_shard: None,
};
match run_tests_console(&test_opts, tests) {
Ok(false) => os::set_exit_status(1), // tests failed
Err(_) => os::set_exit_status(2), // I/O-related failure
_ => (),
}
}
#[deriving(Eq)]
enum ReftestKind {
Same,
Different,
}
struct Reftest {
name: ~str,
kind: ReftestKind,
files: [~str, ..2],
id: uint,
servo_args: ~[~str],
}
fn parse_lists(filenames: &[~str], servo_args: &[~str]) -> ~[TestDescAndFn] {
let mut tests: ~[TestDescAndFn] = ~[];
let mut next_id = 0;
for file in filenames.iter() {
let file_path = Path::new(file.clone());
let contents = match File::open_mode(&file_path, io::Open, io::Read)
.and_then(|mut f| {
f.read_to_end()
}) {
Ok(s) => str::from_utf8_owned(s),
_ => fail!("Could not read file"),
};
for line in contents.unwrap().lines() {
// ignore comments |
let parts: ~[&str] = line.split(' ').filter(|p| !p.is_empty()).collect();
if parts.len() != 3 {
fail!("reftest line: '{:s}' doesn't match 'KIND LEFT RIGHT'", line);
}
let kind = match parts[0] {
"==" => Same,
"!=" => Different,
_ => fail!("reftest line: '{:s}' has invalid kind '{:s}'",
line, parts[0])
};
let src_path = file_path.dir_path();
let src_dir = src_path.display().to_str();
let file_left = src_dir + "/" + parts[1];
let file_right = src_dir + "/" + parts[2];
let reftest = Reftest {
name: parts[1] + " / " + parts[2],
kind: kind,
files: [file_left, file_right],
id: next_id,
servo_args: servo_args.to_owned(),
};
next_id += 1;
tests.push(make_test(reftest));
}
}
tests
}
fn make_test(reftest: Reftest) -> TestDescAndFn {
let name = reftest.name.clone();
TestDescAndFn {
desc: TestDesc {
name: DynTestName(name),
ignore: false,
should_fail: false,
},
testfn: DynTestFn(proc() {
check_reftest(reftest);
}),
}
}
fn capture(reftest: &Reftest, side: uint) -> png::Image {
let filename = format!("/tmp/servo-reftest-{:06u}-{:u}.png", reftest.id, side);
let mut args = reftest.servo_args.clone();
args.push_all_move(~[~"-f", ~"-o", filename.clone(), reftest.files[side].clone()]);
let retval = match Process::status("./servo", args) {
Ok(status) => status,
Err(e) => fail!("failed to execute process: {}", e),
};
assert!(retval == ExitStatus(0));
png::load_png(&from_str::<Path>(filename).unwrap()).unwrap()
}
fn check_reftest(reftest: Reftest) {
let left = capture(&reftest, 0);
let right = capture(&reftest, 1);
let pixels: ~[u8] = left.pixels.iter().zip(right.pixels.iter()).map(|(&a, &b)| {
if a as i8 - b as i8 == 0 {
// White for correct
0xFF
} else {
// "1100" in the RGBA channel with an error for an incorrect value
// This results in some number of C0 and FFs, which is much more
// readable (and distinguishable) than the previous difference-wise
// scaling but does not require reconstructing the actual RGBA pixel.
0xC0
}
}).collect();
if pixels.iter().any(|&a| a < 255) {
let output = from_str::<Path>(format!("/tmp/servo-reftest-{:06u}-diff.png", reftest.id)).unwrap();
let img = png::Image {
width: left.width,
height: left.height,
color_type: png::RGBA8,
pixels: pixels,
};
let res = png::store_png(&img, &output);
assert!(res.is_ok());
assert!(reftest.kind == Different);
} else {
assert!(reftest.kind == Same);
}
} | if line.starts_with("#") {
continue;
} | random_line_split |
reftest.rs | // Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate extra;
extern crate png;
extern crate std;
extern crate test;
use std::io;
use std::io::{File, Reader, Process};
use std::io::process::ExitStatus;
use std::os;
use std::str;
use test::{DynTestName, DynTestFn, TestDesc, TestOpts, TestDescAndFn};
use test::run_tests_console;
fn main() {
let args = os::args();
let mut parts = args.tail().split(|e| "--" == e.as_slice());
let files = parts.next().unwrap(); // .split() is never empty
let servo_args = parts.next().unwrap_or(&[]);
if files.len() == 0 {
fail!("error: at least one reftest list must be given");
}
let tests = parse_lists(files, servo_args);
let test_opts = TestOpts {
filter: None,
run_ignored: false,
logfile: None,
run_tests: true,
run_benchmarks: false,
ratchet_noise_percent: None,
ratchet_metrics: None,
save_metrics: None,
test_shard: None,
};
match run_tests_console(&test_opts, tests) {
Ok(false) => os::set_exit_status(1), // tests failed
Err(_) => os::set_exit_status(2), // I/O-related failure
_ => (),
}
}
#[deriving(Eq)]
enum ReftestKind {
Same,
Different,
}
struct Reftest {
name: ~str,
kind: ReftestKind,
files: [~str, ..2],
id: uint,
servo_args: ~[~str],
}
fn parse_lists(filenames: &[~str], servo_args: &[~str]) -> ~[TestDescAndFn] {
let mut tests: ~[TestDescAndFn] = ~[];
let mut next_id = 0;
for file in filenames.iter() {
let file_path = Path::new(file.clone());
let contents = match File::open_mode(&file_path, io::Open, io::Read)
.and_then(|mut f| {
f.read_to_end()
}) {
Ok(s) => str::from_utf8_owned(s),
_ => fail!("Could not read file"),
};
for line in contents.unwrap().lines() {
// ignore comments
if line.starts_with("#") {
continue;
}
let parts: ~[&str] = line.split(' ').filter(|p| !p.is_empty()).collect();
if parts.len() != 3 {
fail!("reftest line: '{:s}' doesn't match 'KIND LEFT RIGHT'", line);
}
let kind = match parts[0] {
"==" => Same,
"!=" => Different,
_ => fail!("reftest line: '{:s}' has invalid kind '{:s}'",
line, parts[0])
};
let src_path = file_path.dir_path();
let src_dir = src_path.display().to_str();
let file_left = src_dir + "/" + parts[1];
let file_right = src_dir + "/" + parts[2];
let reftest = Reftest {
name: parts[1] + " / " + parts[2],
kind: kind,
files: [file_left, file_right],
id: next_id,
servo_args: servo_args.to_owned(),
};
next_id += 1;
tests.push(make_test(reftest));
}
}
tests
}
fn make_test(reftest: Reftest) -> TestDescAndFn |
fn capture(reftest: &Reftest, side: uint) -> png::Image {
let filename = format!("/tmp/servo-reftest-{:06u}-{:u}.png", reftest.id, side);
let mut args = reftest.servo_args.clone();
args.push_all_move(~[~"-f", ~"-o", filename.clone(), reftest.files[side].clone()]);
let retval = match Process::status("./servo", args) {
Ok(status) => status,
Err(e) => fail!("failed to execute process: {}", e),
};
assert!(retval == ExitStatus(0));
png::load_png(&from_str::<Path>(filename).unwrap()).unwrap()
}
fn check_reftest(reftest: Reftest) {
let left = capture(&reftest, 0);
let right = capture(&reftest, 1);
let pixels: ~[u8] = left.pixels.iter().zip(right.pixels.iter()).map(|(&a, &b)| {
if a as i8 - b as i8 == 0 {
// White for correct
0xFF
} else {
// "1100" in the RGBA channel with an error for an incorrect value
// This results in some number of C0 and FFs, which is much more
// readable (and distinguishable) than the previous difference-wise
// scaling but does not require reconstructing the actual RGBA pixel.
0xC0
}
}).collect();
if pixels.iter().any(|&a| a < 255) {
let output = from_str::<Path>(format!("/tmp/servo-reftest-{:06u}-diff.png", reftest.id)).unwrap();
let img = png::Image {
width: left.width,
height: left.height,
color_type: png::RGBA8,
pixels: pixels,
};
let res = png::store_png(&img, &output);
assert!(res.is_ok());
assert!(reftest.kind == Different);
} else {
assert!(reftest.kind == Same);
}
}
| {
let name = reftest.name.clone();
TestDescAndFn {
desc: TestDesc {
name: DynTestName(name),
ignore: false,
should_fail: false,
},
testfn: DynTestFn(proc() {
check_reftest(reftest);
}),
}
} | identifier_body |
reftest.rs | // Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate extra;
extern crate png;
extern crate std;
extern crate test;
use std::io;
use std::io::{File, Reader, Process};
use std::io::process::ExitStatus;
use std::os;
use std::str;
use test::{DynTestName, DynTestFn, TestDesc, TestOpts, TestDescAndFn};
use test::run_tests_console;
fn main() {
let args = os::args();
let mut parts = args.tail().split(|e| "--" == e.as_slice());
let files = parts.next().unwrap(); // .split() is never empty
let servo_args = parts.next().unwrap_or(&[]);
if files.len() == 0 {
fail!("error: at least one reftest list must be given");
}
let tests = parse_lists(files, servo_args);
let test_opts = TestOpts {
filter: None,
run_ignored: false,
logfile: None,
run_tests: true,
run_benchmarks: false,
ratchet_noise_percent: None,
ratchet_metrics: None,
save_metrics: None,
test_shard: None,
};
match run_tests_console(&test_opts, tests) {
Ok(false) => os::set_exit_status(1), // tests failed
Err(_) => os::set_exit_status(2), // I/O-related failure
_ => (),
}
}
#[deriving(Eq)]
enum ReftestKind {
Same,
Different,
}
struct Reftest {
name: ~str,
kind: ReftestKind,
files: [~str, ..2],
id: uint,
servo_args: ~[~str],
}
fn parse_lists(filenames: &[~str], servo_args: &[~str]) -> ~[TestDescAndFn] {
let mut tests: ~[TestDescAndFn] = ~[];
let mut next_id = 0;
for file in filenames.iter() {
let file_path = Path::new(file.clone());
let contents = match File::open_mode(&file_path, io::Open, io::Read)
.and_then(|mut f| {
f.read_to_end()
}) {
Ok(s) => str::from_utf8_owned(s),
_ => fail!("Could not read file"),
};
for line in contents.unwrap().lines() {
// ignore comments
if line.starts_with("#") {
continue;
}
let parts: ~[&str] = line.split(' ').filter(|p| !p.is_empty()).collect();
if parts.len() != 3 {
fail!("reftest line: '{:s}' doesn't match 'KIND LEFT RIGHT'", line);
}
let kind = match parts[0] {
"==" => Same,
"!=" => Different,
_ => fail!("reftest line: '{:s}' has invalid kind '{:s}'",
line, parts[0])
};
let src_path = file_path.dir_path();
let src_dir = src_path.display().to_str();
let file_left = src_dir + "/" + parts[1];
let file_right = src_dir + "/" + parts[2];
let reftest = Reftest {
name: parts[1] + " / " + parts[2],
kind: kind,
files: [file_left, file_right],
id: next_id,
servo_args: servo_args.to_owned(),
};
next_id += 1;
tests.push(make_test(reftest));
}
}
tests
}
fn make_test(reftest: Reftest) -> TestDescAndFn {
let name = reftest.name.clone();
TestDescAndFn {
desc: TestDesc {
name: DynTestName(name),
ignore: false,
should_fail: false,
},
testfn: DynTestFn(proc() {
check_reftest(reftest);
}),
}
}
fn | (reftest: &Reftest, side: uint) -> png::Image {
let filename = format!("/tmp/servo-reftest-{:06u}-{:u}.png", reftest.id, side);
let mut args = reftest.servo_args.clone();
args.push_all_move(~[~"-f", ~"-o", filename.clone(), reftest.files[side].clone()]);
let retval = match Process::status("./servo", args) {
Ok(status) => status,
Err(e) => fail!("failed to execute process: {}", e),
};
assert!(retval == ExitStatus(0));
png::load_png(&from_str::<Path>(filename).unwrap()).unwrap()
}
fn check_reftest(reftest: Reftest) {
let left = capture(&reftest, 0);
let right = capture(&reftest, 1);
let pixels: ~[u8] = left.pixels.iter().zip(right.pixels.iter()).map(|(&a, &b)| {
if a as i8 - b as i8 == 0 {
// White for correct
0xFF
} else {
// "1100" in the RGBA channel with an error for an incorrect value
// This results in some number of C0 and FFs, which is much more
// readable (and distinguishable) than the previous difference-wise
// scaling but does not require reconstructing the actual RGBA pixel.
0xC0
}
}).collect();
if pixels.iter().any(|&a| a < 255) {
let output = from_str::<Path>(format!("/tmp/servo-reftest-{:06u}-diff.png", reftest.id)).unwrap();
let img = png::Image {
width: left.width,
height: left.height,
color_type: png::RGBA8,
pixels: pixels,
};
let res = png::store_png(&img, &output);
assert!(res.is_ok());
assert!(reftest.kind == Different);
} else {
assert!(reftest.kind == Same);
}
}
| capture | identifier_name |
models.py | (image):
"""
Validates that a particular image is small enough to be a badge and square.
"""
if image.width != image.height:
raise ValidationError(_(u"The badge image must be square."))
if not image.size < (250 * 1024):
raise ValidationError(_(u"The badge image file size must be less than 250KB."))
def validate_lowercase(string):
"""
Validates that a string is lowercase.
"""
if not string.islower():
raise ValidationError(_(u"This value must be all lowercase."))
class CourseBadgesDisabledError(Exception):
"""
Exception raised when Course Badges aren't enabled, but an attempt to fetch one is made anyway.
"""
class BadgeClass(models.Model):
"""
Specifies a badge class to be registered with a backend.
"""
slug = models.SlugField(max_length=255, validators=[validate_lowercase])
issuing_component = models.SlugField(max_length=50, default='', blank=True, validators=[validate_lowercase])
display_name = models.CharField(max_length=255)
course_id = CourseKeyField(max_length=255, blank=True, default=None)
description = models.TextField()
criteria = models.TextField()
# Mode a badge was awarded for. Included for legacy/migration purposes.
mode = models.CharField(max_length=100, default='', blank=True)
image = models.ImageField(upload_to='badge_classes', validators=[validate_badge_image])
def __unicode__(self):
return u"<Badge '{slug}' for '{issuing_component}'>".format(
slug=self.slug, issuing_component=self.issuing_component
)
@classmethod
def get_badge_class(
cls, slug, issuing_component, display_name=None, description=None, criteria=None, image_file_handle=None,
mode='', course_id=None, create=True
):
"""
Looks up a badge class by its slug, issuing component, and course_id and returns it should it exist.
If it does not exist, and create is True, creates it according to the arguments. Otherwise, returns None.
The expectation is that an XBlock or platform developer should not need to concern themselves with whether
or not a badge class has already been created, but should just feed all requirements to this function
and it will 'do the right thing'. It should be the exception, rather than the common case, that a badge class
would need to be looked up without also being created were it missing.
"""
slug = slug.lower()
issuing_component = issuing_component.lower()
if course_id and not modulestore().get_course(course_id).issue_badges:
raise CourseBadgesDisabledError("This course does not have badges enabled.")
if not course_id:
course_id = CourseKeyField.Empty
try:
return cls.objects.get(slug=slug, issuing_component=issuing_component, course_id=course_id)
except cls.DoesNotExist:
if not create:
return None
badge_class = cls(
slug=slug,
issuing_component=issuing_component,
display_name=display_name,
course_id=course_id,
mode=mode,
description=description,
criteria=criteria,
)
badge_class.image.save(image_file_handle.name, image_file_handle)
badge_class.full_clean()
badge_class.save()
return badge_class
@lazy
def backend(self):
"""
Loads the badging backend.
"""
module, klass = settings.BADGING_BACKEND.rsplit('.', 1)
module = import_module(module)
return getattr(module, klass)()
def get_for_user(self, user):
"""
Get the assertion for this badge class for this user, if it has been awarded.
"""
return self.badgeassertion_set.filter(user=user)
def award(self, user, evidence_url=None):
"""
Contacts the backend to have a badge assertion created for this badge class for this user.
"""
return self.backend.award(self, user, evidence_url=evidence_url)
def save(self, **kwargs):
"""
Slugs must always be lowercase.
"""
self.slug = self.slug and self.slug.lower()
self.issuing_component = self.issuing_component and self.issuing_component.lower()
super(BadgeClass, self).save(**kwargs)
class Meta(object):
app_label = "badges"
unique_together = (('slug', 'issuing_component', 'course_id'),)
verbose_name_plural = "Badge Classes"
class BadgeAssertion(TimeStampedModel):
"""
Tracks badges on our side of the badge baking transaction
"""
user = models.ForeignKey(User)
badge_class = models.ForeignKey(BadgeClass)
data = JSONField()
backend = models.CharField(max_length=50)
image_url = models.URLField()
assertion_url = models.URLField()
def __unicode__(self):
return u"<{username} Badge Assertion for {slug} for {issuing_component}".format(
username=self.user.username, slug=self.badge_class.slug,
issuing_component=self.badge_class.issuing_component,
)
@classmethod
def assertions_for_user(cls, user, course_id=None):
"""
Get all assertions for a user, optionally constrained to a course.
"""
if course_id:
return cls.objects.filter(user=user, badge_class__course_id=course_id)
return cls.objects.filter(user=user)
class Meta(object):
app_label = "badges"
# Abstract model doesn't index this, so we have to.
BadgeAssertion._meta.get_field('created').db_index = True # pylint: disable=protected-access
class CourseCompleteImageConfiguration(models.Model):
"""
Contains the icon configuration for badges for a specific course mode.
"""
mode = models.CharField(
max_length=125,
help_text=_(u'The course mode for this badge image. For example, "verified" or "honor".'),
unique=True,
)
icon = models.ImageField(
# Actual max is 256KB, but need overhead for badge baking. This should be more than enough.
help_text=_(
u"Badge images must be square PNG files. The file size should be under 250KB."
),
upload_to='course_complete_badges',
validators=[validate_badge_image]
)
default = models.BooleanField(
help_text=_(
u"Set this value to True if you want this image to be the default image for any course modes "
u"that do not have a specified badge image. You can have only one default image."
),
default=False,
)
def __unicode__(self):
return u"<CourseCompleteImageConfiguration for '{mode}'{default}>".format(
mode=self.mode,
default=u" (default)" if self.default else u''
)
def clean(self):
"""
Make sure there's not more than one default.
"""
# pylint: disable=no-member
if self.default and CourseCompleteImageConfiguration.objects.filter(default=True).exclude(id=self.id):
raise ValidationError(_(u"There can be only one default image."))
@classmethod
def image_for_mode(cls, mode):
"""
Get the image for a particular mode.
"""
try:
return cls.objects.get(mode=mode).icon
except cls.DoesNotExist:
# Fall back to default, if there is one.
return cls.objects.get(default=True).icon
class Meta(object):
app_label = "badges"
class CourseEventBadgesConfiguration(ConfigurationModel):
"""
Determines the settings for meta course awards-- such as completing a certain
number of courses or enrolling in a certain number of them.
"""
courses_completed = models.TextField(
blank=True, default='',
help_text=_(
u"On each line, put the number of completed courses to award a badge for, a comma, and the slug of a "
u"badge class you have created that has the issuing component 'openedx__course'. "
u"For example: 3,enrolled_3_courses"
)
)
courses_enrolled = models.TextField(
blank=True, default='',
help_text=_(
u"On each line, put the number of enrolled courses to award a badge for, a comma, and the slug of a "
u"badge class you have created that has the issuing component 'openedx__course'. "
u"For example: 3,enrolled_3_courses"
)
)
course_groups = models.TextField(
blank=True, default='',
help_text=_(
u"Each line is a comma-separated list. The first item in each line is the slug of a badge class you "
u"have created that has an issuing component of 'openedx__course'. The remaining items in each line are "
u"the course keys the learner needs to complete to be awarded the badge. For example: "
u"slug_for_compsci_courses_group_badge,course-v1:CompSci+Course+First,course-v1:CompsSci+Course+Second"
)
)
def __unicode__(self):
return u"<CourseEventBadgesConfiguration ({})>".format(u"Enabled" if self.enabled else u"Disabled")
@property
def completed_settings(self):
"""
Parses the settings from the courses_completed field.
"""
return deserialize_count_specs(self.courses_completed)
@property
def enrolled_settings(self):
"""
Parses the settings from the courses_completed field.
"""
return deserialize_count_specs(self.courses_enrolled)
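# A minimal sketch of the imported deserialize_count_specs helper used by the two
# properties above. Its real implementation is not defined in this file; this version
# only assumes the documented "<count>,<badge slug>" line format (for example
# "3,enrolled_3_courses").
def deserialize_count_specs(text):
    """
    Parse lines of "<count>,<slug>" into a {count: slug} mapping.
    """
    lines = (line.split(',', 1) for line in text.splitlines() if line.strip())
    return {int(count): slug.strip().lower() for count, slug in lines}
# Example: "3,enrolled_3_courses\n10,enrolled_10_courses"
# parses to {3: 'enrolled_3_courses', 10: 'enrolled_10_courses'}.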
| validate_badge_image | identifier_name |
|
models.py | be a badge and square.
"""
if image.width != image.height:
raise ValidationError(_(u"The badge image must be square."))
if not image.size < (250 * 1024):
raise ValidationError(_(u"The badge image file size must be less than 250KB."))
def validate_lowercase(string):
|
class CourseBadgesDisabledError(Exception):
"""
Exception raised when Course Badges aren't enabled, but an attempt to fetch one is made anyway.
"""
class BadgeClass(models.Model):
"""
Specifies a badge class to be registered with a backend.
"""
slug = models.SlugField(max_length=255, validators=[validate_lowercase])
issuing_component = models.SlugField(max_length=50, default='', blank=True, validators=[validate_lowercase])
display_name = models.CharField(max_length=255)
course_id = CourseKeyField(max_length=255, blank=True, default=None)
description = models.TextField()
criteria = models.TextField()
# Mode a badge was awarded for. Included for legacy/migration purposes.
mode = models.CharField(max_length=100, default='', blank=True)
image = models.ImageField(upload_to='badge_classes', validators=[validate_badge_image])
def __unicode__(self):
return u"<Badge '{slug}' for '{issuing_component}'>".format(
slug=self.slug, issuing_component=self.issuing_component
)
@classmethod
def get_badge_class(
cls, slug, issuing_component, display_name=None, description=None, criteria=None, image_file_handle=None,
mode='', course_id=None, create=True
):
"""
Looks up a badge class by its slug, issuing component, and course_id and returns it should it exist.
If it does not exist, and create is True, creates it according to the arguments. Otherwise, returns None.
The expectation is that an XBlock or platform developer should not need to concern themselves with whether
or not a badge class has already been created, but should just feed all requirements to this function
and it will 'do the right thing'. It should be the exception, rather than the common case, that a badge class
would need to be looked up without also being created were it missing.
"""
slug = slug.lower()
issuing_component = issuing_component.lower()
if course_id and not modulestore().get_course(course_id).issue_badges:
raise CourseBadgesDisabledError("This course does not have badges enabled.")
if not course_id:
course_id = CourseKeyField.Empty
try:
return cls.objects.get(slug=slug, issuing_component=issuing_component, course_id=course_id)
except cls.DoesNotExist:
if not create:
return None
badge_class = cls(
slug=slug,
issuing_component=issuing_component,
display_name=display_name,
course_id=course_id,
mode=mode,
description=description,
criteria=criteria,
)
badge_class.image.save(image_file_handle.name, image_file_handle)
badge_class.full_clean()
badge_class.save()
return badge_class
@lazy
def backend(self):
"""
Loads the badging backend.
"""
module, klass = settings.BADGING_BACKEND.rsplit('.', 1)
module = import_module(module)
return getattr(module, klass)()
def get_for_user(self, user):
"""
Get the assertion for this badge class for this user, if it has been awarded.
"""
return self.badgeassertion_set.filter(user=user)
def award(self, user, evidence_url=None):
"""
Contacts the backend to have a badge assertion created for this badge class for this user.
"""
return self.backend.award(self, user, evidence_url=evidence_url)
def save(self, **kwargs):
"""
Slugs must always be lowercase.
"""
self.slug = self.slug and self.slug.lower()
self.issuing_component = self.issuing_component and self.issuing_component.lower()
super(BadgeClass, self).save(**kwargs)
class Meta(object):
app_label = "badges"
unique_together = (('slug', 'issuing_component', 'course_id'),)
verbose_name_plural = "Badge Classes"
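# BadgeClass.backend above resolves settings.BADGING_BACKEND, a dotted "module.ClassName"
# path, imports the module and instantiates the class with no arguments;
# BadgeClass.award() then calls backend.award(badge_class, user, evidence_url=...).
# A hypothetical stub that satisfies that contract (the class name and setting value are
# illustrative, not the platform's real badging backend):
class LoggingBadgeBackend(object):
    """Stub backend that records award calls instead of contacting an external issuer."""
    def __init__(self):
        self.awarded = []
    def award(self, badge_class, user, evidence_url=None):
        # A real backend would create and return a BadgeAssertion here.
        self.awarded.append((badge_class.slug, user.username, evidence_url))
        return None
# settings.py (hypothetical): BADGING_BACKEND = 'myapp.badge_backends.LoggingBadgeBackend'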
class BadgeAssertion(TimeStampedModel):
"""
Tracks badges on our side of the badge baking transaction
"""
user = models.ForeignKey(User)
badge_class = models.ForeignKey(BadgeClass)
data = JSONField()
backend = models.CharField(max_length=50)
image_url = models.URLField()
assertion_url = models.URLField()
def __unicode__(self):
return u"<{username} Badge Assertion for {slug} for {issuing_component}".format(
username=self.user.username, slug=self.badge_class.slug,
issuing_component=self.badge_class.issuing_component,
)
@classmethod
def assertions_for_user(cls, user, course_id=None):
"""
Get all assertions for a user, optionally constrained to a course.
"""
if course_id:
return cls.objects.filter(user=user, badge_class__course_id=course_id)
return cls.objects.filter(user=user)
class Meta(object):
app_label = "badges"
# Abstract model doesn't index this, so we have to.
BadgeAssertion._meta.get_field('created').db_index = True # pylint: disable=protected-access
class CourseCompleteImageConfiguration(models.Model):
"""
Contains the icon configuration for badges for a specific course mode.
"""
mode = models.CharField(
max_length=125,
help_text=_(u'The course mode for this badge image. For example, "verified" or "honor".'),
unique=True,
)
icon = models.ImageField(
# Actual max is 256KB, but need overhead for badge baking. This should be more than enough.
help_text=_(
u"Badge images must be square PNG files. The file size should be under 250KB."
),
upload_to='course_complete_badges',
validators=[validate_badge_image]
)
default = models.BooleanField(
help_text=_(
u"Set this value to True if you want this image to be the default image for any course modes "
u"that do not have a specified badge image. You can have only one default image."
),
default=False,
)
def __unicode__(self):
return u"<CourseCompleteImageConfiguration for '{mode}'{default}>".format(
mode=self.mode,
default=u" (default)" if self.default else u''
)
def clean(self):
"""
Make sure there's not more than one default.
"""
# pylint: disable=no-member
if self.default and CourseCompleteImageConfiguration.objects.filter(default=True).exclude(id=self.id):
raise ValidationError(_(u"There can be only one default image."))
@classmethod
def image_for_mode(cls, mode):
"""
Get the image for a particular mode.
"""
try:
return cls.objects.get(mode=mode).icon
except cls.DoesNotExist:
# Fall back to default, if there is one.
return cls.objects.get(default=True).icon
class Meta(object):
app_label = "badges"
class CourseEventBadgesConfiguration(ConfigurationModel):
"""
Determines the settings for meta course awards-- such as completing a certain
number of courses or enrolling in a certain number of them.
"""
courses_completed = models.TextField(
blank=True, default='',
help_text=_(
u"On each line, put the number of completed courses to award a badge for, a comma, and the slug of a "
u"badge class you have created that has the issuing component 'openedx__course'. "
u"For example: 3,enrolled_3_courses"
)
)
courses_enrolled = models.TextField(
blank=True, default='',
help_text=_(
u"On each line, put the number of enrolled courses to award a badge for, a comma, and the slug of a "
u"badge class you have created that has the issuing component 'openedx__course'. "
u"For example: 3,enrolled_3_courses"
)
)
course_groups = models.TextField(
blank=True, default='',
help_text=_(
u"Each line is a comma-separated list. The first item in each line is the slug of a badge class you "
u"have created that has an issuing component of 'openedx__course'. The remaining items in each line are "
u"the course keys the learner needs to complete to be awarded the badge. For example: "
u"slug_for_compsci_courses_group_badge,course-v1:CompSci+Course+First,course-v1:CompsSci+Course+Second"
)
)
def __unicode__(self):
return u"<CourseEventBadgesConfiguration ({})>".format(u"Enabled" if self.enabled else u"Disabled")
@property
def completed_settings(self):
"""
Parses the settings from the courses_completed field.
"""
return deserialize_count_specs(self.courses_completed)
@property
def enrolled_settings(self):
"""
Parses the settings from the courses_completed field.
"""
return deserialize_count_specs(self.courses_enrolled)
@property
def course_group_settings(self):
"""
Parses the course group | """
Validates that a string is lowercase.
"""
if not string.islower():
raise ValidationError(_(u"This value must be all lowercase.")) | identifier_body |
models.py | be a badge and square.
"""
if image.width != image.height:
raise ValidationError(_(u"The badge image must be square."))
if not image.size < (250 * 1024):
raise ValidationError(_(u"The badge image file size must be less than 250KB."))
def validate_lowercase(string):
"""
Validates that a string is lowercase.
"""
if not string.islower():
raise ValidationError(_(u"This value must be all lowercase."))
class CourseBadgesDisabledError(Exception):
"""
Exception raised when Course Badges aren't enabled, but an attempt to fetch one is made anyway.
"""
class BadgeClass(models.Model):
"""
Specifies a badge class to be registered with a backend.
"""
slug = models.SlugField(max_length=255, validators=[validate_lowercase])
issuing_component = models.SlugField(max_length=50, default='', blank=True, validators=[validate_lowercase])
display_name = models.CharField(max_length=255)
course_id = CourseKeyField(max_length=255, blank=True, default=None)
description = models.TextField()
criteria = models.TextField()
# Mode a badge was awarded for. Included for legacy/migration purposes.
mode = models.CharField(max_length=100, default='', blank=True)
image = models.ImageField(upload_to='badge_classes', validators=[validate_badge_image]) | def __unicode__(self):
return u"<Badge '{slug}' for '{issuing_component}'>".format(
slug=self.slug, issuing_component=self.issuing_component
)
@classmethod
def get_badge_class(
cls, slug, issuing_component, display_name=None, description=None, criteria=None, image_file_handle=None,
mode='', course_id=None, create=True
):
"""
Looks up a badge class by its slug, issuing component, and course_id and returns it should it exist.
If it does not exist, and create is True, creates it according to the arguments. Otherwise, returns None.
The expectation is that an XBlock or platform developer should not need to concern themselves with whether
or not a badge class has already been created, but should just feed all requirements to this function
and it will 'do the right thing'. It should be the exception, rather than the common case, that a badge class
would need to be looked up without also being created were it missing.
"""
slug = slug.lower()
issuing_component = issuing_component.lower()
if course_id and not modulestore().get_course(course_id).issue_badges:
raise CourseBadgesDisabledError("This course does not have badges enabled.")
if not course_id:
course_id = CourseKeyField.Empty
try:
return cls.objects.get(slug=slug, issuing_component=issuing_component, course_id=course_id)
except cls.DoesNotExist:
if not create:
return None
badge_class = cls(
slug=slug,
issuing_component=issuing_component,
display_name=display_name,
course_id=course_id,
mode=mode,
description=description,
criteria=criteria,
)
badge_class.image.save(image_file_handle.name, image_file_handle)
badge_class.full_clean()
badge_class.save()
return badge_class
@lazy
def backend(self):
"""
Loads the badging backend.
"""
module, klass = settings.BADGING_BACKEND.rsplit('.', 1)
module = import_module(module)
return getattr(module, klass)()
def get_for_user(self, user):
"""
Get the assertion for this badge class for this user, if it has been awarded.
"""
return self.badgeassertion_set.filter(user=user)
def award(self, user, evidence_url=None):
"""
Contacts the backend to have a badge assertion created for this badge class for this user.
"""
return self.backend.award(self, user, evidence_url=evidence_url)
def save(self, **kwargs):
"""
Slugs must always be lowercase.
"""
self.slug = self.slug and self.slug.lower()
self.issuing_component = self.issuing_component and self.issuing_component.lower()
super(BadgeClass, self).save(**kwargs)
class Meta(object):
app_label = "badges"
unique_together = (('slug', 'issuing_component', 'course_id'),)
verbose_name_plural = "Badge Classes"
class BadgeAssertion(TimeStampedModel):
"""
Tracks badges on our side of the badge baking transaction
"""
user = models.ForeignKey(User)
badge_class = models.ForeignKey(BadgeClass)
data = JSONField()
backend = models.CharField(max_length=50)
image_url = models.URLField()
assertion_url = models.URLField()
def __unicode__(self):
return u"<{username} Badge Assertion for {slug} for {issuing_component}".format(
username=self.user.username, slug=self.badge_class.slug,
issuing_component=self.badge_class.issuing_component,
)
@classmethod
def assertions_for_user(cls, user, course_id=None):
"""
Get all assertions for a user, optionally constrained to a course.
"""
if course_id:
return cls.objects.filter(user=user, badge_class__course_id=course_id)
return cls.objects.filter(user=user)
class Meta(object):
app_label = "badges"
# Abstract model doesn't index this, so we have to.
BadgeAssertion._meta.get_field('created').db_index = True # pylint: disable=protected-access
class CourseCompleteImageConfiguration(models.Model):
"""
Contains the icon configuration for badges for a specific course mode.
"""
mode = models.CharField(
max_length=125,
help_text=_(u'The course mode for this badge image. For example, "verified" or "honor".'),
unique=True,
)
icon = models.ImageField(
# Actual max is 256KB, but need overhead for badge baking. This should be more than enough.
help_text=_(
u"Badge images must be square PNG files. The file size should be under 250KB."
),
upload_to='course_complete_badges',
validators=[validate_badge_image]
)
default = models.BooleanField(
help_text=_(
u"Set this value to True if you want this image to be the default image for any course modes "
u"that do not have a specified badge image. You can have only one default image."
),
default=False,
)
def __unicode__(self):
return u"<CourseCompleteImageConfiguration for '{mode}'{default}>".format(
mode=self.mode,
default=u" (default)" if self.default else u''
)
def clean(self):
"""
Make sure there's not more than one default.
"""
# pylint: disable=no-member
if self.default and CourseCompleteImageConfiguration.objects.filter(default=True).exclude(id=self.id):
raise ValidationError(_(u"There can be only one default image."))
@classmethod
def image_for_mode(cls, mode):
"""
Get the image for a particular mode.
"""
try:
return cls.objects.get(mode=mode).icon
except cls.DoesNotExist:
# Fall back to default, if there is one.
return cls.objects.get(default=True).icon
class Meta(object):
app_label = "badges"
class CourseEventBadgesConfiguration(ConfigurationModel):
"""
Determines the settings for meta course awards-- such as completing a certain
number of courses or enrolling in a certain number of them.
"""
courses_completed = models.TextField(
blank=True, default='',
help_text=_(
u"On each line, put the number of completed courses to award a badge for, a comma, and the slug of a "
u"badge class you have created that has the issuing component 'openedx__course'. "
u"For example: 3,enrolled_3_courses"
)
)
courses_enrolled = models.TextField(
blank=True, default='',
help_text=_(
u"On each line, put the number of enrolled courses to award a badge for, a comma, and the slug of a "
u"badge class you have created that has the issuing component 'openedx__course'. "
u"For example: 3,enrolled_3_courses"
)
)
course_groups = models.TextField(
blank=True, default='',
help_text=_(
u"Each line is a comma-separated list. The first item in each line is the slug of a badge class you "
u"have created that has an issuing component of 'openedx__course'. The remaining items in each line are "
u"the course keys the learner needs to complete to be awarded the badge. For example: "
u"slug_for_compsci_courses_group_badge,course-v1:CompSci+Course+First,course-v1:CompsSci+Course+Second"
)
)
def __unicode__(self):
return u"<CourseEventBadgesConfiguration ({})>".format(u"Enabled" if self.enabled else u"Disabled")
@property
def completed_settings(self):
"""
Parses the settings from the courses_completed field.
"""
return deserialize_count_specs(self.courses_completed)
@property
def enrolled_settings(self):
"""
Parses the settings from the courses_completed field.
"""
return deserialize_count_specs(self.courses_enrolled)
@property
def course_group_settings(self):
"""
Parses the course group settings | random_line_split |
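# The course_group_settings property is cut off at this point in the row above. Based on
# the documented line format "slug,course-key-1,course-key-2", a plausible version of the
# parsing step looks like this (the CourseKey import is assumed, matching the
# CourseKeyField usage earlier in the model):
from opaque_keys.edx.keys import CourseKey
def parse_course_groups(course_groups_text):
    """Return {badge slug: [CourseKey, ...]} from the course_groups TextField."""
    specs = course_groups_text.strip()
    if not specs:
        return {}
    result = {}
    for line in specs.splitlines():
        slug, _, keys = line.partition(',')
        result[slug.strip().lower()] = [
            CourseKey.from_string(key.strip()) for key in keys.split(',') if key.strip()
        ]
    return result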
|
models.py | less than 250KB."))
def validate_lowercase(string):
"""
Validates that a string is lowercase.
"""
if not string.islower():
raise ValidationError(_(u"This value must be all lowercase."))
class CourseBadgesDisabledError(Exception):
"""
Exception raised when Course Badges aren't enabled, but an attempt to fetch one is made anyway.
"""
class BadgeClass(models.Model):
"""
Specifies a badge class to be registered with a backend.
"""
slug = models.SlugField(max_length=255, validators=[validate_lowercase])
issuing_component = models.SlugField(max_length=50, default='', blank=True, validators=[validate_lowercase])
display_name = models.CharField(max_length=255)
course_id = CourseKeyField(max_length=255, blank=True, default=None)
description = models.TextField()
criteria = models.TextField()
# Mode a badge was awarded for. Included for legacy/migration purposes.
mode = models.CharField(max_length=100, default='', blank=True)
image = models.ImageField(upload_to='badge_classes', validators=[validate_badge_image])
def __unicode__(self):
return u"<Badge '{slug}' for '{issuing_component}'>".format(
slug=self.slug, issuing_component=self.issuing_component
)
@classmethod
def get_badge_class(
cls, slug, issuing_component, display_name=None, description=None, criteria=None, image_file_handle=None,
mode='', course_id=None, create=True
):
"""
Looks up a badge class by its slug, issuing component, and course_id and returns it should it exist.
If it does not exist, and create is True, creates it according to the arguments. Otherwise, returns None.
The expectation is that an XBlock or platform developer should not need to concern themselves with whether
or not a badge class has already been created, but should just feed all requirements to this function
and it will 'do the right thing'. It should be the exception, rather than the common case, that a badge class
would need to be looked up without also being created were it missing.
"""
slug = slug.lower()
issuing_component = issuing_component.lower()
if course_id and not modulestore().get_course(course_id).issue_badges:
raise CourseBadgesDisabledError("This course does not have badges enabled.")
if not course_id:
course_id = CourseKeyField.Empty
try:
return cls.objects.get(slug=slug, issuing_component=issuing_component, course_id=course_id)
except cls.DoesNotExist:
if not create:
return None
badge_class = cls(
slug=slug,
issuing_component=issuing_component,
display_name=display_name,
course_id=course_id,
mode=mode,
description=description,
criteria=criteria,
)
badge_class.image.save(image_file_handle.name, image_file_handle)
badge_class.full_clean()
badge_class.save()
return badge_class
@lazy
def backend(self):
"""
Loads the badging backend.
"""
module, klass = settings.BADGING_BACKEND.rsplit('.', 1)
module = import_module(module)
return getattr(module, klass)()
def get_for_user(self, user):
"""
Get the assertion for this badge class for this user, if it has been awarded.
"""
return self.badgeassertion_set.filter(user=user)
def award(self, user, evidence_url=None):
"""
Contacts the backend to have a badge assertion created for this badge class for this user.
"""
return self.backend.award(self, user, evidence_url=evidence_url)
def save(self, **kwargs):
"""
Slugs must always be lowercase.
"""
self.slug = self.slug and self.slug.lower()
self.issuing_component = self.issuing_component and self.issuing_component.lower()
super(BadgeClass, self).save(**kwargs)
class Meta(object):
app_label = "badges"
unique_together = (('slug', 'issuing_component', 'course_id'),)
verbose_name_plural = "Badge Classes"
class BadgeAssertion(TimeStampedModel):
"""
Tracks badges on our side of the badge baking transaction
"""
user = models.ForeignKey(User)
badge_class = models.ForeignKey(BadgeClass)
data = JSONField()
backend = models.CharField(max_length=50)
image_url = models.URLField()
assertion_url = models.URLField()
def __unicode__(self):
return u"<{username} Badge Assertion for {slug} for {issuing_component}".format(
username=self.user.username, slug=self.badge_class.slug,
issuing_component=self.badge_class.issuing_component,
)
@classmethod
def assertions_for_user(cls, user, course_id=None):
"""
Get all assertions for a user, optionally constrained to a course.
"""
if course_id:
return cls.objects.filter(user=user, badge_class__course_id=course_id)
return cls.objects.filter(user=user)
class Meta(object):
app_label = "badges"
# Abstract model doesn't index this, so we have to.
BadgeAssertion._meta.get_field('created').db_index = True # pylint: disable=protected-access
class CourseCompleteImageConfiguration(models.Model):
"""
Contains the icon configuration for badges for a specific course mode.
"""
mode = models.CharField(
max_length=125,
help_text=_(u'The course mode for this badge image. For example, "verified" or "honor".'),
unique=True,
)
icon = models.ImageField(
# Actual max is 256KB, but need overhead for badge baking. This should be more than enough.
help_text=_(
u"Badge images must be square PNG files. The file size should be under 250KB."
),
upload_to='course_complete_badges',
validators=[validate_badge_image]
)
default = models.BooleanField(
help_text=_(
u"Set this value to True if you want this image to be the default image for any course modes "
u"that do not have a specified badge image. You can have only one default image."
),
default=False,
)
def __unicode__(self):
return u"<CourseCompleteImageConfiguration for '{mode}'{default}>".format(
mode=self.mode,
default=u" (default)" if self.default else u''
)
def clean(self):
"""
Make sure there's not more than one default.
"""
# pylint: disable=no-member
if self.default and CourseCompleteImageConfiguration.objects.filter(default=True).exclude(id=self.id):
raise ValidationError(_(u"There can be only one default image."))
@classmethod
def image_for_mode(cls, mode):
"""
Get the image for a particular mode.
"""
try:
return cls.objects.get(mode=mode).icon
except cls.DoesNotExist:
# Fall back to default, if there is one.
return cls.objects.get(default=True).icon
class Meta(object):
app_label = "badges"
class CourseEventBadgesConfiguration(ConfigurationModel):
"""
Determines the settings for meta course awards-- such as completing a certain
number of courses or enrolling in a certain number of them.
"""
courses_completed = models.TextField(
blank=True, default='',
help_text=_(
u"On each line, put the number of completed courses to award a badge for, a comma, and the slug of a "
u"badge class you have created that has the issuing component 'openedx__course'. "
u"For example: 3,enrolled_3_courses"
)
)
courses_enrolled = models.TextField(
blank=True, default='',
help_text=_(
u"On each line, put the number of enrolled courses to award a badge for, a comma, and the slug of a "
u"badge class you have created that has the issuing component 'openedx__course'. "
u"For example: 3,enrolled_3_courses"
)
)
course_groups = models.TextField(
blank=True, default='',
help_text=_(
u"Each line is a comma-separated list. The first item in each line is the slug of a badge class you "
u"have created that has an issuing component of 'openedx__course'. The remaining items in each line are "
u"the course keys the learner needs to complete to be awarded the badge. For example: "
u"slug_for_compsci_courses_group_badge,course-v1:CompSci+Course+First,course-v1:CompsSci+Course+Second"
)
)
def __unicode__(self):
return u"<CourseEventBadgesConfiguration ({})>".format(u"Enabled" if self.enabled else u"Disabled")
@property
def completed_settings(self):
"""
Parses the settings from the courses_completed field.
"""
return deserialize_count_specs(self.courses_completed)
@property
def enrolled_settings(self):
"""
Parses the settings from the courses_completed field.
"""
return deserialize_count_specs(self.courses_enrolled)
@property
def course_group_settings(self):
"""
Parses the course group settings. In example, the format is:
slug_for_compsci_courses_group_badge,course-v1:CompSci+Course+First,course-v1:CompsSci+Course+Second
"""
specs = self.course_groups.strip()
if not specs:
| return {} | conditional_block |
|
unsized3.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test structs with always-unsized fields.
#![allow(unknown_features)]
#![feature(box_syntax)]
use std::mem;
use std::raw;
use std::slice;
struct | <T> {
f: [T],
}
struct Bar {
f1: usize,
f2: [usize],
}
struct Baz {
f1: usize,
f2: str,
}
trait Tr {
fn foo(&self) -> usize;
}
struct St {
f: usize
}
impl Tr for St {
fn foo(&self) -> usize {
self.f
}
}
struct Qux<'a> {
f: Tr+'a
}
pub fn main() {
let _: &Foo<f64>;
let _: &Bar;
let _: &Baz;
let _: Box<Foo<i32>>;
let _: Box<Bar>;
let _: Box<Baz>;
let _ = mem::size_of::<Box<Foo<u8>>>();
let _ = mem::size_of::<Box<Bar>>();
let _ = mem::size_of::<Box<Baz>>();
unsafe {
struct Foo_<T> {
f: [T; 3]
}
let data: Box<Foo_<i32>> = box Foo_{f: [1, 2, 3] };
let x: &Foo<i32> = mem::transmute(slice::from_raw_parts(&*data, 3));
assert!(x.f.len() == 3);
assert!(x.f[0] == 1);
struct Baz_ {
f1: usize,
f2: [u8; 5],
}
let data: Box<_> = box Baz_ {
f1: 42, f2: ['a' as u8, 'b' as u8, 'c' as u8, 'd' as u8, 'e' as u8] };
let x: &Baz = mem::transmute(slice::from_raw_parts(&*data, 5));
assert!(x.f1 == 42);
let chs: Vec<char> = x.f2.chars().collect();
assert!(chs.len() == 5);
assert!(chs[0] == 'a');
assert!(chs[1] == 'b');
assert!(chs[2] == 'c');
assert!(chs[3] == 'd');
assert!(chs[4] == 'e');
struct Qux_ {
f: St
}
let obj: Box<St> = box St { f: 42 };
let obj: &Tr = &*obj;
let obj: raw::TraitObject = mem::transmute(&*obj);
let data: Box<_> = box Qux_{ f: St { f: 234 } };
let x: &Qux = mem::transmute(raw::TraitObject { vtable: obj.vtable,
data: mem::transmute(&*data) });
assert!(x.f.foo() == 234);
}
}
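// Restating the slice-tail case from the test above with the tools available on current
// stable Rust: when the dynamically sized data is the struct's last (generic) field,
// Box's unsizing coercion replaces the mem::transmute/std::raw plumbing. Sketch only;
// `Tail` and `len_hint` are illustrative names, not part of the original test.
struct Tail<T: ?Sized> {
    len_hint: usize,
    data: T,
}
#[test]
fn slice_tail_coerces_without_transmute() {
    let sized: Box<Tail<[i32; 3]>> = Box::new(Tail { len_hint: 3, data: [1, 2, 3] });
    // Box<Tail<[i32; 3]>> -> Box<Tail<[i32]>> because `data` is the last field.
    let erased: Box<Tail<[i32]>> = sized;
    assert_eq!(erased.len_hint, erased.data.len());
    assert_eq!(erased.data[0], 1);
}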
| Foo | identifier_name |
unsized3.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test structs with always-unsized fields.
#![allow(unknown_features)]
#![feature(box_syntax)]
use std::mem;
use std::raw;
use std::slice;
struct Foo<T> {
f: [T],
}
struct Bar {
f1: usize,
f2: [usize],
}
struct Baz {
f1: usize,
f2: str,
}
trait Tr {
fn foo(&self) -> usize;
}
struct St {
f: usize
}
impl Tr for St {
fn foo(&self) -> usize {
self.f
}
}
struct Qux<'a> {
f: Tr+'a
}
pub fn main() {
let _: &Foo<f64>;
let _: &Bar;
let _: &Baz;
let _: Box<Foo<i32>>;
let _: Box<Bar>;
let _: Box<Baz>;
let _ = mem::size_of::<Box<Foo<u8>>>();
let _ = mem::size_of::<Box<Bar>>();
let _ = mem::size_of::<Box<Baz>>();
unsafe {
struct Foo_<T> {
f: [T; 3]
}
let data: Box<Foo_<i32>> = box Foo_{f: [1, 2, 3] };
let x: &Foo<i32> = mem::transmute(slice::from_raw_parts(&*data, 3));
assert!(x.f.len() == 3);
assert!(x.f[0] == 1);
struct Baz_ {
f1: usize,
f2: [u8; 5],
}
| let x: &Baz = mem::transmute(slice::from_raw_parts(&*data, 5));
assert!(x.f1 == 42);
let chs: Vec<char> = x.f2.chars().collect();
assert!(chs.len() == 5);
assert!(chs[0] == 'a');
assert!(chs[1] == 'b');
assert!(chs[2] == 'c');
assert!(chs[3] == 'd');
assert!(chs[4] == 'e');
struct Qux_ {
f: St
}
let obj: Box<St> = box St { f: 42 };
let obj: &Tr = &*obj;
let obj: raw::TraitObject = mem::transmute(&*obj);
let data: Box<_> = box Qux_{ f: St { f: 234 } };
let x: &Qux = mem::transmute(raw::TraitObject { vtable: obj.vtable,
data: mem::transmute(&*data) });
assert!(x.f.foo() == 234);
}
} | let data: Box<_> = box Baz_ {
f1: 42, f2: ['a' as u8, 'b' as u8, 'c' as u8, 'd' as u8, 'e' as u8] }; | random_line_split |
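// The same coercion covers the trait-object tail used by Qux in the test above. The
// trait/struct pair is restated here under new names so the sketch stands alone; this
// assumes a current stable toolchain and is not part of the original test.
trait Tr2 {
    fn foo(&self) -> usize;
}
struct St2 {
    f: usize,
}
impl Tr2 for St2 {
    fn foo(&self) -> usize {
        self.f
    }
}
struct Qux2<T: ?Sized> {
    f: T,
}
#[test]
fn trait_object_tail_coerces_without_transmute() {
    let concrete: Box<Qux2<St2>> = Box::new(Qux2 { f: St2 { f: 234 } });
    let object: Box<Qux2<dyn Tr2>> = concrete; // unsizing coercion on the last field
    assert_eq!(object.f.foo(), 234);
}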
rpc.rs | extern crate crypto;
use bytes::BytesMut;
use crypto::digest::Digest;
use encoding::{all::ISO_8859_1, DecoderTrap, EncoderTrap, Encoding};
use futures::SinkExt;
use tokio::{
io::{AsyncRead, AsyncWrite},
net::TcpStream,
stream::StreamExt,
};
use tokio_util::codec::{Decoder, Encoder, Framed};
use tracing::*;
use crate::{errors::Error, util};
fn compute_nonce_hash(pass: &str, nonce: &str) -> String {
let mut digest = crypto::md5::Md5::new();
digest.input_str(&format!("{}{}", nonce, pass));
digest.result_str()
}
const TERMCHAR: u8 = 3;
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum CodecMode {
Client,
Server,
}
pub struct BoincCodec {
mode: CodecMode,
next_index: usize,
}
impl BoincCodec {
#[must_use]
pub const fn new(mode: CodecMode) -> Self {
Self {
mode,
next_index: 0,
}
}
}
impl Decoder for BoincCodec {
type Item = Vec<treexml::Element>;
type Error = Error;
fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
let read_to = src.len();
if let Some(offset) = src[self.next_index..read_to]
.iter()
.position(|b| *b == TERMCHAR)
{
let newline_index = offset + self.next_index;
self.next_index = 0;
let line = src.split_to(newline_index + 1);
let line = &line[..line.len() - 1];
let line = ISO_8859_1
.decode(line, DecoderTrap::Strict)
.map_err(|e| Error::DataParseError(format!("Invalid data received: {}", e)))?;
trace!("Received data: {}", line);
let line = line.trim_start_matches("<?xml version=\"1.0\" encoding=\"ISO-8859-1\" ?>");
let root_node = util::parse_node(line)?;
let expected_root = match self.mode {
CodecMode::Client => "boinc_gui_rpc_reply",
CodecMode::Server => "boinc_gui_rpc_request",
};
if root_node.name != expected_root {
return Err(Error::DataParseError(format!(
"Invalid root: {}. Expected: {}",
root_node.name, expected_root
)));
}
Ok(Some(root_node.children))
} else {
self.next_index = read_to;
Ok(None)
}
}
}
impl Encoder<Vec<treexml::Element>> for BoincCodec {
type Error = Error;
fn encode(
&mut self,
item: Vec<treexml::Element>,
dst: &mut BytesMut,
) -> Result<(), Self::Error> {
let mut out = treexml::Element::new(match self.mode {
CodecMode::Client => "boinc_gui_rpc_request",
CodecMode::Server => "boinc_gui_rpc_reply",
});
out.children = item;
let data = format!("{}", out)
.replace("<?xml version='1.0'?>", "")
.replace(" />", "/>");
trace!("Sending data: {}", data);
dst.extend_from_slice(
&ISO_8859_1
.encode(&data, EncoderTrap::Strict)
.expect("Our data should always be correct"),
);
dst.extend_from_slice(&[TERMCHAR]);
Ok(())
}
}
pub struct | <Io> {
conn: Framed<Io, BoincCodec>,
}
impl DaemonStream<TcpStream> {
pub async fn connect(host: String, password: Option<String>) -> Result<Self, Error> {
Self::authenticate(TcpStream::connect(host).await?, password).await
}
}
impl<Io: AsyncRead + AsyncWrite + Unpin> DaemonStream<Io> {
async fn authenticate(io: Io, password: Option<String>) -> Result<Self, Error> {
let mut conn = BoincCodec::new(CodecMode::Client).framed(io);
let mut out = Some(vec![treexml::Element::new("auth1")]);
let mut nonce_sent = false;
loop {
if let Some(data) = out.take() {
conn.send(data).await?;
let data = conn
.try_next()
.await?
.ok_or_else(|| Error::DaemonError("EOF".into()))?;
for node in data {
match &*node.name {
"nonce" => {
if nonce_sent {
return Err(Error::DaemonError(
"Daemon requested nonce again - could be a bug".into(),
));
}
let mut nonce_node = treexml::Element::new("nonce_hash");
let pwd = password.clone().ok_or_else(|| {
Error::AuthError("Password required for nonce".to_string())
})?;
nonce_node.text = Some(compute_nonce_hash(
&pwd,
&node
.text
.ok_or_else(|| Error::AuthError("Invalid nonce".into()))?,
));
let mut auth2_node = treexml::Element::new("auth2");
auth2_node.children.push(nonce_node);
out = Some(vec![auth2_node]);
nonce_sent = true;
}
"unauthorized" => {
return Err(Error::AuthError("unauthorized".to_string()));
}
"error" => {
return Err(Error::DaemonError(format!(
"BOINC daemon returned error: {:?}",
node.text
)));
}
"authorized" => {
return Ok(Self { conn });
}
_ => {
return Err(Error::DaemonError(format!(
"Invalid response from daemon: {}",
node.name
)));
}
}
}
} else {
return Err(Error::DaemonError("Empty response".into()));
}
}
}
pub(crate) async fn query(
&mut self,
request_data: Vec<treexml::Element>,
) -> Result<Vec<treexml::Element>, Error> {
self.conn.send(request_data).await?;
let data = self
.conn
.try_next()
.await?
.ok_or_else(|| Error::DaemonError("EOF".into()))?;
Ok(data)
}
}
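// A hypothetical usage sketch for the client defined above, assuming it is called from
// within the same crate (query() is pub(crate)). The address, password and the
// "get_cc_status" request element are illustrative values, not taken from this file.
async fn print_cc_status() -> Result<(), Error> {
    let mut daemon = DaemonStream::connect(
        "127.0.0.1:31416".to_string(),
        Some("rpc-password".to_string()),
    )
    .await?;
    let reply = daemon
        .query(vec![treexml::Element::new("get_cc_status")])
        .await?;
    for node in reply {
        println!("{}", node);
    }
    Ok(())
}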
| DaemonStream | identifier_name |
rpc.rs | extern crate crypto;
use bytes::BytesMut;
use crypto::digest::Digest;
use encoding::{all::ISO_8859_1, DecoderTrap, EncoderTrap, Encoding};
use futures::SinkExt;
use tokio::{
io::{AsyncRead, AsyncWrite},
net::TcpStream,
stream::StreamExt,
};
use tokio_util::codec::{Decoder, Encoder, Framed};
use tracing::*;
use crate::{errors::Error, util};
fn compute_nonce_hash(pass: &str, nonce: &str) -> String {
let mut digest = crypto::md5::Md5::new();
digest.input_str(&format!("{}{}", nonce, pass));
digest.result_str()
}
const TERMCHAR: u8 = 3;
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum CodecMode {
Client,
Server,
}
pub struct BoincCodec {
mode: CodecMode,
next_index: usize,
}
impl BoincCodec {
#[must_use]
pub const fn new(mode: CodecMode) -> Self {
Self {
mode,
next_index: 0,
}
}
}
impl Decoder for BoincCodec {
type Item = Vec<treexml::Element>;
type Error = Error;
fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
let read_to = src.len();
if let Some(offset) = src[self.next_index..read_to]
.iter()
.position(|b| *b == TERMCHAR)
{
let newline_index = offset + self.next_index;
self.next_index = 0;
let line = src.split_to(newline_index + 1);
let line = &line[..line.len() - 1];
let line = ISO_8859_1
.decode(line, DecoderTrap::Strict)
.map_err(|e| Error::DataParseError(format!("Invalid data received: {}", e)))?;
trace!("Received data: {}", line);
let line = line.trim_start_matches("<?xml version=\"1.0\" encoding=\"ISO-8859-1\" ?>");
let root_node = util::parse_node(line)?;
let expected_root = match self.mode {
CodecMode::Client => "boinc_gui_rpc_reply",
CodecMode::Server => "boinc_gui_rpc_request",
};
if root_node.name != expected_root {
return Err(Error::DataParseError(format!(
"Invalid root: {}. Expected: {}",
root_node.name, expected_root
)));
}
Ok(Some(root_node.children))
} else {
self.next_index = read_to;
Ok(None)
}
}
}
impl Encoder<Vec<treexml::Element>> for BoincCodec {
type Error = Error;
fn encode(
&mut self,
item: Vec<treexml::Element>,
dst: &mut BytesMut,
) -> Result<(), Self::Error> {
let mut out = treexml::Element::new(match self.mode {
CodecMode::Client => "boinc_gui_rpc_request",
CodecMode::Server => "boinc_gui_rpc_reply",
});
out.children = item;
let data = format!("{}", out)
.replace("<?xml version='1.0'?>", "")
.replace(" />", "/>");
trace!("Sending data: {}", data);
dst.extend_from_slice(
&ISO_8859_1
.encode(&data, EncoderTrap::Strict)
.expect("Our data should always be correct"),
);
dst.extend_from_slice(&[TERMCHAR]);
Ok(())
}
}
pub struct DaemonStream<Io> {
conn: Framed<Io, BoincCodec>,
}
impl DaemonStream<TcpStream> {
pub async fn connect(host: String, password: Option<String>) -> Result<Self, Error> {
Self::authenticate(TcpStream::connect(host).await?, password).await
}
}
impl<Io: AsyncRead + AsyncWrite + Unpin> DaemonStream<Io> {
async fn authenticate(io: Io, password: Option<String>) -> Result<Self, Error> {
let mut conn = BoincCodec::new(CodecMode::Client).framed(io);
let mut out = Some(vec![treexml::Element::new("auth1")]);
let mut nonce_sent = false;
loop {
if let Some(data) = out.take() {
conn.send(data).await?;
let data = conn
.try_next()
.await?
.ok_or_else(|| Error::DaemonError("EOF".into()))?;
for node in data {
match &*node.name {
"nonce" => {
if nonce_sent {
return Err(Error::DaemonError(
"Daemon requested nonce again - could be a bug".into(),
));
}
let mut nonce_node = treexml::Element::new("nonce_hash");
let pwd = password.clone().ok_or_else(|| {
Error::AuthError("Password required for nonce".to_string())
})?;
nonce_node.text = Some(compute_nonce_hash(
&pwd,
&node
.text
.ok_or_else(|| Error::AuthError("Invalid nonce".into()))?,
));
let mut auth2_node = treexml::Element::new("auth2");
auth2_node.children.push(nonce_node);
out = Some(vec![auth2_node]);
nonce_sent = true;
}
"unauthorized" => {
return Err(Error::AuthError("unauthorized".to_string()));
}
"error" => {
return Err(Error::DaemonError(format!(
"BOINC daemon returned error: {:?}",
node.text
)));
}
"authorized" => {
return Ok(Self { conn });
}
_ => {
return Err(Error::DaemonError(format!(
"Invalid response from daemon: {}",
node.name
)));
}
}
}
} else {
return Err(Error::DaemonError("Empty response".into()));
}
}
} |
pub(crate) async fn query(
&mut self,
request_data: Vec<treexml::Element>,
) -> Result<Vec<treexml::Element>, Error> {
self.conn.send(request_data).await?;
let data = self
.conn
.try_next()
.await?
.ok_or_else(|| Error::DaemonError("EOF".into()))?;
Ok(data)
}
} | random_line_split |
|
app.component.ts | import { Component, OnInit, HostListener, Input } from '@angular/core';
import { FormBuilder, FormGroup, Validators, AbstractControl } from "@angular/forms";
import { hexToHsl, rgbToHsl, hexToRgb } from '../common/color-conversion';
import { MdDialog, MdDialogRef, MdDialogConfig} from "@angular/material";
@Component({
selector: 'app-root',
templateUrl: './app.component.html',
styleUrls: ['./app.component.css']
})
export class AppComponent implements OnInit {
hexForm: FormGroup;
hexValue: AbstractControl;
hexToHsl: Function;
baseHsl: any;
tmpHsl: any;
baseHslArr2: Array<any>;
hslArr1: Array<any>;
row1ShowBtnCopy: Array<any>;
hslArr2: Array<any>;
row2ShowBtnCopy: Array<any>;
hslArr3: Array<any>;
row3ShowBtnCopy: Array<any>;
bodyWidth: number;
toggleColorPicker: boolean;
selectedColor: string;
defaultColorFormat: string;
constructor(private fb: FormBuilder, public modal: MdDialog) {
this.hexToHsl = hexToHsl;
}
ngOnInit(): void {
this.buildForm();
this.tmpHsl = [];
this.baseHsl = '';
this.baseHslArr2 = [];
this.hslArr1 = [];
this.hslArr2 = [];
this.hslArr3 = [];
this.row1ShowBtnCopy = [false, false, false, false, false, false, false, false, false];
this.row2ShowBtnCopy = [false, false, false, false, false, false, false, false, false];
this.row3ShowBtnCopy = [false, false, false, false, false, false, false, false, false];
this.bodyWidth = window.innerWidth || document.body.clientWidth;
this.toggleColorPicker = false;
this.defaultColorFormat = 'HSL';
this.initHslaArrs();
}
buildForm(): void {
this.hexForm = this.fb.group({
'hexValue': ['', [
Validators.pattern(/(^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$)|(^[A-Fa-f0-9]{6}$)|(^[A-Fa-f0-9]{3}$)/)
]]
});
this.hexValue = this.hexForm.controls['hexValue'];
}
_keyPress(event: any) {
const pattern = /#|[A-Fa-f0-9]/;
let inputChar = String.fromCharCode(event.charCode);
if (event.code === "Enter" && this.hexForm.get('hexValue').value.length > 0) {
this.clearHexVal();
this.calBaseHsl(this.hexForm.get('hexValue').value);
return;
}
if (!pattern.test(inputChar)) {
// invalid character, prevent input
event.preventDefault();
}
}
dealHashStr(str: string): string {
let returnStr = '';
if (str.length > 0) {
if (str.indexOf('#') === -1) {
returnStr = '#' + str;
} else {
returnStr = str;
}
}
return returnStr;
}
@HostListener('window:resize', ['$event'])
onResize(event) {
this.bodyWidth = event.target.innerWidth;
}
calBaseHsl(hexVal): any {
if (hexVal.length === 0) return;
this.clearHexVal();
this.tmpHsl = hexToHsl(this.dealHashStr(hexVal));
this.baseHsl = `hsl(${this.tmpHsl[0]}, ${this.tmpHsl[1]}%, ${this.tmpHsl[2]}%)`;
this.calHslArr2();
}
calHslArr2(): void {
let baseHue = parseInt(this.tmpHsl[0], 10);
let baseSaturation = parseInt(this.tmpHsl[1], 10);
let baseLightness = parseInt(this.tmpHsl[2], 10);
let tmpHue = 0;
for (let i = 0; i < 9; i++) {
tmpHue = (baseHue + 40 * i) % 360;
this.baseHslArr2.push([tmpHue, baseSaturation, baseLightness]);
this.hslArr2.push(`hsl(${tmpHue}, ${baseSaturation}%, ${baseLightness}%)`);
}
this.calHslArr1();
this.calHslArr3();
}
initHslaArrs(): void {
this.clearHexVal();
for (let i = 0; i < 9; i++) {
this.hslArr1.push(`hsla(0, 0%, 85%, 0.${i * 10})`);
this.hslArr2.push(`hsla(0, 0%, 55%, 0.${i * 10})`);
this.hslArr3.push(`hsla(0, 0%, 25%, 0.${i * 10})`);
}
}
calHslArr1(): void {
let tmpHue = 0;
let tmpSaturation = 0;
let tmpLightness = 0;
for (let i = 0; i < 9; i++) {
tmpHue = Math.ceil((this.baseHslArr2[i][0] + 3.6) % 360);
tmpSaturation = this.baseHslArr2[i][1];
if ((this.baseHslArr2[i][1] - 29) >= 0) {
tmpSaturation = (this.baseHslArr2[i][1] - 29);
}
tmpLightness = this.baseHslArr2[i][2];
if ((this.baseHslArr2[i][2] + 3) <= 100) {
tmpLightness = (this.baseHslArr2[i][2] + 3);
}
this.hslArr1.push(`hsl(${tmpHue}, ${tmpSaturation}%, ${tmpLightness}%)`);
}
}
clearHexVal(): void {
this.hslArr2 = [];
this.hslArr1 = [];
this.hslArr3 = [];
this.baseHslArr2 = [];
}
calHslArr3(): void {
let tmpHue = 0;
let tmpSaturation;
let tmpLightness;
for (let i = 0; i < 9; i++) {
tmpHue = Math.ceil((this.baseHslArr2[i][0] - 3.6) % 360);
tmpSaturation = this.baseHslArr2[i][1];
if ((this.baseHslArr2[i][1] + 1) <= 100) {
tmpSaturation = (this.baseHslArr2[i][1] + 1);
}
tmpLightness = this.baseHslArr2[i][2];
if ((this.baseHslArr2[i][2] - 13 ) >= 0) {
tmpLightness = (this.baseHslArr2[i][2] - 13);
}
this.hslArr3.push(`hsl(${tmpHue}, ${tmpSaturation}%, ${tmpLightness}%)`);
}
}
openSnackBar(val) {
// Get the snackbar DIV
let x = document.getElementById("snackbar")
// Add the "show" class to DIV
x.className = "show";
this.selectedColor = val;
// After 2 seconds, remove the show class from DIV
setTimeout(function () {
x.className = x.className.replace("show", "");
}, 2000);
}
openCopyModal() {
let config = new MdDialogConfig();
config.width = `500px`; | modalRef.componentInstance.hslArr2 = this.hslArr2;
modalRef.componentInstance.hslArr3 = this.hslArr3;
}
}
@Component({
selector: 'photo-modal',
templateUrl: './photo-modal.html',
styleUrls: ['./app.component.css']
})
export class PhotoModalComponent {
@Input() hslArr1: Array<any>;
@Input() hslArr2: Array<any>;
@Input() hslArr3: Array<any>;
constructor(public modalRef: MdDialogRef<PhotoModalComponent>) {
}
} | config.height = `400px`;
config.disableClose = true;
let modalRef: MdDialogRef<PhotoModalComponent> = this.modal.open(PhotoModalComponent, config);
modalRef.componentInstance.hslArr1 = this.hslArr1; | random_line_split |
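// The component above only shows that hexToHsl('#rrggbb') returns something indexable as
// [hue, saturation, lightness]. The shared '../common/color-conversion' helper is not
// defined here; this is a minimal assumed implementation returning [h, s%, l%] as numbers.
export function hexToHslSketch(hex: string): [number, number, number] {
  const clean = hex.replace('#', '');
  const full = clean.length === 3 ? clean.split('').map(c => c + c).join('') : clean;
  const r = parseInt(full.substring(0, 2), 16) / 255;
  const g = parseInt(full.substring(2, 4), 16) / 255;
  const b = parseInt(full.substring(4, 6), 16) / 255;
  const max = Math.max(r, g, b);
  const min = Math.min(r, g, b);
  const l = (max + min) / 2;
  let h = 0;
  let s = 0;
  if (max !== min) {
    const d = max - min;
    s = l > 0.5 ? d / (2 - max - min) : d / (max + min);
    switch (max) {
      case r: h = (g - b) / d + (g < b ? 6 : 0); break;
      case g: h = (b - r) / d + 2; break;
      default: h = (r - g) / d + 4;
    }
    h /= 6;
  }
  return [Math.round(h * 360), Math.round(s * 100), Math.round(l * 100)];
}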
app.component.ts | import { Component, OnInit, HostListener, Input } from '@angular/core';
import { FormBuilder, FormGroup, Validators, AbstractControl } from "@angular/forms";
import { hexToHsl, rgbToHsl, hexToRgb } from '../common/color-conversion';
import { MdDialog, MdDialogRef, MdDialogConfig} from "@angular/material";
@Component({
selector: 'app-root',
templateUrl: './app.component.html',
styleUrls: ['./app.component.css']
})
export class AppComponent implements OnInit {
hexForm: FormGroup;
hexValue: AbstractControl;
hexToHsl: Function;
baseHsl: any;
tmpHsl: any;
baseHslArr2: Array<any>;
hslArr1: Array<any>;
row1ShowBtnCopy: Array<any>;
hslArr2: Array<any>;
row2ShowBtnCopy: Array<any>;
hslArr3: Array<any>;
row3ShowBtnCopy: Array<any>;
bodyWidth: number;
toggleColorPicker: boolean;
selectedColor: string;
defaultColorFormat: string;
constructor(private fb: FormBuilder, public modal: MdDialog) {
this.hexToHsl = hexToHsl;
}
ngOnInit(): void {
this.buildForm();
this.tmpHsl = [];
this.baseHsl = '';
this.baseHslArr2 = [];
this.hslArr1 = [];
this.hslArr2 = [];
this.hslArr3 = [];
this.row1ShowBtnCopy = [false, false, false, false, false, false, false, false, false];
this.row2ShowBtnCopy = [false, false, false, false, false, false, false, false, false];
this.row3ShowBtnCopy = [false, false, false, false, false, false, false, false, false];
this.bodyWidth = window.innerWidth || document.body.clientWidth;
this.toggleColorPicker = false;
this.defaultColorFormat = 'HSL';
this.initHslaArrs();
}
buildForm(): void {
this.hexForm = this.fb.group({
'hexValue': ['', [
Validators.pattern(/(^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$)|(^[A-Fa-f0-9]{6}$)|(^[A-Fa-f0-9]{3}$)/)
]]
});
this.hexValue = this.hexForm.controls['hexValue'];
}
_keyPress(event: any) {
const pattern = /#|[A-Fa-f0-9]/;
let inputChar = String.fromCharCode(event.charCode);
if (event.code === "Enter" && this.hexForm.get('hexValue').value.length > 0) {
this.clearHexVal();
this.calBaseHsl(this.hexForm.get('hexValue').value);
return;
}
if (!pattern.test(inputChar)) {
// invalid character, prevent input
event.preventDefault();
}
}
dealHashStr(str: string): string {
let returnStr = '';
if (str.length > 0) {
if (str.indexOf('#') === -1) {
returnStr = '#' + str;
} else {
returnStr = str;
}
}
return returnStr;
}
@HostListener('window:resize', ['$event'])
onResize(event) {
this.bodyWidth = event.target.innerWidth;
}
calBaseHsl(hexVal): any {
if (hexVal.length === 0) return;
this.clearHexVal();
this.tmpHsl = hexToHsl(this.dealHashStr(hexVal));
this.baseHsl = `hsl(${this.tmpHsl[0]}, ${this.tmpHsl[1]}%, ${this.tmpHsl[2]}%)`;
this.calHslArr2();
}
calHslArr2(): void {
let baseHue = parseInt(this.tmpHsl[0], 10);
let baseSaturation = parseInt(this.tmpHsl[1], 10);
let baseLightness = parseInt(this.tmpHsl[2], 10);
let tmpHue = 0;
for (let i = 0; i < 9; i++) {
tmpHue = (baseHue + 40 * i) % 360;
this.baseHslArr2.push([tmpHue, baseSaturation, baseLightness]);
this.hslArr2.push(`hsl(${tmpHue}, ${baseSaturation}%, ${baseLightness}%)`);
}
this.calHslArr1();
this.calHslArr3();
}
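  // Worked example of the spacing used in calHslArr2 above: nine hues 40 degrees apart
  // around the colour wheel, keeping the base saturation/lightness. For a base of
  // hsl(200, 60%, 50%) the generated hues are [200, 240, 280, 320, 0, 40, 80, 120, 160].
  // Hypothetical helper for illustration only; it is not called elsewhere in the component.
  private previewHues(baseHue: number): number[] {
    return Array.from({ length: 9 }, (_, i) => (baseHue + 40 * i) % 360);
  }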
initHslaArrs(): void {
this.clearHexVal();
for (let i = 0; i < 9; i++) {
this.hslArr1.push(`hsla(0, 0%, 85%, 0.${i * 10})`);
this.hslArr2.push(`hsla(0, 0%, 55%, 0.${i * 10})`);
this.hslArr3.push(`hsla(0, 0%, 25%, 0.${i * 10})`);
}
}
calHslArr1(): void {
let tmpHue = 0;
let tmpSaturation = 0;
let tmpLightness = 0;
for (let i = 0; i < 9; i++) {
tmpHue = Math.ceil((this.baseHslArr2[i][0] + 3.6) % 360);
tmpSaturation = this.baseHslArr2[i][1];
if ((this.baseHslArr2[i][1] - 29) >= 0) {
tmpSaturation = (this.baseHslArr2[i][1] - 29);
}
tmpLightness = this.baseHslArr2[i][2];
if ((this.baseHslArr2[i][2] + 3) <= 100) {
tmpLightness = (this.baseHslArr2[i][2] + 3);
}
this.hslArr1.push(`hsl(${tmpHue}, ${tmpSaturation}%, ${tmpLightness}%)`);
}
}
clearHexVal(): void {
this.hslArr2 = [];
this.hslArr1 = [];
this.hslArr3 = [];
this.baseHslArr2 = [];
}
calHslArr3(): void {
let tmpHue = 0;
let tmpSaturation;
let tmpLightness;
for (let i = 0; i < 9; i++) {
tmpHue = Math.ceil((this.baseHslArr2[i][0] - 3.6) % 360);
tmpSaturation = this.baseHslArr2[i][1];
if ((this.baseHslArr2[i][1] + 1) <= 100) {
tmpSaturation = (this.baseHslArr2[i][1] + 1);
}
tmpLightness = this.baseHslArr2[i][2];
if ((this.baseHslArr2[i][2] - 13 ) >= 0) {
tmpLightness = (this.baseHslArr2[i][2] - 13);
}
this.hslArr3.push(`hsl(${tmpHue}, ${tmpSaturation}%, ${tmpLightness}%)`);
}
}
openSnackBar(val) {
// Get the snackbar DIV
let x = document.getElementById("snackbar")
// Add the "show" class to DIV
x.className = "show";
this.selectedColor = val;
// After 2 seconds, remove the show class from DIV
setTimeout(function () {
x.className = x.className.replace("show", "");
}, 2000);
}
openCopyModal() {
let config = new MdDialogConfig();
config.width = `500px`;
config.height = `400px`;
config.disableClose = true;
let modalRef: MdDialogRef<PhotoModalComponent> = this.modal.open(PhotoModalComponent, config);
modalRef.componentInstance.hslArr1 = this.hslArr1;
modalRef.componentInstance.hslArr2 = this.hslArr2;
modalRef.componentInstance.hslArr3 = this.hslArr3;
}
}
@Component({
selector: 'photo-modal',
templateUrl: './photo-modal.html',
styleUrls: ['./app.component.css']
})
export class PhotoModalComponent {
@Input() hslArr1: Array<any>;
@Input() hslArr2: Array<any>;
@Input() hslArr3: Array<any>;
| (public modalRef: MdDialogRef<PhotoModalComponent>) {
}
} | constructor | identifier_name |
app.component.ts | import { Component, OnInit, HostListener, Input } from '@angular/core';
import { FormBuilder, FormGroup, Validators, AbstractControl } from "@angular/forms";
import { hexToHsl, rgbToHsl, hexToRgb } from '../common/color-conversion';
import { MdDialog, MdDialogRef, MdDialogConfig} from "@angular/material";
@Component({
selector: 'app-root',
templateUrl: './app.component.html',
styleUrls: ['./app.component.css']
})
export class AppComponent implements OnInit {
hexForm: FormGroup;
hexValue: AbstractControl;
hexToHsl: Function;
baseHsl: any;
tmpHsl: any;
baseHslArr2: Array<any>;
hslArr1: Array<any>;
row1ShowBtnCopy: Array<any>;
hslArr2: Array<any>;
row2ShowBtnCopy: Array<any>;
hslArr3: Array<any>;
row3ShowBtnCopy: Array<any>;
bodyWidth: number;
toggleColorPicker: boolean;
selectedColor: string;
defaultColorFormat: string;
constructor(private fb: FormBuilder, public modal: MdDialog) {
this.hexToHsl = hexToHsl;
}
ngOnInit(): void {
this.buildForm();
this.tmpHsl = [];
this.baseHsl = '';
this.baseHslArr2 = [];
this.hslArr1 = [];
this.hslArr2 = [];
this.hslArr3 = [];
this.row1ShowBtnCopy = [false, false, false, false, false, false, false, false, false];
this.row2ShowBtnCopy = [false, false, false, false, false, false, false, false, false];
this.row3ShowBtnCopy = [false, false, false, false, false, false, false, false, false];
this.bodyWidth = window.innerWidth || document.body.clientWidth;
this.toggleColorPicker = false;
this.defaultColorFormat = 'HSL';
this.initHslaArrs();
}
buildForm(): void {
this.hexForm = this.fb.group({
'hexValue': ['', [
Validators.pattern(/(^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$)|(^[A-Fa-f0-9]{6}$)|(^[A-Fa-f0-9]{3}$)/)
]]
});
this.hexValue = this.hexForm.controls['hexValue'];
}
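  // The hexValue validator above accepts 3- or 6-digit hex colours, with or without a
  // leading '#': '#1a2b3c', '1a2b3c', '#fff' and 'fff' all pass, while '#12345', '1a2b3g'
  // and '#ffff' are rejected. Hypothetical helper showing the same pattern in isolation:
  private isValidHex(value: string): boolean {
    return /(^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$)|(^[A-Fa-f0-9]{6}$)|(^[A-Fa-f0-9]{3}$)/.test(value);
  }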
_keyPress(event: any) {
const pattern = /#|[A-Fa-f0-9]/;
let inputChar = String.fromCharCode(event.charCode);
if (event.code === "Enter" && this.hexForm.get('hexValue').value.length > 0) {
this.clearHexVal();
this.calBaseHsl(this.hexForm.get('hexValue').value);
return;
}
if (!pattern.test(inputChar)) {
// invalid character, prevent input
event.preventDefault();
}
}
dealHashStr(str: string): string {
let returnStr = '';
if (str.length > 0) {
if (str.indexOf('#') === -1) {
returnStr = '#' + str;
} else {
returnStr = str;
}
}
return returnStr;
}
@HostListener('window:resize', ['$event'])
onResize(event) {
this.bodyWidth = event.target.innerWidth;
}
calBaseHsl(hexVal): any {
if (hexVal.length === 0) return;
this.clearHexVal();
this.tmpHsl = hexToHsl(this.dealHashStr(hexVal));
this.baseHsl = `hsl(${this.tmpHsl[0]}, ${this.tmpHsl[1]}%, ${this.tmpHsl[2]}%)`;
this.calHslArr2();
}
calHslArr2(): void {
let baseHue = parseInt(this.tmpHsl[0], 10);
let baseSaturation = parseInt(this.tmpHsl[1], 10);
let baseLightness = parseInt(this.tmpHsl[2], 10);
let tmpHue = 0;
for (let i = 0; i < 9; i++) {
tmpHue = (baseHue + 40 * i) % 360;
this.baseHslArr2.push([tmpHue, baseSaturation, baseLightness]);
this.hslArr2.push(`hsl(${tmpHue}, ${baseSaturation}%, ${baseLightness}%)`);
}
this.calHslArr1();
this.calHslArr3();
}
initHslaArrs(): void {
this.clearHexVal();
for (let i = 0; i < 9; i++) {
this.hslArr1.push(`hsla(0, 0%, 85%, 0.${i * 10})`);
this.hslArr2.push(`hsla(0, 0%, 55%, 0.${i * 10})`);
this.hslArr3.push(`hsla(0, 0%, 25%, 0.${i * 10})`);
}
}
calHslArr1(): void {
let tmpHue = 0;
let tmpSaturation = 0;
let tmpLightness = 0;
for (let i = 0; i < 9; i++) {
tmpHue = Math.ceil((this.baseHslArr2[i][0] + 3.6) % 360);
tmpSaturation = this.baseHslArr2[i][1];
if ((this.baseHslArr2[i][1] - 29) >= 0) {
tmpSaturation = (this.baseHslArr2[i][1] - 29);
}
tmpLightness = this.baseHslArr2[i][2];
if ((this.baseHslArr2[i][2] + 3) <= 100) {
tmpLightness = (this.baseHslArr2[i][2] + 3);
}
this.hslArr1.push(`hsl(${tmpHue}, ${tmpSaturation}%, ${tmpLightness}%)`);
}
}
clearHexVal(): void {
this.hslArr2 = [];
this.hslArr1 = [];
this.hslArr3 = [];
this.baseHslArr2 = [];
}
calHslArr3(): void {
let tmpHue = 0;
let tmpSaturation;
let tmpLightness;
for (let i = 0; i < 9; i++) |
}
openSnackBar(val) {
// Get the snackbar DIV
let x = document.getElementById("snackbar")
// Add the "show" class to DIV
x.className = "show";
this.selectedColor = val;
// After 2 seconds, remove the show class from DIV
setTimeout(function () {
x.className = x.className.replace("show", "");
}, 2000);
}
openCopyModal() {
let config = new MdDialogConfig();
config.width = `500px`;
config.height = `400px`;
config.disableClose = true;
let modalRef: MdDialogRef<PhotoModalComponent> = this.modal.open(PhotoModalComponent, config);
modalRef.componentInstance.hslArr1 = this.hslArr1;
modalRef.componentInstance.hslArr2 = this.hslArr2;
modalRef.componentInstance.hslArr3 = this.hslArr3;
}
}
@Component({
selector: 'photo-modal',
templateUrl: './photo-modal.html',
styleUrls: ['./app.component.css']
})
export class PhotoModalComponent {
@Input() hslArr1: Array<any>;
@Input() hslArr2: Array<any>;
@Input() hslArr3: Array<any>;
constructor(public modalRef: MdDialogRef<PhotoModalComponent>) {
}
} | {
tmpHue = Math.ceil((this.baseHslArr2[i][0] - 3.6) % 360);
tmpSaturation = this.baseHslArr2[i][1];
if ((this.baseHslArr2[i][1] + 1) <= 100) {
tmpSaturation = (this.baseHslArr2[i][1] + 1);
}
tmpLightness = this.baseHslArr2[i][2];
if ((this.baseHslArr2[i][2] - 13 ) >= 0) {
tmpLightness = (this.baseHslArr2[i][2] - 13);
}
this.hslArr3.push(`hsl(${tmpHue}, ${tmpSaturation}%, ${tmpLightness}%)`);
} | conditional_block |
app.component.ts | import { Component, OnInit, HostListener, Input } from '@angular/core';
import { FormBuilder, FormGroup, Validators, AbstractControl } from "@angular/forms";
import { hexToHsl, rgbToHsl, hexToRgb } from '../common/color-conversion';
import { MdDialog, MdDialogRef, MdDialogConfig} from "@angular/material";
@Component({
selector: 'app-root',
templateUrl: './app.component.html',
styleUrls: ['./app.component.css']
})
export class AppComponent implements OnInit {
hexForm: FormGroup;
hexValue: AbstractControl;
hexToHsl: Function;
baseHsl: any;
tmpHsl: any;
baseHslArr2: Array<any>;
hslArr1: Array<any>;
row1ShowBtnCopy: Array<any>;
hslArr2: Array<any>;
row2ShowBtnCopy: Array<any>;
hslArr3: Array<any>;
row3ShowBtnCopy: Array<any>;
bodyWidth: number;
toggleColorPicker: boolean;
selectedColor: string;
defaultColorFormat: string;
constructor(private fb: FormBuilder, public modal: MdDialog) |
ngOnInit(): void {
this.buildForm();
this.tmpHsl = [];
this.baseHsl = '';
this.baseHslArr2 = [];
this.hslArr1 = [];
this.hslArr2 = [];
this.hslArr3 = [];
this.row1ShowBtnCopy = [false, false, false, false, false, false, false, false, false];
this.row2ShowBtnCopy = [false, false, false, false, false, false, false, false, false];
this.row3ShowBtnCopy = [false, false, false, false, false, false, false, false, false];
this.bodyWidth = window.innerWidth || document.body.clientWidth;
this.toggleColorPicker = false;
this.defaultColorFormat = 'HSL';
this.initHslaArrs();
}
buildForm(): void {
this.hexForm = this.fb.group({
'hexValue': ['', [
Validators.pattern(/(^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$)|(^[A-Fa-f0-9]{6}$)|(^[A-Fa-f0-9]{3}$)/)
]]
});
this.hexValue = this.hexForm.controls['hexValue'];
}
_keyPress(event: any) {
const pattern = /#|[A-Fa-f0-9]/;
let inputChar = String.fromCharCode(event.charCode);
if (event.code === "Enter" && this.hexForm.get('hexValue').value.length > 0) {
this.clearHexVal();
this.calBaseHsl(this.hexForm.get('hexValue').value);
return;
}
if (!pattern.test(inputChar)) {
// invalid character, prevent input
event.preventDefault();
}
}
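// Prepend a leading '#' when the user omitted it; empty input yields an empty string.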
dealHashStr(str: string): string {
let returnStr = '';
if (str.length > 0) {
if (str.indexOf('#') === -1) {
returnStr = '#' + str;
} else {
returnStr = str;
}
}
return returnStr;
}
@HostListener('window:resize', ['$event'])
onResize(event) {
this.bodyWidth = event.target.innerWidth;
}
calBaseHsl(hexVal): any {
if (hexVal.length === 0) return;
this.clearHexVal();
this.tmpHsl = hexToHsl(this.dealHashStr(hexVal));
this.baseHsl = `hsl(${this.tmpHsl[0]}, ${this.tmpHsl[1]}%, ${this.tmpHsl[2]}%)`;
this.calHslArr2();
}
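// Row 2 (base row): nine colours stepping the input hue 40° around the wheel at the original
// saturation and lightness; also fills baseHslArr2, which rows 1 and 3 derive from.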
calHslArr2(): void {
let baseHue = parseInt(this.tmpHsl[0], 10);
let baseSaturation = parseInt(this.tmpHsl[1], 10);
let baseLightness = parseInt(this.tmpHsl[2], 10);
let tmpHue = 0;
for (let i = 0; i < 9; i++) {
tmpHue = (baseHue + 40 * i) % 360;
this.baseHslArr2.push([tmpHue, baseSaturation, baseLightness]);
this.hslArr2.push(`hsl(${tmpHue}, ${baseSaturation}%, ${baseLightness}%)`);
}
this.calHslArr1();
this.calHslArr3();
}
initHslaArrs(): void {
this.clearHexVal();
for (let i = 0; i < 9; i++) {
this.hslArr1.push(`hsla(0, 0%, 85%, 0.${i * 10})`);
this.hslArr2.push(`hsla(0, 0%, 55%, 0.${i * 10})`);
this.hslArr3.push(`hsla(0, 0%, 25%, 0.${i * 10})`);
}
}
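// Row 1: rotate each base hue slightly (+3.6°, rounded up), desaturate by 29 points and
// lighten by 3 points whenever the adjusted values stay in range.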
calHslArr1(): void {
let tmpHue = 0;
let tmpSaturation = 0;
let tmpLightness = 0;
for (let i = 0; i < 9; i++) {
tmpHue = Math.ceil((this.baseHslArr2[i][0] + 3.6) % 360);
tmpSaturation = this.baseHslArr2[i][1];
if ((this.baseHslArr2[i][1] - 29) >= 0) {
tmpSaturation = (this.baseHslArr2[i][1] - 29);
}
tmpLightness = this.baseHslArr2[i][2];
if ((this.baseHslArr2[i][2] + 3) <= 100) {
tmpLightness = (this.baseHslArr2[i][2] + 3);
}
this.hslArr1.push(`hsl(${tmpHue}, ${tmpSaturation}%, ${tmpLightness}%)`);
}
}
clearHexVal(): void {
this.hslArr2 = [];
this.hslArr1 = [];
this.hslArr3 = [];
this.baseHslArr2 = [];
}
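// Row 3: rotate each base hue slightly (-3.6°), add 1 point of saturation and darken by
// 13 points whenever the adjusted values stay in range.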
calHslArr3(): void {
let tmpHue = 0;
let tmpSaturation;
let tmpLightness;
for (let i = 0; i < 9; i++) {
tmpHue = Math.ceil((this.baseHslArr2[i][0] - 3.6) % 360);
tmpSaturation = this.baseHslArr2[i][1];
if ((this.baseHslArr2[i][1] + 1) <= 100) {
tmpSaturation = (this.baseHslArr2[i][1] + 1);
}
tmpLightness = this.baseHslArr2[i][2];
if ((this.baseHslArr2[i][2] - 13 ) >= 0) {
tmpLightness = (this.baseHslArr2[i][2] - 13);
}
this.hslArr3.push(`hsl(${tmpHue}, ${tmpSaturation}%, ${tmpLightness}%)`);
}
}
openSnackBar(val) {
// Get the snackbar DIV
let x = document.getElementById("snackbar")
// Add the "show" class to DIV
x.className = "show";
this.selectedColor = val;
// After 2 seconds, remove the show class from DIV
setTimeout(function () {
x.className = x.className.replace("show", "");
}, 2000);
}
openCopyModal() {
let config = new MdDialogConfig();
config.width = `500px`;
config.height = `400px`;
config.disableClose = true;
let modalRef: MdDialogRef<PhotoModalComponent> = this.modal.open(PhotoModalComponent, config);
modalRef.componentInstance.hslArr1 = this.hslArr1;
modalRef.componentInstance.hslArr2 = this.hslArr2;
modalRef.componentInstance.hslArr3 = this.hslArr3;
}
}
@Component({
selector: 'photo-modal',
templateUrl: './photo-modal.html',
styleUrls: ['./app.component.css']
})
export class PhotoModalComponent {
@Input() hslArr1: Array<any>;
@Input() hslArr2: Array<any>;
@Input() hslArr3: Array<any>;
constructor(public modalRef: MdDialogRef<PhotoModalComponent>) {
}
} | {
this.hexToHsl = hexToHsl;
} | identifier_body |
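
The Angular samples above import `hexToHsl` from `../common/color-conversion`, a module that is not included in this dump. Purely as a sketch of what that helper is assumed to provide — an `[hue, saturation, lightness]` triple with hue in 0–360 and saturation/lightness in 0–100, inferred from how `calBaseHsl()` indexes the result — something along these lines would work; it is not the project's actual implementation:

```typescript
// Hypothetical stand-in for the hexToHsl helper imported by app.component.ts.
// Assumes the "#rgb" / "#rrggbb" inputs accepted by the form's validator.
export function hexToHsl(hex: string): [number, number, number] {
  let value = hex.replace('#', '');
  if (value.length === 3) {
    value = value.split('').map(c => c + c).join(''); // expand "abc" to "aabbcc"
  }
  const r = parseInt(value.substring(0, 2), 16) / 255;
  const g = parseInt(value.substring(2, 4), 16) / 255;
  const b = parseInt(value.substring(4, 6), 16) / 255;

  const max = Math.max(r, g, b);
  const min = Math.min(r, g, b);
  const l = (max + min) / 2;
  let h = 0;
  let s = 0;

  if (max !== min) {
    const d = max - min;
    s = l > 0.5 ? d / (2 - max - min) : d / (max + min);
    switch (max) {
      case r: h = (g - b) / d + (g < b ? 6 : 0); break;
      case g: h = (b - r) / d + 2; break;
      default: h = (r - g) / d + 4; break;
    }
    h *= 60;
  }

  // Hue in degrees, saturation/lightness as percentages, as calBaseHsl() expects.
  return [Math.round(h), Math.round(s * 100), Math.round(l * 100)];
}
```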
placement-in-syntax.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code, unused_variables)]
#![feature(box_heap)]
#![feature(placement_in_syntax)] | //
// Compare with new-box-syntax.rs
use std::boxed::{Box, HEAP};
struct Structure {
x: isize,
y: isize,
}
pub fn main() {
let x: Box<isize> = in HEAP { 2 };
let b: Box<isize> = in HEAP { 1 + 2 };
let c = in HEAP { 3 + 4 };
let s: Box<Structure> = in HEAP {
Structure {
x: 3,
y: 4,
}
};
} |
// Tests that the new `in` syntax works with unique pointers. | random_line_split |
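
The placement-in-syntax.rs sample above only compiles on an old nightly toolchain: `placement_in_syntax` and `box_heap` were unstable feature gates, and the `in PLACE { EXPR }` syntax was later removed from Rust. As a rough stable-Rust equivalent — an editorial sketch, not part of the original test file — the same allocations are written with `Box::new`:

```rust
// Stable rewrite of the placement-in test: each `in HEAP { expr }` becomes Box::new(expr).
#[allow(dead_code)]
struct Structure {
    x: isize,
    y: isize,
}

fn main() {
    let x: Box<isize> = Box::new(2);
    let b: Box<isize> = Box::new(1 + 2);
    let c = Box::new(3 + 4);
    let s: Box<Structure> = Box::new(Structure { x: 3, y: 4 });
    // Keep the bindings "used" so the sketch compiles without warnings.
    let _ = (x, b, c, s);
}
```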
placement-in-syntax.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code, unused_variables)]
#![feature(box_heap)]
#![feature(placement_in_syntax)]
// Tests that the new `in` syntax works with unique pointers.
//
// Compare with new-box-syntax.rs
use std::boxed::{Box, HEAP};
struct Structure {
x: isize,
y: isize,
}
pub fn | () {
let x: Box<isize> = in HEAP { 2 };
let b: Box<isize> = in HEAP { 1 + 2 };
let c = in HEAP { 3 + 4 };
let s: Box<Structure> = in HEAP {
Structure {
x: 3,
y: 4,
}
};
}
| main | identifier_name |