test_lms_matlab_problem.py

# -*- coding: utf-8 -*-
"""
Tests for MATLAB problems.
"""
import time
from textwrap import dedent

from ...pages.lms.matlab_problem import MatlabProblemPage
from ...fixtures.course import XBlockFixtureDesc
from ...fixtures.xqueue import XQueueResponseFixture
from .test_lms_problems import ProblemsTest


class MatlabProblemTest(ProblemsTest):
    """
    Tests that verify the MATLAB problem "Run Code" button.
    """

    def get_problem(self):
        """
        Create a MATLAB problem for the test.
        """
        problem_data = dedent("""
            <problem markdown="null">
                <text>
                    <p>
                        Write MATLAB code to create the following row vector and store it in a variable named <code>V</code>.
                    </p>
                    <table id="a0000000466" class="equation" width="100%" cellspacing="0" cellpadding="7" style="table-layout:auto">
                        <tr>
                            <td class="equation">[1 1 2 3 5 8 13]</td>
                        </tr>
                    </table>
                    <p>
                        <coderesponse queuename="matlab">
                            <matlabinput rows="10" cols="40" mode="" tabsize="4">
                                <plot_payload>
                                </plot_payload>
                            </matlabinput>
                            <codeparam>
                                <initial_display/>
                                <answer_display>
                                </answer_display>
                                <grader_payload>
                                </grader_payload>
                            </codeparam>
                        </coderesponse>
                    </p>
                </text>
            </problem>
        """)
        return XBlockFixtureDesc('problem', 'Test Matlab Problem', data=problem_data)

    def _goto_matlab_problem_page(self):
        """
        Open the MATLAB problem page, asserting that the right problem loads.
        """
        self.courseware_page.visit()
        matlab_problem_page = MatlabProblemPage(self.browser)
        self.assertEqual(matlab_problem_page.problem_name, 'TEST MATLAB PROBLEM')
        return matlab_problem_page

    def test_run_code(self):
        """
        Test "Run Code" button functionality.
        """
        # Enter a submission, which will trigger a pre-defined response from
        # the XQueue stub.
        self.submission = "a=1" + self.unique_id[0:5]
        self.xqueue_grade_response = {'msg': self.submission}

        matlab_problem_page = self._goto_matlab_problem_page()

        # Configure the XQueue stub's response for the text we will submit.
        if self.xqueue_grade_response is not None:
            XQueueResponseFixture(self.submission, self.xqueue_grade_response).install()

        matlab_problem_page.set_response(self.submission)
        matlab_problem_page.click_run_code()

        self.assertEqual(
            u'Submitted. As soon as a response is returned, this message will be replaced by that feedback.',
            matlab_problem_page.get_grader_msg(".external-grader-message")[0]
        )

        # Wait 5 seconds for the XQueue stub's grader response to be sent
        # back to the LMS.
        time.sleep(5)

        self.assertEqual(u'', matlab_problem_page.get_grader_msg(".external-grader-message")[0])
        self.assertEqual(
            self.xqueue_grade_response.get("msg"),
            matlab_problem_page.get_grader_msg(".ungraded-matlab-result")[0]
        )
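
The fixed time.sleep(5) above couples the test to the stub's response time. A polling loop is usually more robust; the sketch below is an assumption, not part of the original suite — wait_for and its timeout values are invented names for illustration.

import time


def wait_for(condition, timeout=10.0, interval=0.5):
    """Poll `condition` until it returns a truthy value or `timeout` elapses."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        if condition():
            return True
        time.sleep(interval)
    return False

# Hypothetical usage inside test_run_code, replacing the fixed sleep:
# self.assertTrue(wait_for(
#     lambda: matlab_problem_page.get_grader_msg(".external-grader-message")[0] == u''
# ))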
cleanup_sst.rs

// Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.

use std::fmt;
use std::marker::PhantomData;
use std::sync::Arc;

use engine_traits::KvEngine;
use kvproto::import_sstpb::SstMeta;
use pd_client::PdClient;
use sst_importer::SSTImporter;
use tikv_util::worker::Runnable;

use crate::store::util::is_epoch_stale;
use crate::store::{StoreMsg, StoreRouter};

pub enum Task {
    DeleteSST { ssts: Vec<SstMeta> },
    ValidateSST { ssts: Vec<SstMeta> },
}

impl fmt::Display for Task {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            Task::DeleteSST { ref ssts } => write!(f, "Delete {} ssts", ssts.len()),
            Task::ValidateSST { ref ssts } => write!(f, "Validate {} ssts", ssts.len()),
        }
    }
}

pub struct Runner<EK, C, S>
where
    EK: KvEngine,
    S: StoreRouter<EK>,
{
    store_id: u64,
    store_router: S,
    importer: Arc<SSTImporter>,
    pd_client: Arc<C>,
    _engine: PhantomData<EK>,
}

impl<EK, C, S> Runner<EK, C, S>
where
    EK: KvEngine,
    C: PdClient,
    S: StoreRouter<EK>,
{
    pub fn new(
        store_id: u64,
        store_router: S,
        importer: Arc<SSTImporter>,
        pd_client: Arc<C>,
    ) -> Runner<EK, C, S> {
        Runner {
            store_id,
            store_router,
            importer,
            pd_client,
            _engine: PhantomData,
        }
    }

    /// Deletes SST files from the importer.
    fn handle_delete_sst(&self, ssts: Vec<SstMeta>) {
        for sst in &ssts {
            let _ = self.importer.delete(sst);
        }
    }

    /// Validates whether the SST is stale or not.
    fn handle_validate_sst(&self, ssts: Vec<SstMeta>) {
        let store_id = self.store_id;
        let mut invalid_ssts = Vec::new();
        for sst in ssts {
            match self.pd_client.get_region(sst.get_range().get_start()) {
                Ok(r) => {
                    // The region id may or may not be the same as the
                    // SST file, but it doesn't matter, because the
                    // epoch of a range will not decrease anyway.
                    if is_epoch_stale(r.get_region_epoch(), sst.get_region_epoch()) {
                        // Region has not been updated.
                        continue;
                    }
                    if r.get_id() == sst.get_region_id()
                        && r.get_peers().iter().any(|p| p.get_store_id() == store_id)
                    {
                        // The SST still belongs to this store.
                        continue;
                    }
                    invalid_ssts.push(sst);
                }
                Err(e) => {
                    error!(%e; "get region failed");
                }
            }
        }

        // We need to send back the result to check for the stale
        // peer, which may ingest the stale SST before it is
        // destroyed.
        let msg = StoreMsg::ValidateSSTResult { invalid_ssts };
        if let Err(e) = self.store_router.send(msg) {
            error!(%e; "send validate sst result failed");
        }
    }
}

impl<EK, C, S> Runnable for Runner<EK, C, S>
where
    EK: KvEngine,
    C: PdClient,
    S: StoreRouter<EK>,
{
    type Task = Task;

    fn run(&mut self, task: Task) {
        match task {
            Task::DeleteSST { ssts } => {
                self.handle_delete_sst(ssts);
            }
            Task::ValidateSST { ssts } => {
                self.handle_validate_sst(ssts);
            }
        }
    }
}
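
handle_validate_sst keeps an SST when PD's view of the region is stale relative to the SST's epoch (the region has not changed since the SST was produced), or when the region id and store id still match; everything else is reported as invalid. The same decision, restated as a small language-neutral Python sketch — the dict field names mirror the Rust accessors and are assumptions, not a real API:

def is_epoch_stale(epoch, check_epoch):
    # Mirrors the epoch comparison: stale if either component lags behind.
    return (epoch['version'] < check_epoch['version']
            or epoch['conf_ver'] < check_epoch['conf_ver'])


def sst_is_invalid(region, sst, store_id):
    if is_epoch_stale(region['epoch'], sst['epoch']):
        return False  # region has not been updated; keep the SST
    if region['id'] == sst['region_id'] and store_id in region['store_ids']:
        return False  # the SST still belongs to this store; keep it
    return True       # stale SST: report it for cleanup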
protocol.py

    _dividends. They are guaranteed to be unique
    integers within the context of a single simulation. If @data is non-empty,
    an id is required to identify the historical dividend associated with this
    payment.

    Additionally, if @data is non-empty, either data['cash_amount'] should be
    nonzero or data['payment_sid'] should be a security identifier and
    data['share_count'] should be nonzero.

    The returned Series is given its id value as a name so that concatenating
    payments results in a DataFrame indexed by id. (Note, however, that the
    name value is not used to construct an index when this series is returned
    by a function passed to `DataFrame.apply`. In such a case, pandas preserves
    the index of the DataFrame on which `apply` is being called.)
    """
    return pd.Series(
        data=data,
        name=data['id'] if data is not None else None,
        index=DIVIDEND_PAYMENT_FIELDS,
        dtype=object,
    )


class Event(object):

    def __init__(self, initial_values=None):
        if initial_values:
            self.__dict__ = initial_values

    def __getitem__(self, name):
        return getattr(self, name)

    def __setitem__(self, name, value):
        setattr(self, name, value)

    def __delitem__(self, name):
        delattr(self, name)

    def keys(self):
        return self.__dict__.keys()

    def __eq__(self, other):
        return hasattr(other, '__dict__') and self.__dict__ == other.__dict__

    def __contains__(self, name):
        return name in self.__dict__

    def __repr__(self):
        return "Event({0})".format(self.__dict__)

    def to_series(self, index=None):
        return pd.Series(self.__dict__, index=index)


class Order(Event):
    pass


class Portfolio(object):

    def __init__(self):
        self.capital_used = 0.0
        self.starting_cash = 0.0
        self.portfolio_value = 0.0
        self.pnl = 0.0
        self.returns = 0.0
        self.cash = 0.0
        self.positions = Positions()
        self.start_date = None
        self.positions_value = 0.0

    def __getitem__(self, key):
        return self.__dict__[key]

    def __repr__(self):
        return "Portfolio({0})".format(self.__dict__)


class Account(object):
    '''
    The account object tracks information about the trading account. The
    values are updated as the algorithm runs and its keys remain unchanged.
    If connected to a broker, one can update these values with the trading
    account values as reported by the broker.
    '''

    def __init__(self):
        self.settled_cash = 0.0
        self.accrued_interest = 0.0
        self.buying_power = float('inf')
        self.equity_with_loan = 0.0
        self.total_positions_value = 0.0
        self.regt_equity = 0.0
        self.regt_margin = float('inf')
        self.initial_margin_requirement = 0.0
        self.maintenance_margin_requirement = 0.0
        self.available_funds = 0.0
        self.excess_liquidity = 0.0
        self.cushion = 0.0
        self.day_trades_remaining = float('inf')
        self.leverage = 0.0
        self.net_liquidation = 0.0

    def __getitem__(self, key):
        return self.__dict__[key]

    def __repr__(self):
        return "Account({0})".format(self.__dict__)

    def _get_state(self):
        return 'Account', self.__dict__

    def _set_state(self, saved_state):
        self.__dict__.update(saved_state)


class Position(object):

    def __init__(self, sid):
        self.sid = sid
        self.amount = 0
        self.cost_basis = 0.0  # per share
        self.last_sale_price = 0.0

    def __getitem__(self, key):
        return self.__dict__[key]

    def __repr__(self):
        return "Position({0})".format(self.__dict__)


class Positions(dict):

    def __missing__(self, key):
        pos = Position(key)
        self[key] = pos
        return pos


class SIDData(object):
    # Cache some data on the class so that this is shared for all instances
    # of SIDData.

    # The dt where we cached the history.
    _history_cache_dt = None
    # _history_cache is a dict mapping fields to pd.DataFrames. This is the
    # most data we have for a given field for the _history_cache_dt.
    _history_cache = {}

    # This is the cache that is used for returns. This will have a different
    # structure than the other history cache as this is always daily.
    _returns_cache_dt = None
    _returns_cache = None

    # The last dt that we needed to cache the number of minutes.
    _minute_bar_cache_dt = None
    # If we are in minute mode, there is some cost associated with computing
    # the number of minutes that we need to pass to the bar count of history.
    # This will remain constant for a given bar and day count.
    # This maps days to number of minutes.
    _minute_bar_cache = {}

    def __init__(self, sid, initial_values=None):
        self._sid = sid
        self._freqstr = None

        # To check if we have data, we use the __len__ which depends on the
        # __dict__. Because we are forward defining the attributes needed, we
        # need to account for their entries in the __dict__.
        # We will add 1 because we need to account for the _initial_len entry
        # itself.
        self._initial_len = len(self.__dict__) + 1

        if initial_values:
            self.__dict__.update(initial_values)

    @property
    def datetime(self):
        """
        Provides an alias from data['foo'].datetime -> data['foo'].dt

        `datetime` was previously provided by adding a separate `datetime`
        member of the SIDData object via a generator that wrapped the incoming
        data feed and added the field to each equity event.

        This alias is intended to be temporary, to provide backwards
        compatibility with existing algorithms, but should be considered
        deprecated, and may be removed in the future.
        """
        return self.dt

    def get(self, name, default=None):
        return self.__dict__.get(name, default)

    def __getitem__(self, name):
        return self.__dict__[name]

    def __setitem__(self, name, value):
        self.__dict__[name] = value

    def __len__(self):
        return len(self.__dict__) - self._initial_len

    def __contains__(self, name):
        return name in self.__dict__

    def __repr__(self):
        return "SIDData({0})".format(self.__dict__)

    def _get_buffer(self, bars, field='price'):
        """
        Gets the result of history for the given number of bars and field.

        This will cache the results internally.
        """
        cls = self.__class__
        algo = get_algo_instance()

        now = algo.datetime
        if now != cls._history_cache_dt:
            # For a given dt, the history call for this field will not
            # change. We have a new dt, so we should reset the cache.
            cls._history_cache_dt = now
            cls._history_cache = {}

        if field not in self._history_cache \
                or bars > len(cls._history_cache[field].index):
            # If we have never cached this field OR the amount of bars that
            # we need for this field is greater than the amount we have
            # cached, then we need to get more history.
            hst = algo.history(
                bars, self._freqstr, field, ffill=True,
            )
            # Assert that the column holds ints, not security objects.
            if not isinstance(self._sid, str):
                hst.columns = hst.columns.astype(int)
            self._history_cache[field] = hst

        # Slice off only the bars needed. This is because we store the
        # LARGEST amount of history for the field, and we might request less
        # than the largest from the cache.
        return cls._history_cache[field][self._sid][-bars:]

    def _get_bars(self, days):
        """
        Gets the number of bars needed for the current number of days.

        Figures this out based on the algo data frequency and caches the
        result. This caches the result by replacing this function on the
        object.

        This means that after the first call to _get_bars, this method will
        point to a new function object.
        """
        def daily_get_bars(days):
            return days

        @with_environment()
        def minute_get_bars(days, env=None):
            cls = self.__class__

            now = get_algo_instance().datetime
            if now != cls._minute_bar_cache_dt:
                cls._minute_bar_cache_dt = now
                cls._minute_bar_cache = {}

            if days not in cls._minute_bar_cache:
                # Cache this calculation to happen once per bar, even if we
                # use another transform with the same number of days.
                prev = env.previous_trading_day(now)
                ds = env.days_in_range(
                    env.add_trading_days(-days + 2, prev),
                    prev,
                )
                # Compute the number of minutes in the (days - 1) days before
                # today: 210 minutes in an early close and 390 in a full day.
                ms = sum(210 if d in env.early_closes else 390 for d in ds)
                # Add the number of minutes for today.
                ms += int(
                    (now - env.get_open_and_close(now)[0]).total_seconds() / 60
                )

                cls._minute_bar_cache[days] = ms + 1  # Account for this minute

            return cls._minute_bar_cache[days]

        if get_algo_instance().sim_params.data_frequency == 'daily':
            self._freqstr = '1d'
            # Update this method to point to the daily variant.
            self._get_bars = daily_get_bars
        else:
            self._freqstr = '1m'
            # Update this method to point to the minute variant.
            self._get_bars = minute_get_bars

        # Not actually recursive because we have already cached the new
        # method.
        return self._get_bars(days)

    def mavg(self, days):
        return self._get_buffer(self._get_bars(days)).mean()

    def stddev(self, days):
        return self._get_buffer(self._get_bars(days)).std(ddof=1)

    def vwap(self, days):
        bars = self._get_bars(days)
        prices = self._get_buffer(bars)
        vols = self._get_buffer(bars, field='volume')

        return (prices * vols).sum() / vols.sum()

    def returns(self):
        algo = get_algo_instance()

        now = algo.datetime
        if now != self._returns_cache_dt:
            self._returns_cache_dt = now
            self._returns_cache = algo.history(2, '1d', 'price', ffill=True)

        hst = self._returns_cache[self._sid]
        return (hst.iloc[-1] - hst.iloc[0]) / hst.iloc[0]


class BarData(object):
    """
    Holds the event data for all sids for a given dt.

    This is what is passed as `data` to the `handle_data` function.

    Note: many methods are analogues of dictionary methods because of
    historical usage of what this replaced as a dictionary subclass.
    """

    def __init__(self, data=None):
        self._data = data or {}
        self._contains_override = None

    def __contains__(self, name):
        if self._contains_override:
            if self._contains_override(name):
                return name in self._data
            else:
                return False
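
SIDData._get_bars caches its frequency dispatch by rebinding the method on the instance: the first call inspects the algorithm's data frequency, stores the chosen implementation as an instance attribute (which shadows the class method), and every later call goes straight to it. A minimal self-contained sketch of the same pattern — the class name and the 390-minute constant are illustrative, not from zipline:

class BarCounter(object):
    def __init__(self, data_frequency):
        self.data_frequency = data_frequency

    def get_bars(self, days):
        def daily_get_bars(days):
            return days

        def minute_get_bars(days):
            return days * 390  # illustrative: assumes full 390-minute sessions

        if self.data_frequency == 'daily':
            self.get_bars = daily_get_bars
        else:
            self.get_bars = minute_get_bars
        # Not actually recursive: the instance attribute now shadows the
        # class method, so this dispatch runs only once per instance.
        return self.get_bars(days)


counter = BarCounter('minute')
print(counter.get_bars(2))   # 780
print(counter.get_bars(2))   # 780 again, without re-checking the frequency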
project.component.ts

import { Component, OnInit } from '@angular/core';
import { TaskService, ProjectService, LabelService, UserService } from '../_services/index';
import { Task, Project, Label, User, Access } from '../_models/index';
import { Router, ActivatedRoute, Params } from '@angular/router';
import { NotificationsService } from 'angular2-notifications';
import { IMyDpOptions, IMyDateModel } from 'mydatepicker';

@Component({
    templateUrl: 'project.component.html'
})
export class ProjectComponent implements OnInit {
    tasks: any = {};
    dates: Array<any>;
    endDate: string;
    labels: Label[];
    users: User[];
    selectedTask: Task;
    error: any = {};
    validation: String;
    project_id: number;
    accessList: Array<Access>;
    today: Date = new Date();
    week: Date;

    private myDatePickerOptions: IMyDpOptions = {
        dateFormat: 'yyyy.mm.dd',
        ariaLabelInputField: 'end_date',
        firstDayOfWeek: 'mo',
        sunHighlight: true,
        showClearDateBtn: false
    };

    constructor(
        private taskService: TaskService,
        private projectService: ProjectService,
        private labelService: LabelService,
        private userService: UserService,
        private activatedRoute: ActivatedRoute,
        private _notificationsService: NotificationsService) {
    }

    ngOnInit() {
        this.activatedRoute.params.subscribe((params: Params) => {
            // Init date to create link in nav
            this.today = new Date();
            this.today.setHours(0);
            this.today.setMinutes(0);
            this.today.setSeconds(0, 0);
            this.week = new Date();
            this.week.setHours(0);
            this.week.setMinutes(0);
            this.week.setSeconds(0, 0);
            this.week.setSeconds(this.week.getSeconds() + 86400 * 6);
            // Get project_id to filter
            this.project_id = params['id'];
            // Get end_date to filter
            this.endDate = params['endDate'];
            // Selected task to open edit panel
            this.selectedTask = null;
            // Get data from API
            this.getListOfTasks(this.endDate);
            this.getListOfLabels();
            this.getListOfUsers();
        });
    }

    addTaskToList(task) {
        if (task.end_date.getTime() >= this.today.getTime()) {
            // If task is in the future
            if (this.dates.indexOf(task.getEndDateHuman()) < 0) {
                this.dates.push(task.getEndDateHuman());
                this.tasks[task.getEndDateHuman()] = [];
            }
            this.tasks[task.getEndDateHuman()].push(task);
        }
        else {
            // If task end_date is outdated
            if (this.dates.indexOf('Outdated') < 0) {
                this.dates.push('Outdated');
                this.tasks['Outdated'] = [];
            }
            this.tasks['Outdated'].push(task);
        }
    }

    getListOfTasks(endDate = null) {
        this.tasks = {};
        this.dates = [];
        if (endDate) {
            this.projectService.getTasksByEndDateLte(this.project_id, endDate)
                .subscribe(
                    data => {
                        data.map((task) => {
                            let newTask = new Task(task);
                            this.addTaskToList(newTask);
                        })
                    }
                );
        }
        else {
            this.projectService.getTasks(this.project_id)
                .subscribe(
                    data => {
                        data.map((task) => {
                            let newTask = new Task(task);
                            this.addTaskToList(newTask);
                        })
                    }
                );
        }
    }

    getListOfLabels() {
        this.labelService.listByProject(this.project_id)
            .subscribe(
                data => {
                    this.labels = data;
                }
            );
    }

    getListOfUsers() {
        this.users = [];
        this.projectService.getUsers(this.project_id)
            .subscribe(
                data => {
                    data.map((access) => {
                        this.userService.get(access.user)
                            .subscribe(
                                data => {
                                    this.users.push(data);
                                }
                            );
                    })
                }
            );
    }

    updateTask() {
        if (this.selectedTask.id) {
            this.taskService.update(this.selectedTask)
                .subscribe(
                    data => {
                        this.selectedTask = null;
                        this.getListOfTasks();
                        this._notificationsService.success(
                            'Task updated',
                            'The task has been correctly updated',
                            {
                                timeOut: 2000
                            }
                        )
                    },
                    error => {
                        this.error = JSON.parse(error._body);
                    }
                );
        }
        else {
            this.taskService.create(this.selectedTask)
                .subscribe(
                    data => {
                        this.selectedTask = null;
                        this.getListOfTasks();
                        this._notificationsService.success(
                            'Task created',
                            'The new task has been correctly created',
                            {
                                timeOut: 2000
                            }
                        )
                    },
                    error => {
                        this.error = JSON.parse(error._body);
                    }
                );
        }
    }

    deleteTask(task) {
        this.taskService.delete(task)
            .subscribe(
                data => {
                    if (this.selectedTask && this.selectedTask.id == task.id) {
                        this.selectedTask = null;
                    }
                    this.getListOfTasks();
                    this._notificationsService.success(
                        'Task deleted',
                        'The task has been deleted',
                        {
                            timeOut: 2000
                        }
                    )
                },
                error => {
                    this.error = JSON.parse(error._body);
                }
            );
    }

    addTask() {
        this.selectedTask = new Task();
        this.selectedTask.project.id = this.project_id;
    }

    changeSelectedTask(task) {
        this.selectedTask = new Task(JSON.parse(JSON.stringify(task)));
    }

    close() {
        this.selectedTask = null;
    }

    getLabelColor(label) {
        return '#' + label.color;
    }

    onDateChanged(event: IMyDateModel) {
        this.selectedTask.end_date = event.jsdate;
        console.log(this.selectedTask.end_date);
    }
}
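
addTaskToList groups tasks into one bucket per human-readable end date, plus a catch-all 'Outdated' bucket for past-due tasks, while the `dates` array preserves insertion order for rendering. The same grouping, sketched in Python for clarity — the (end_date, name) task shape is an assumption made for the example:

from collections import OrderedDict
from datetime import date


def group_tasks(tasks, today=None):
    today = today or date.today()
    buckets = OrderedDict()  # key order plays the role of the `dates` array
    for end_date, name in tasks:
        key = end_date.isoformat() if end_date >= today else 'Outdated'
        buckets.setdefault(key, []).append(name)
    return buckets


print(group_tasks([(date(2030, 1, 2), 'ship'), (date(2000, 1, 1), 'late')],
                  today=date(2024, 1, 1)))
# OrderedDict([('2030-01-02', ['ship']), ('Outdated', ['late'])])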
project.component.ts | import { Component, OnInit } from '@angular/core';
import { TaskService, ProjectService, LabelService, UserService } from '../_services/index';
import { Task, Project, Label, User, Access } from '../_models/index';
import { Router, ActivatedRoute, Params } from '@angular/router';
import { NotificationsService } from 'angular2-notifications';
import { IMyDpOptions, IMyDateModel } from 'mydatepicker';
@Component({
templateUrl: 'project.component.html'
})
export class ProjectComponent implements OnInit {
tasks: any = {};
dates: Array<any>;
endDate: string;
labels: Label[];
users: User[];
selectedTask: Task;
error: any = {};
validation: String;
project_id: number;
accessList: Array<Access>;
today: Date = new Date();
week: Date;
private myDatePickerOptions: IMyDpOptions = {
dateFormat: 'yyyy.mm.dd',
ariaLabelInputField: 'end_date',
firstDayOfWeek: 'mo',
sunHighlight: true,
showClearDateBtn: false
};
constructor(
private taskService: TaskService,
private projectService: ProjectService,
private labelService: LabelService,
private userService: UserService,
private activatedRoute: ActivatedRoute,
private _notificationsService: NotificationsService) {
}
ngOnInit() {
this.activatedRoute.params.subscribe((params: Params) => {
// Init date to create link in nav
this.today = new Date();
this.today.setHours(0);
this.today.setMinutes(0);
this.today.setSeconds(0, 0);
this.week = new Date();
this.week.setHours(0);
this.week.setMinutes(0);
this.week.setSeconds(0, 0);
this.week.setSeconds(this.week.getSeconds() + 86400*6);
// Get project_id to filter
this.project_id = params['id'];
// Get end_date to filter
this.endDate = params['endDate'];
// Selected task to open edit panel
this.selectedTask = null;
// Get data from API
this.getListOfTasks(this.endDate);
this.getListOfLabels();
this.getListOfUsers();
});
}
addTaskToList(task) {
if (task.end_date.getTime() >= this.today.getTime()) |
else {
// If task end_date is outdated
if (this.dates.indexOf('Outdated') < 0) {
this.dates.push('Outdated');
this.tasks['Outdated'] = [];
}
this.tasks['Outdated'].push(task);
}
}
getListOfTasks(endDate = null) {
this.tasks = {};
this.dates = [];
if (endDate) {
this.projectService.getTasksByEndDateLte(this.project_id, endDate)
.subscribe(
data => {
data.map((task) => {
let newTask = new Task(task);
this.addTaskToList(newTask);
})
}
);
}
else {
this.projectService.getTasks(this.project_id)
.subscribe(
data => {
data.map((task) => {
let newTask = new Task(task);
this.addTaskToList(newTask);
})
}
);
}
}
getListOfLabels() {
this.labelService.listByProject(this.project_id)
.subscribe(
data => {
this.labels = data;
}
);
}
getListOfUsers() {
this.users = [];
this.projectService.getUsers(this.project_id)
.subscribe(
data => {
data.map((access) => {
this.userService.get(access.user)
.subscribe(
data => {
this.users.push(data);
}
);
})
}
);
}
updateTask() {
if (this.selectedTask.id) {
this.taskService.update(this.selectedTask)
.subscribe(
data => {
this.selectedTask = null;
this.getListOfTasks();
this._notificationsService.success(
'Task updated',
'The task has been correctly updated',
{
timeOut: 2000
}
)
},
error => {
this.error = JSON.parse(error._body);
}
);
}
else {
this.taskService.create(this.selectedTask)
.subscribe(
data => {
this.selectedTask = null;
this.getListOfTasks();
this._notificationsService.success(
'Task created',
'The new task has been correctly created',
{
timeOut: 2000
}
)
},
error => {
this.error = JSON.parse(error._body);
}
);
}
}
deleteTask(task) {
this.taskService.delete(task)
.subscribe(
data => {
if (this.selectedTask && this.selectedTask.id == task.id) {
this.selectedTask = null;
}
this.getListOfTasks();
this._notificationsService.success(
'Task deleted',
'The task has been deleted',
{
timeOut: 2000
}
)
},
error => {
this.error = JSON.parse(error._body);
}
);
}
addTask() {
this.selectedTask = new Task();
this.selectedTask.project.id = this.project_id;
}
changeSelectedTask(task) {
this.selectedTask = new Task(JSON.parse(JSON.stringify(task)));
}
close() {
this.selectedTask = null;
}
getLabelColor(label) {
return '#' + label.color;
}
onDateChanged(event: IMyDateModel) {
this.selectedTask.end_date = event.jsdate;
console.log(this.selectedTask.end_date);
}
}
| {
// If task is in the future
if (this.dates.indexOf(task.getEndDateHuman()) < 0) {
this.dates.push(task.getEndDateHuman());
this.tasks[task.getEndDateHuman()] = [];
}
this.tasks[task.getEndDateHuman()].push(task);
} | conditional_block |
project.component.ts | import { Component, OnInit } from '@angular/core';
import { TaskService, ProjectService, LabelService, UserService } from '../_services/index';
import { Task, Project, Label, User, Access } from '../_models/index';
import { Router, ActivatedRoute, Params } from '@angular/router';
import { NotificationsService } from 'angular2-notifications';
import { IMyDpOptions, IMyDateModel } from 'mydatepicker';
@Component({
templateUrl: 'project.component.html'
})
export class ProjectComponent implements OnInit {
tasks: any = {};
dates: Array<any>;
endDate: string;
labels: Label[];
users: User[];
selectedTask: Task;
error: any = {};
validation: String;
project_id: number;
accessList: Array<Access>;
today: Date = new Date();
week: Date;
private myDatePickerOptions: IMyDpOptions = {
dateFormat: 'yyyy.mm.dd',
ariaLabelInputField: 'end_date',
firstDayOfWeek: 'mo',
sunHighlight: true,
showClearDateBtn: false
};
constructor(
private taskService: TaskService,
private projectService: ProjectService,
private labelService: LabelService,
private userService: UserService,
private activatedRoute: ActivatedRoute,
private _notificationsService: NotificationsService) {
}
ngOnInit() {
this.activatedRoute.params.subscribe((params: Params) => {
// Init date to create link in nav
this.today = new Date();
this.today.setHours(0);
this.today.setMinutes(0);
this.today.setSeconds(0, 0);
this.week = new Date();
this.week.setHours(0);
this.week.setMinutes(0);
this.week.setSeconds(0, 0);
this.week.setSeconds(this.week.getSeconds() + 86400*6);
// Get project_id to filter
this.project_id = params['id'];
// Get end_date to filter
this.endDate = params['endDate'];
// Selected task to open edit panel
this.selectedTask = null;
// Get data from API
this.getListOfTasks(this.endDate);
this.getListOfLabels();
this.getListOfUsers();
});
}
addTaskToList(task) {
if (task.end_date.getTime() >= this.today.getTime()) {
// If task is in the future
if (this.dates.indexOf(task.getEndDateHuman()) < 0) {
this.dates.push(task.getEndDateHuman());
this.tasks[task.getEndDateHuman()] = [];
}
this.tasks[task.getEndDateHuman()].push(task);
}
else {
// If task end_date is outdated
if (this.dates.indexOf('Outdated') < 0) {
this.dates.push('Outdated');
this.tasks['Outdated'] = [];
}
this.tasks['Outdated'].push(task);
}
}
getListOfTasks(endDate = null) {
this.tasks = {};
this.dates = [];
if (endDate) {
this.projectService.getTasksByEndDateLte(this.project_id, endDate)
.subscribe(
data => {
data.map((task) => {
let newTask = new Task(task);
this.addTaskToList(newTask);
})
}
);
}
else {
this.projectService.getTasks(this.project_id)
.subscribe(
data => {
data.map((task) => {
let newTask = new Task(task);
this.addTaskToList(newTask);
})
}
);
}
}
getListOfLabels() {
this.labelService.listByProject(this.project_id)
.subscribe(
data => {
this.labels = data;
}
);
}
getListOfUsers() {
this.users = [];
this.projectService.getUsers(this.project_id)
.subscribe(
data => {
data.map((access) => {
this.userService.get(access.user)
.subscribe(
data => {
this.users.push(data);
}
);
})
}
);
}
updateTask() {
if (this.selectedTask.id) {
this.taskService.update(this.selectedTask)
.subscribe(
data => {
this.selectedTask = null;
this.getListOfTasks();
this._notificationsService.success(
'Task updated',
'The task has been correctly updated',
{
timeOut: 2000
}
)
},
error => {
this.error = JSON.parse(error._body);
}
);
}
else {
this.taskService.create(this.selectedTask)
.subscribe(
data => {
this.selectedTask = null;
this.getListOfTasks();
this._notificationsService.success(
'Task created',
'The new task has been correctly created',
{
timeOut: 2000 | }
)
},
error => {
this.error = JSON.parse(error._body);
}
);
}
}
deleteTask(task) {
this.taskService.delete(task)
.subscribe(
data => {
if (this.selectedTask && this.selectedTask.id == task.id) {
this.selectedTask = null;
}
this.getListOfTasks();
this._notificationsService.success(
'Task deleted',
'The task has been deleted',
{
timeOut: 2000
}
)
},
error => {
this.error = JSON.parse(error._body);
}
);
}
addTask() {
this.selectedTask = new Task();
this.selectedTask.project.id = this.project_id;
}
changeSelectedTask(task) {
this.selectedTask = new Task(JSON.parse(JSON.stringify(task)));
}
close() {
this.selectedTask = null;
}
getLabelColor(label) {
return '#' + label.color;
}
onDateChanged(event: IMyDateModel) {
this.selectedTask.end_date = event.jsdate;
console.log(this.selectedTask.end_date);
}
} | random_line_split |
|
project.component.ts | import { Component, OnInit } from '@angular/core';
import { TaskService, ProjectService, LabelService, UserService } from '../_services/index';
import { Task, Project, Label, User, Access } from '../_models/index';
import { Router, ActivatedRoute, Params } from '@angular/router';
import { NotificationsService } from 'angular2-notifications';
import { IMyDpOptions, IMyDateModel } from 'mydatepicker';
@Component({
templateUrl: 'project.component.html'
})
export class ProjectComponent implements OnInit {
tasks: any = {};
dates: Array<any>;
endDate: string;
labels: Label[];
users: User[];
selectedTask: Task;
error: any = {};
validation: String;
project_id: number;
accessList: Array<Access>;
today: Date = new Date();
week: Date;
private myDatePickerOptions: IMyDpOptions = {
dateFormat: 'yyyy.mm.dd',
ariaLabelInputField: 'end_date',
firstDayOfWeek: 'mo',
sunHighlight: true,
showClearDateBtn: false
};
constructor(
private taskService: TaskService,
private projectService: ProjectService,
private labelService: LabelService,
private userService: UserService,
private activatedRoute: ActivatedRoute,
private _notificationsService: NotificationsService) {
}
ngOnInit() {
this.activatedRoute.params.subscribe((params: Params) => {
// Init date to create link in nav
this.today = new Date();
this.today.setHours(0);
this.today.setMinutes(0);
this.today.setSeconds(0, 0);
this.week = new Date();
this.week.setHours(0);
this.week.setMinutes(0);
this.week.setSeconds(0, 0);
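// Jump 6 days ahead (86400 seconds per day) to mark the end of the one-week window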
this.week.setSeconds(this.week.getSeconds() + 86400*6);
// Get project_id to filter
this.project_id = params['id'];
// Get end_date to filter
this.endDate = params['endDate'];
// Selected task to open edit panel
this.selectedTask = null;
// Get data from API
this.getListOfTasks(this.endDate);
this.getListOfLabels();
this.getListOfUsers();
});
}
addTaskToList(task) {
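// Bucket the task under its human-readable end date, or under 'Outdated' if the date has already passed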
if (task.end_date.getTime() >= this.today.getTime()) {
// If task is in the future
if (this.dates.indexOf(task.getEndDateHuman()) < 0) {
this.dates.push(task.getEndDateHuman());
this.tasks[task.getEndDateHuman()] = [];
}
this.tasks[task.getEndDateHuman()].push(task);
}
else {
// If task end_date is outdated
if (this.dates.indexOf('Outdated') < 0) {
this.dates.push('Outdated');
this.tasks['Outdated'] = [];
}
this.tasks['Outdated'].push(task);
}
}
getListOfTasks(endDate = null) {
this.tasks = {};
this.dates = [];
if (endDate) {
this.projectService.getTasksByEndDateLte(this.project_id, endDate)
.subscribe(
data => {
data.map((task) => {
let newTask = new Task(task);
this.addTaskToList(newTask);
})
}
);
}
else {
this.projectService.getTasks(this.project_id)
.subscribe(
data => {
data.map((task) => {
let newTask = new Task(task);
this.addTaskToList(newTask);
})
}
);
}
}
getListOfLabels() {
this.labelService.listByProject(this.project_id)
.subscribe(
data => {
this.labels = data;
}
);
}
getListOfUsers() {
this.users = [];
this.projectService.getUsers(this.project_id)
.subscribe(
data => {
data.map((access) => {
this.userService.get(access.user)
.subscribe(
data => {
this.users.push(data);
}
);
})
}
);
}
updateTask() {
if (this.selectedTask.id) {
this.taskService.update(this.selectedTask)
.subscribe(
data => {
this.selectedTask = null;
this.getListOfTasks();
this._notificationsService.success(
'Task updated',
'The task has been correctly updated',
{
timeOut: 2000
}
)
},
error => {
this.error = JSON.parse(error._body);
}
);
}
else {
this.taskService.create(this.selectedTask)
.subscribe(
data => {
this.selectedTask = null;
this.getListOfTasks();
this._notificationsService.success(
'Task created',
'The new task has been correctly created',
{
timeOut: 2000
}
)
},
error => {
this.error = JSON.parse(error._body);
}
);
}
}
deleteTask(task) | }
addTask() {
this.selectedTask = new Task();
this.selectedTask.project.id = this.project_id;
}
changeSelectedTask(task) {
this.selectedTask = new Task(JSON.parse(JSON.stringify(task)));
}
close() {
this.selectedTask = null;
}
getLabelColor(label) {
return '#' + label.color;
}
onDateChanged(event: IMyDateModel) {
this.selectedTask.end_date = event.jsdate;
console.log(this.selectedTask.end_date);
}
}
| {
this.taskService.delete(task)
.subscribe(
data => {
if (this.selectedTask && this.selectedTask.id == task.id) {
this.selectedTask = null;
}
this.getListOfTasks();
this._notificationsService.success(
'Task deleted',
'The task has been deleted',
{
timeOut: 2000
}
)
},
error => {
this.error = JSON.parse(error._body);
}
); | identifier_body |
core.js | app.core.eventsLoaded = false;
app.core.updateEventsInterval = 10000;
//120000
//60000 in production
app.core.sinceId = 0;
app.core.maxId = undefined;
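//sinceId fetches events newer than the last one seen ('top'); maxId pages back through older ones ('bottom')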
app.core.qtyPag = 30;
app.core.searchRadius = 300000;
//Initial radius 300Km
//Events update interval
//CORE FUNCTIONS
app.core.start = function() {//Init app function
app.ui.loading.show();
app.core.updateEvents('bottom');
//Initial load of events
//Updated from here rather than in the ajax callback, in case the ajax request fails
//setInterval(app.core.updateEvents, app.core.updateEventsInterval, 'top');
//Polling load that checks for new events
}
app.core.ajax = function(method, url, params, callbacks, hideLoading) {
//Show loading
if (!hideLoading) {
app.ui.loading.show();
}
app.ui.tabs.add(app.ui.loading);
//Init AJAX
var xhr = Ti.Network.createHTTPClient();
//enableKeepAlive: true
//xhr.ondatastream = callbacks.dataStream;
//xhr.onsendstream = callbacks.sendStream;
//+ '&callback'
//&callback is appended so the JSONP response is not returned as a plain string
//xhr.setRequestHeader('Content-Type', 'application/json');
//xhr.setTimeout(20000);
//Callback handlers
xhr.onload = function(data) {//Success callback
app.ui.loading.hide();
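//The JSONP response comes wrapped in parentheses, so strip the first and last characters before parsing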
var data = this.responseText.substring(1);
//Remove first (
data = data.substring(0, data.length - 1);
//Remove last )
data = JSON.parse(data);
//Call callback
callbacks.success(data);
}
xhr.onerror = callbacks.error ||
function() {//Default error callback
app.ui.loading.hide();
//alert('Request ERROR');
};
xhr.open(method, url);
if (params) {//Could be simplified to just xhr.send();
params.callback = '';
xhr.send(params);
} else {
xhr.send();
}
}
app.core.getCurrentPosition = function(callback) {
Titanium.Geolocation.getCurrentPosition(function(e) {
if (!e.success || e.error) {
// manage the error
alert('geolocation error');
return;
}
callback(e);
//var accuracy = e.coords.accuracy;
});
}
//Updates the events every X interval defined in app.core.updateEventsInterval
//dataPosition --> indicates where the event results should be inserted [top|bottom]
app.core.updateEvents = function(dataPosition) {//
//alert('updateEvents sinceId: ' + app.core.sinceId);
var searchFilter = '';
//Determining search data filter
if (dataPosition == 'top') {
searchFilter = '&sinceId=' + app.core.sinceId;
} else if (dataPosition == 'bottom' && app.core.maxId) {
searchFilter = '&maxId=' + app.core.maxId;
}
app.core.getCurrentPosition(function(e) {
var latitude = e.coords.latitude;
var longitude = e.coords.longitude;
//alert('longitude: ' + longitude + ' latitude: ' + latitude );
var url = app.core.restUrl2 + 'action=find&radius=' + app.core.searchRadius + '&qtyPag=' + app.core.qtyPag + '&lat=' + latitude + '&lng=' + longitude + searchFilter;
//lat=39.402738&lng=-0.403518
//alert(url);
app.core.ajax('GET', url, undefined, {
success : function(data) {//Success
var values = data.values;
//app.core.events = values;
if (values) {
var evento, rows = new Array(), annotations = new Array(), row, rowImg, rowLabel, valuesLength = values.length;
for (var i = 0; i < valuesLength; i++) {
evento = values[i];
//Timeline
row = app.core.createCustomRow(evento.title, evento.direction.complete, evento.image.s[1], {
title : evento.title,
qtyLikes : evento.qtyLikes,
id : evento.id,
image : evento.image,
direction : evento.direction,
lat : evento.lat,
lng : evento.lng
})
//Map
annotations.push(Titanium.Map.createAnnotation({
id : evento.id,
qtyLikes : evento.qtyLikes,
latitude : evento.lat,
longitude : evento.lng,
title : evento.title,
image : evento.image,
direction : evento.direction,
lat : evento.lat,
lng : evento.lng,
//subtitle : 'Cupertino, CA',//Add elapsed time, e.g. "5 minutes ago"
pincolor : Titanium.Map.ANNOTATION_GREEN,
animate : true,
rightButton : evento.image.s[1]
}));
if (dataPosition == 'top') {//inserted at the beginning
app.view.timeline.tableView.insertRowBefore(0, row);
} else {//stored and appended later with the rest
rows.push(row);
}
}
//Adding annotations to the map
app.view.map.mapView.addAnnotations(annotations);
if (dataPosition == 'bottom') { | else {
//Show notification of the first event
var newEvent = values[0];
app.core.statusbarNotification.add('New event near you', newEvent.title, 'New events');
//Update sinceId
app.core.sinceId = newEvent.id;
}
if (!app.core.eventsLoaded) {//If this is the first time the function is called
//Setting events loaded
app.core.eventsLoaded = true;
//Set first value to sinceId
app.core.sinceId = values[0].id;
//Set map location
app.controller.map.location = {
latitude : latitude,
longitude : longitude,
latitudeDelta : 0.5,
longitudeDelta : 0.5
};
app.view.map.mapView.setLocation(app.controller.map.location);
//Start service for updateEvents
app.core.service.start();
}
} else if (!app.core.eventsLoaded) {//If the events were not loaded yet and no events were received
app.core.eventsLoaded = true;
var row = {
color : '#000',
textAlign : 'center',
title : "Don't have events, please change radius"
};
app.view.timeline.tableView.setData(row);
}
}
}, true);
});
}
app.core.resetEvents = function() {
app.core.eventsLoaded = false;
app.core.sinceId = 0;
app.core.maxId = undefined;
app.view.timeline.tableView.setData([]);
app.view.map.mapView.removeAllAnnotations();
}
app.core.capitalize = function(string) {//Capitalize the First letter
return string.charAt(0).toUpperCase() + string.slice(1);
}
app.core.AndroidMenuHandler = function() {
Ti.UI.createAlertDialog({
title : 'AndroidMenuHandler'
}).show();
}
app.core.facebookPost = function() {//Post a feed entry to the current user's wall
app.ui.loading.show();
Titanium.Facebook.requestWithGraphPath('me/feed', {
message : app.controller.eventDetail.eventTitle,
link : 'http://www.hotnowapp.com/share.php?id=' + app.controller.eventDetail.eventId
}, "POST", function() {
app.ui.loading.hide();
app.core.niceAlert('Event shared');
});
}
app.core.niceAlert = function(title) {
Ti.UI.createAlertDialog({
title : title
}).show();
}
app.core.createCustomRow = function(title, detail, image, rowConfig) {
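//Builds a table row with a thumbnail, bold title and a detail label; rowConfig carries extra event data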
if (rowConfig) {
rowConfig.className = 'Pic';
} else {
rowConfig = {};
}
rowConfig.backgroundSelectedColor = '#000';
var row = Titanium.UI.createTableViewRow(rowConfig);
//Image
rowImg = Titanium.UI.createImageView({
url : image,
height : 70,
width : 70,
left : 1
});
//Title
rowLabel = Titanium.UI.createLabel({
text : app.core.capitalize(title),
color : '#333',
font : {
fontSize : 16,
fontWeight : 'bold'
},
width : 'auto',
textAlign : 'left',
top : 2,
left : 80,
height : 20
});
//Detail
rowInfoLabel = Ti.UI.createLabel({
color : '#444',
textAlign : 'left',
text : detail,
font : {
fontSize : 14
},
left : | //Si hay que meterlo al final
//Adding show more events button
var showMoreButton = Ti.UI.createButton({
title : 'Show more events',
width : '100%',
top : 5
}), endRow = Ti.UI.createTableViewRow({
width : '100%',
loadMore : true
});
endRow.add(showMoreButton);
rows.push(endRow);
//Adding data
app.view.timeline.tableView.appendRow(rows);
//app.view.timeline.tableView.setData(rows);
//Update maxId
app.core.maxId = values[values.length - 1].id;
} | conditional_block |
core.js |
app.core.eventsLoaded = false;
app.core.updateEventsInterval = 10000;
//120000
//60000 in production
app.core.sinceId = 0;
app.core.maxId = undefined;
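//sinceId fetches events newer than the last one seen ('top'); maxId pages back through older ones ('bottom')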
app.core.qtyPag = 30;
app.core.searchRadius = 300000;
//Initial radius 300Km
//Events update interval
//CORE FUNCTIONS
app.core.start = function() {//Init app function
app.ui.loading.show();
app.core.updateEvents('bottom');
//Initial load of events
//Updated from here rather than in the ajax callback, in case the ajax request fails
//setInterval(app.core.updateEvents, app.core.updateEventsInterval, 'top');
//Polling load that checks for new events
}
app.core.ajax = function(method, url, params, callbacks, hideLoading) {
//Show loading
if (!hideLoading) {
app.ui.loading.show();
}
app.ui.tabs.add(app.ui.loading);
//Init AJAX
var xhr = Ti.Network.createHTTPClient();
//enableKeepAlive: true
//xhr.ondatastream = callbacks.dataStream;
//xhr.onsendstream = callbacks.sendStream;
//+ '&callback'
//&callback is appended so the JSONP response is not returned as a plain string
//xhr.setRequestHeader('Content-Type', 'application/json');
//xhr.setTimeout(20000);
//Callback handlers
xhr.onload = function(data) {//Success callback
app.ui.loading.hide();
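//The JSONP response comes wrapped in parentheses, so strip the first and last characters before parsing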
var data = this.responseText.substring(1);
//Remove first (
data = data.substring(0, data.length - 1);
//Remove last )
data = JSON.parse(data);
//Call callback
callbacks.success(data);
}
xhr.onerror = callbacks.error ||
function() {//Default error callback
app.ui.loading.hide();
//alert('Request ERROR');
};
xhr.open(method, url);
if (params) {//Could be simplified to just xhr.send();
params.callback = '';
xhr.send(params);
} else {
xhr.send();
}
}
app.core.getCurrentPosition = function(callback) {
Titanium.Geolocation.getCurrentPosition(function(e) { | if (!e.success || e.error) {
// manage the error
alert('geolocation error');
return;
}
callback(e);
//var accuracy = e.coords.accuracy;
});
}
//Updates the events every X interval defined in app.core.updateEventsInterval
//dataPosition --> indicates where the event results should be inserted [top|bottom]
app.core.updateEvents = function(dataPosition) {//
//alert('updateEvents sinceId: ' + app.core.sinceId);
var searchFilter = '';
//Determining search data filter
if (dataPosition == 'top') {
searchFilter = '&sinceId=' + app.core.sinceId;
} else if (dataPosition == 'bottom' && app.core.maxId) {
searchFilter = '&maxId=' + app.core.maxId;
}
app.core.getCurrentPosition(function(e) {
var latitude = e.coords.latitude;
var longitude = e.coords.longitude;
//alert('longitude: ' + longitude + ' latitude: ' + latitude );
var url = app.core.restUrl2 + 'action=find&radius=' + app.core.searchRadius + '&qtyPag=' + app.core.qtyPag + '&lat=' + latitude + '&lng=' + longitude + searchFilter;
//lat=39.402738&lng=-0.403518
//alert(url);
app.core.ajax('GET', url, undefined, {
success : function(data) {//Success
var values = data.values;
//app.core.events = values;
if (values) {
var evento, rows = new Array(), annotations = new Array(), row, rowImg, rowLabel, valuesLength = values.length;
for (var i = 0; i < valuesLength; i++) {
evento = values[i];
//Timeline
row = app.core.createCustomRow(evento.title, evento.direction.complete, evento.image.s[1], {
title : evento.title,
qtyLikes : evento.qtyLikes,
id : evento.id,
image : evento.image,
direction : evento.direction,
lat : evento.lat,
lng : evento.lng
})
//Map
annotations.push(Titanium.Map.createAnnotation({
id : evento.id,
qtyLikes : evento.qtyLikes,
latitude : evento.lat,
longitude : evento.lng,
title : evento.title,
image : evento.image,
direction : evento.direction,
lat : evento.lat,
lng : evento.lng,
//subtitle : 'Cupertino, CA',//Add elapsed time, e.g. "5 minutes ago"
pincolor : Titanium.Map.ANNOTATION_GREEN,
animate : true,
rightButton : evento.image.s[1]
}));
if (dataPosition == 'top') {//inserted at the beginning
app.view.timeline.tableView.insertRowBefore(0, row);
} else {//stored and appended later with the rest
rows.push(row);
}
}
//Adding annotations to the map
app.view.map.mapView.addAnnotations(annotations);
if (dataPosition == 'bottom') {//If it has to be appended at the end
//Adding show more events button
var showMoreButton = Ti.UI.createButton({
title : 'Show more events',
width : '100%',
top : 5
}), endRow = Ti.UI.createTableViewRow({
width : '100%',
loadMore : true
});
endRow.add(showMoreButton);
rows.push(endRow);
//Adding data
app.view.timeline.tableView.appendRow(rows);
//app.view.timeline.tableView.setData(rows);
//Update maxId
app.core.maxId = values[values.length - 1].id;
} else {
//Show notification of the first event
var newEvent = values[0];
app.core.statusbarNotification.add('New event near you', newEvent.title, 'New events');
//Update sinceId
app.core.sinceId = newEvent.id;
}
if (!app.core.eventsLoaded) {//If this is the first time the function is called
//Setting events loaded
app.core.eventsLoaded = true;
//Set first value to sinceId
app.core.sinceId = values[0].id;
//Set map location
app.controller.map.location = {
latitude : latitude,
longitude : longitude,
latitudeDelta : 0.5,
longitudeDelta : 0.5
};
app.view.map.mapView.setLocation(app.controller.map.location);
//Start service for updateEvents
app.core.service.start();
}
} else if (!app.core.eventsLoaded) {//If the events were not loaded yet and no events were received
app.core.eventsLoaded = true;
var row = {
color : '#000',
textAlign : 'center',
title : "Don't have events, please change radius"
};
app.view.timeline.tableView.setData(row);
}
}
}, true);
});
}
app.core.resetEvents = function() {
app.core.eventsLoaded = false;
app.core.sinceId = 0;
app.core.maxId = undefined;
app.view.timeline.tableView.setData([]);
app.view.map.mapView.removeAllAnnotations();
}
app.core.capitalize = function(string) {//Capitalize the First letter
return string.charAt(0).toUpperCase() + string.slice(1);
}
app.core.AndroidMenuHandler = function() {
Ti.UI.createAlertDialog({
title : 'AndroidMenuHandler'
}).show();
}
app.core.facebookPost = function() {//Post a feed entry to the current user's wall
app.ui.loading.show();
Titanium.Facebook.requestWithGraphPath('me/feed', {
message : app.controller.eventDetail.eventTitle,
link : 'http://www.hotnowapp.com/share.php?id=' + app.controller.eventDetail.eventId
}, "POST", function() {
app.ui.loading.hide();
app.core.niceAlert('Event shared');
});
}
app.core.niceAlert = function(title) {
Ti.UI.createAlertDialog({
title : title
}).show();
}
app.core.createCustomRow = function(title, detail, image, rowConfig) {
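//Builds a table row with a thumbnail, bold title and a detail label; rowConfig carries extra event data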
if (rowConfig) {
rowConfig.className = 'Pic';
} else {
rowConfig = {};
}
rowConfig.backgroundSelectedColor = '#000';
var row = Titanium.UI.createTableViewRow(rowConfig);
//Image
rowImg = Titanium.UI.createImageView({
url : image,
height : 70,
width : 70,
left : 1
});
//Title
rowLabel = Titanium.UI.createLabel({
text : app.core.capitalize(title),
color : '#333',
font : {
fontSize : 16,
fontWeight : 'bold'
},
width : 'auto',
textAlign : 'left',
top : 2,
left : 80,
height : 20
});
//Detail
rowInfoLabel = Ti.UI.createLabel({
color : '#444',
textAlign : 'left',
text : detail,
font : {
fontSize : 14
},
left : | random_line_split |
|
resources.rs | use serde::de::Deserialize;
use serde_json;
use std::{collections::HashMap, str};
use economy::Commodity;
use entities::Faction;
use entities::PlanetEconomy;
/// Generic Resource trait to be implemented by all resource types which should
/// be loaded at compile time.
/// KEY must be unique to the specific resource (e.g. the filename of the
/// resource).
pub trait Resource: Deserialize<'static> {
const KEY: &'static str;
}
lazy_static! {
// Load resources at compile time.
// TODO: Convert to resource at compile time to save resources.
static ref RESOURCES: HashMap<&'static str, &'static str> = {
let mut res = HashMap::new();
res.insert(
AstronomicalNamesResource::KEY,
include_str!("../res/astronomical_names.json"),
);
res.insert(
AgentResource::KEY,
include_str!("../res/economic_agents.json"),
);
res
};
}
/// Attempts to return the resource with the given type, will return None
/// if the type has no resource or if the deserialization fails.
pub fn fetch_resource<T: Resource>() -> Option<T> {
let res_str = RESOURCES.get(T::KEY).unwrap();
match serde_json::from_str(res_str) {
Ok(res) => Some(res),
Err(msg) => {
error!("{}", msg);
None
}
}
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
/// Resource used in name generation of celestial objects.
pub struct | {
pub names: Vec<String>,
pub scientific_names: Vec<String>,
pub greek: Vec<String>,
pub roman: Vec<String>,
pub decorators: Vec<String>,
}
impl Resource for AstronomicalNamesResource {
const KEY: &'static str = "astronomical_names";
}
#[derive(Serialize, Deserialize, Debug)]
/// Resource containing all production/consumptions for factions and planets.
pub struct AgentResource {
pub faction_ideals: HashMap<Faction, HashMap<Commodity, u64>>,
pub faction_production: HashMap<Faction, HashMap<Commodity, u64>>,
pub planet_ideals: HashMap<PlanetEconomy, HashMap<Commodity, u64>>,
pub planet_production: HashMap<PlanetEconomy, HashMap<Commodity, u64>>,
}
impl Resource for AgentResource {
const KEY: &'static str = "economic_agents";
}
| AstronomicalNamesResource | identifier_name |
resources.rs | use serde::de::Deserialize;
use serde_json;
use std::{collections::HashMap, str};
use economy::Commodity;
use entities::Faction;
use entities::PlanetEconomy;
/// Generic Resource trait to be implemented by all resource types which should
/// be loaded at compile time.
/// KEY must be unique to the specific resource (e.g. the filename of the
/// resource).
pub trait Resource: Deserialize<'static> {
const KEY: &'static str;
}
lazy_static! {
// Load resources at compile time.
// TODO: Convert to resource at compile time to save resources.
static ref RESOURCES: HashMap<&'static str, &'static str> = {
let mut res = HashMap::new();
res.insert(
AstronomicalNamesResource::KEY,
include_str!("../res/astronomical_names.json"),
);
res.insert(
AgentResource::KEY,
include_str!("../res/economic_agents.json"),
);
res
};
}
/// Attempts to returns the resource with the given type, will return None | /// if the type has no resource or if the deserialization fails.
pub fn fetch_resource<T: Resource>() -> Option<T> {
let res_str = RESOURCES.get(T::KEY).unwrap();
match serde_json::from_str(res_str) {
Ok(res) => Some(res),
Err(msg) => {
error!("{}", msg);
None
}
}
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
/// Resource used in name generation of celestial objects.
pub struct AstronomicalNamesResource {
pub names: Vec<String>,
pub scientific_names: Vec<String>,
pub greek: Vec<String>,
pub roman: Vec<String>,
pub decorators: Vec<String>,
}
impl Resource for AstronomicalNamesResource {
const KEY: &'static str = "astronomical_names";
}
#[derive(Serialize, Deserialize, Debug)]
/// Resource containing all production/consumptions for factions and planets.
pub struct AgentResource {
pub faction_ideals: HashMap<Faction, HashMap<Commodity, u64>>,
pub faction_production: HashMap<Faction, HashMap<Commodity, u64>>,
pub planet_ideals: HashMap<PlanetEconomy, HashMap<Commodity, u64>>,
pub planet_production: HashMap<PlanetEconomy, HashMap<Commodity, u64>>,
}
impl Resource for AgentResource {
const KEY: &'static str = "economic_agents";
} | random_line_split |
|
gen.py | #!/usr/bin/python
import json
from random import randint
INPUT = "Tweet.size1000page1cnt849.json"
OUTPUT = 'new.json'
objs = json.load(open(INPUT,'r'))
print len(objs)
# for k,v in objs[0].items():
# print "%s=\n\t%s | " % (str(k),str(v))
def | (obj):
import datetime
return obj.strftime("%Y-%m-%d %H:%M:%S") if isinstance(obj,datetime.datetime) else obj
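# Pair each object's created_at timestamp with a random value to build a dummy time series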
se = []
for o in objs:
# se.append( {'x':o['created_at'],'y':randint(0,1000)} )
se.append([o['created_at'],randint(0,1000)])
di = {'name':'LA','series':se}
# print json.dumps(di)
# print type(di['series'][0][0])
f = open(OUTPUT,'w+')
f.write(json.dumps(di,default=date_handler))
f.close()
| date_handler | identifier_name |
gen.py | #!/usr/bin/python
import json
from random import randint
INPUT = "Tweet.size1000page1cnt849.json"
OUTPUT = 'new.json'
objs = json.load(open(INPUT,'r'))
print len(objs)
# for k,v in objs[0].items():
# print "%s=\n\t%s | " % (str(k),str(v))
def date_handler(obj):
|
se = []
for o in objs:
# se.append( {'x':o['created_at'],'y':randint(0,1000)} )
se.append([o['created_at'],randint(0,1000)])
di = {'name':'LA','series':se}
# print json.dumps(di)
# print type(di['series'][0][0])
f = open(OUTPUT,'w+')
f.write(json.dumps(di,default=date_handler))
f.close()
| import datetime
return obj.strftime("%Y-%m-%d %H:%M:%S") if isinstance(obj,datetime.datetime) else obj | identifier_body |
gen.py | #!/usr/bin/python
import json
from random import randint
INPUT = "Tweet.size1000page1cnt849.json"
OUTPUT = 'new.json'
objs = json.load(open(INPUT,'r'))
print len(objs)
# for k,v in objs[0].items():
# print "%s=\n\t%s | " % (str(k),str(v))
def date_handler(obj):
import datetime
return obj.strftime("%Y-%m-%d %H:%M:%S") if isinstance(obj,datetime.datetime) else obj
se = []
for o in objs: | # print json.dumps(di)
# print type(di['series'][0][0])
f = open(OUTPUT,'w+')
f.write(json.dumps(di,default=date_handler))
f.close() | # se.append( {'x':o['created_at'],'y':randint(0,1000)} )
se.append([o['created_at'],randint(0,1000)])
di = {'name':'LA','series':se} | random_line_split |
gen.py | #!/usr/bin/python
import json
from random import randint
INPUT = "Tweet.size1000page1cnt849.json"
OUTPUT = 'new.json'
objs = json.load(open(INPUT,'r'))
print len(objs)
# for k,v in objs[0].items():
# print "%s=\n\t%s | " % (str(k),str(v))
def date_handler(obj):
import datetime
return obj.strftime("%Y-%m-%d %H:%M:%S") if isinstance(obj,datetime.datetime) else obj
se = []
for o in objs:
# se.append( {'x':o['created_at'],'y':randint(0,1000)} )
|
di = {'name':'LA','series':se}
# print json.dumps(di)
# print type(di['series'][0][0])
f = open(OUTPUT,'w+')
f.write(json.dumps(di,default=date_handler))
f.close()
| se.append([o['created_at'],randint(0,1000)]) | conditional_block |
node.py | # Copyright (c) 2013 Red Hat, Inc.
# Author: William Benton ([email protected])
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from proxy import Proxy, proxied_attr
from proxy import proxied_attr_get as pag, proxied_attr_set as pas, proxied_attr_getset as pags
from arc_utils import arcmethod, uniq
from singleton import v as store_singleton
import errors
from errors import not_implemented, fail
from constants import PARTITION_GROUP, LABEL_SENTINEL_PARAM, LABEL_SENTINEL_PARAM_ATTR
from datetime import datetime
import calendar
import urllib
def ts():
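# Current UTC time as integer microseconds since the Unix epoch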
now = datetime.utcnow()
return (calendar.timegm(now.utctimetuple()) * 1000000) + now.microsecond
class node(Proxy):
name = property(pag("name"))
memberships = property(*pags("memberships"))
identity_group = property(lambda self : self.cm.make_proxy_object("group", self.attr_vals["identity_group"], refresh=True))
provisioned = property(*pags("provisioned"))
last_updated_version = property(pag("last_updated_version"))
modifyMemberships = arcmethod(pag("memberships"), pas("memberships"), heterogeneous=True, preserve_order=True)
def getConfig(self, **options):
if options.has_key("version"):
|
return self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name))
def makeProvisioned(self):
self.provisioned = True
self.update()
def explain(self):
not_implemented()
def checkin(self):
metapath = "/meta/node/%s" % self.name
# now = datetime.utcnow().isoformat()
now = ts()
meta = self.cm.fetch_json_resource(metapath, False, default={})
meta["last-checkin"] = now
self.cm.put_json_resource(metapath, meta, False)
return now
def last_checkin(self):
metapath = "/meta/node/%s" % self.name
meta = self.cm.fetch_json_resource(metapath, False, default={})
return meta.has_key("last-checkin") and meta["last-checkin"] or 0
def whatChanged(self, old, new):
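# Diff the node's config between two commits; subsystems touching a must-change param need a restart, others only a reconfig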
oc = self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name), {"commit":old}, {})
nc = self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name), {"commit":new}, {})
ock = set(oc)
nck = set(nc)
params = set([p for p in (ock | nck) if p not in ock or p not in nck or oc[p] != nc[p]]) - set(["WALLABY_CONFIG_VERSION"])
mc_params = set([p for p in params if store_singleton().getParam(p).must_change])
subsystems = [store_singleton().getSubsys(sub) for sub in self.cm.list_objects("subsystem")]
restart, reconfig = [], []
for ss in subsystems:
ss.refresh
ssp = set(ss.parameters)
if ssp.intersection(mc_params):
restart.append(ss.name)
elif ssp.intersection(params):
reconfig.append(ss.name)
return [list(params), restart, reconfig]
# labeling support below
def getLabels(self):
memberships = self.memberships
if not PARTITION_GROUP in memberships:
return []
else:
partition = memberships.index(PARTITION_GROUP)
return memberships[partition+1:]
labels=property(getLabels)
def modifyLabels(self, op, labels, **options):
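# op is one of ADD, REPLACE or REMOVE; labels are the memberships listed after the PARTITION_GROUP marker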
thestore = store_singleton()
memberships = self.memberships
current_labels = self.getLabels()
label_set = set(current_labels + [PARTITION_GROUP])
new_labels = []
if op == "ADD":
new_labels = current_labels + labels
pass
elif op == "REPLACE":
new_labels = labels
pass
elif op == "REMOVE":
new_labels = [label for label in current_labels if label not in labels]
else:
raise NotImplementedError("modifyLabels: operation " + op + " not understood")
just_memberships = [grp for grp in memberships if grp not in label_set]
new_memberships = uniq(just_memberships + [PARTITION_GROUP] + new_labels)
if "ensure_partition_group" in options and options["ensure_partition_group"] is not False:
if thestore is None:
raise RuntimeError("store singleton must be initialized before using the ensure_partition_group option")
thestore.getPartitionGroup()
if "create_missing_labels" in options and options["create_missing_labels"] is not False:
if thestore is None:
raise RuntimeError("store singleton must be initialized before using the create_missing_labels option")
for missing_label in thestore.checkGroupValidity(new_labels):
thestore.addLabel(missing_label)
return self.modifyMemberships("REPLACE", new_memberships, {})
proxied_attr(node, "name")
proxied_attr(node, "memberships")
proxied_attr(node, "identity_group")
proxied_attr(node, "provisioned") | return self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name), {"commit":options["version"]}, {}) | conditional_block |
node.py | # Copyright (c) 2013 Red Hat, Inc.
# Author: William Benton ([email protected])
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from proxy import Proxy, proxied_attr
from proxy import proxied_attr_get as pag, proxied_attr_set as pas, proxied_attr_getset as pags
from arc_utils import arcmethod, uniq
from singleton import v as store_singleton
import errors
from errors import not_implemented, fail
from constants import PARTITION_GROUP, LABEL_SENTINEL_PARAM, LABEL_SENTINEL_PARAM_ATTR
from datetime import datetime
import calendar
import urllib
def ts():
now = datetime.utcnow()
return (calendar.timegm(now.utctimetuple()) * 1000000) + now.microsecond
class node(Proxy):
name = property(pag("name"))
memberships = property(*pags("memberships"))
identity_group = property(lambda self : self.cm.make_proxy_object("group", self.attr_vals["identity_group"], refresh=True))
provisioned = property(*pags("provisioned"))
last_updated_version = property(pag("last_updated_version"))
modifyMemberships = arcmethod(pag("memberships"), pas("memberships"), heterogeneous=True, preserve_order=True)
def getConfig(self, **options):
|
def makeProvisioned(self):
self.provisioned = True
self.update()
def explain(self):
not_implemented()
def checkin(self):
metapath = "/meta/node/%s" % self.name
# now = datetime.utcnow().isoformat()
now = ts()
meta = self.cm.fetch_json_resource(metapath, False, default={})
meta["last-checkin"] = now
self.cm.put_json_resource(metapath, meta, False)
return now
def last_checkin(self):
metapath = "/meta/node/%s" % self.name
meta = self.cm.fetch_json_resource(metapath, False, default={})
return meta.has_key("last-checkin") and meta["last-checkin"] or 0
def whatChanged(self, old, new):
oc = self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name), {"commit":old}, {})
nc = self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name), {"commit":new}, {})
ock = set(oc)
nck = set(nc)
params = set([p for p in (ock | nck) if p not in ock or p not in nck or oc[p] != nc[p]]) - set(["WALLABY_CONFIG_VERSION"])
mc_params = set([p for p in params if store_singleton().getParam(p).must_change])
subsystems = [store_singleton().getSubsys(sub) for sub in self.cm.list_objects("subsystem")]
restart, reconfig = [], []
for ss in subsystems:
ss.refresh
ssp = set(ss.parameters)
if ssp.intersection(mc_params):
restart.append(ss.name)
elif ssp.intersection(params):
reconfig.append(ss.name)
return [list(params), restart, reconfig]
# labeling support below
def getLabels(self):
memberships = self.memberships
if not PARTITION_GROUP in memberships:
return []
else:
partition = memberships.index(PARTITION_GROUP)
return memberships[partition+1:]
labels=property(getLabels)
def modifyLabels(self, op, labels, **options):
thestore = store_singleton()
memberships = self.memberships
current_labels = self.getLabels()
label_set = set(current_labels + [PARTITION_GROUP])
new_labels = []
if op == "ADD":
new_labels = current_labels + labels
pass
elif op == "REPLACE":
new_labels = labels
pass
elif op == "REMOVE":
new_labels = [label for label in current_labels if label not in labels]
else:
raise NotImplementedError("modifyLabels: operation " + op + " not understood")
just_memberships = [grp for grp in memberships if grp not in label_set]
new_memberships = uniq(just_memberships + [PARTITION_GROUP] + new_labels)
if "ensure_partition_group" in options and options["ensure_partition_group"] is not False:
if thestore is None:
raise RuntimeError("store singleton must be initialized before using the ensure_partition_group option")
thestore.getPartitionGroup()
if "create_missing_labels" in options and options["create_missing_labels"] is not False:
if thestore is None:
raise RuntimeError("store singleton must be initialized before using the create_missing_labels option")
for missing_label in thestore.checkGroupValidity(new_labels):
thestore.addLabel(missing_label)
return self.modifyMemberships("REPLACE", new_memberships, {})
proxied_attr(node, "name")
proxied_attr(node, "memberships")
proxied_attr(node, "identity_group")
proxied_attr(node, "provisioned") | if options.has_key("version"):
return self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name), {"commit":options["version"]}, {})
return self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name)) | identifier_body |
node.py | # Copyright (c) 2013 Red Hat, Inc.
# Author: William Benton ([email protected])
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from proxy import Proxy, proxied_attr
from proxy import proxied_attr_get as pag, proxied_attr_set as pas, proxied_attr_getset as pags
from arc_utils import arcmethod, uniq
from singleton import v as store_singleton
import errors
from errors import not_implemented, fail
from constants import PARTITION_GROUP, LABEL_SENTINEL_PARAM, LABEL_SENTINEL_PARAM_ATTR
from datetime import datetime
import calendar
import urllib
def ts():
now = datetime.utcnow()
return (calendar.timegm(now.utctimetuple()) * 1000000) + now.microsecond
class node(Proxy):
name = property(pag("name"))
memberships = property(*pags("memberships"))
identity_group = property(lambda self : self.cm.make_proxy_object("group", self.attr_vals["identity_group"], refresh=True))
provisioned = property(*pags("provisioned"))
last_updated_version = property(pag("last_updated_version"))
modifyMemberships = arcmethod(pag("memberships"), pas("memberships"), heterogeneous=True, preserve_order=True)
def getConfig(self, **options):
if options.has_key("version"):
return self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name), {"commit":options["version"]}, {})
return self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name))
def makeProvisioned(self):
self.provisioned = True
self.update()
def explain(self):
not_implemented()
def checkin(self):
metapath = "/meta/node/%s" % self.name
# now = datetime.utcnow().isoformat()
now = ts()
meta = self.cm.fetch_json_resource(metapath, False, default={})
meta["last-checkin"] = now
self.cm.put_json_resource(metapath, meta, False)
return now
def last_checkin(self): | return meta.has_key("last-checkin") and meta["last-checkin"] or 0
def whatChanged(self, old, new):
oc = self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name), {"commit":old}, {})
nc = self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name), {"commit":new}, {})
ock = set(oc)
nck = set(nc)
params = set([p for p in (ock | nck) if p not in ock or p not in nck or oc[p] != nc[p]]) - set(["WALLABY_CONFIG_VERSION"])
mc_params = set([p for p in params if store_singleton().getParam(p).must_change])
subsystems = [store_singleton().getSubsys(sub) for sub in self.cm.list_objects("subsystem")]
restart, reconfig = [], []
for ss in subsystems:
ss.refresh
ssp = set(ss.parameters)
if ssp.intersection(mc_params):
restart.append(ss.name)
elif ssp.intersection(params):
reconfig.append(ss.name)
return [list(params), restart, reconfig]
# labeling support below
def getLabels(self):
memberships = self.memberships
if not PARTITION_GROUP in memberships:
return []
else:
partition = memberships.index(PARTITION_GROUP)
return memberships[partition+1:]
labels=property(getLabels)
def modifyLabels(self, op, labels, **options):
thestore = store_singleton()
memberships = self.memberships
current_labels = self.getLabels()
label_set = set(current_labels + [PARTITION_GROUP])
new_labels = []
if op == "ADD":
new_labels = current_labels + labels
pass
elif op == "REPLACE":
new_labels = labels
pass
elif op == "REMOVE":
new_labels = [label for label in current_labels if label not in labels]
else:
raise NotImplementedError("modifyLabels: operation " + op + " not understood")
just_memberships = [grp for grp in memberships if grp not in label_set]
new_memberships = uniq(just_memberships + [PARTITION_GROUP] + new_labels)
if "ensure_partition_group" in options and options["ensure_partition_group"] is not False:
if thestore is None:
raise RuntimeError("store singleton must be initialized before using the ensure_partition_group option")
thestore.getPartitionGroup()
if "create_missing_labels" in options and options["create_missing_labels"] is not False:
if thestore is None:
raise RuntimeError("store singleton must be initialized before using the create_missing_labels option")
for missing_label in thestore.checkGroupValidity(new_labels):
thestore.addLabel(missing_label)
return self.modifyMemberships("REPLACE", new_memberships, {})
proxied_attr(node, "name")
proxied_attr(node, "memberships")
proxied_attr(node, "identity_group")
proxied_attr(node, "provisioned") | metapath = "/meta/node/%s" % self.name
meta = self.cm.fetch_json_resource(metapath, False, default={}) | random_line_split |
node.py | # Copyright (c) 2013 Red Hat, Inc.
# Author: William Benton ([email protected])
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from proxy import Proxy, proxied_attr
from proxy import proxied_attr_get as pag, proxied_attr_set as pas, proxied_attr_getset as pags
from arc_utils import arcmethod, uniq
from singleton import v as store_singleton
import errors
from errors import not_implemented, fail
from constants import PARTITION_GROUP, LABEL_SENTINEL_PARAM, LABEL_SENTINEL_PARAM_ATTR
from datetime import datetime
import calendar
import urllib
def ts():
now = datetime.utcnow()
return (calendar.timegm(now.utctimetuple()) * 1000000) + now.microsecond
class node(Proxy):
name = property(pag("name"))
memberships = property(*pags("memberships"))
identity_group = property(lambda self : self.cm.make_proxy_object("group", self.attr_vals["identity_group"], refresh=True))
provisioned = property(*pags("provisioned"))
last_updated_version = property(pag("last_updated_version"))
modifyMemberships = arcmethod(pag("memberships"), pas("memberships"), heterogeneous=True, preserve_order=True)
def getConfig(self, **options):
if options.has_key("version"):
return self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name), {"commit":options["version"]}, {})
return self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name))
def makeProvisioned(self):
self.provisioned = True
self.update()
def explain(self):
not_implemented()
def checkin(self):
metapath = "/meta/node/%s" % self.name
# now = datetime.utcnow().isoformat()
now = ts()
meta = self.cm.fetch_json_resource(metapath, False, default={})
meta["last-checkin"] = now
self.cm.put_json_resource(metapath, meta, False)
return now
def last_checkin(self):
metapath = "/meta/node/%s" % self.name
meta = self.cm.fetch_json_resource(metapath, False, default={})
return meta.has_key("last-checkin") and meta["last-checkin"] or 0
def whatChanged(self, old, new):
oc = self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name), {"commit":old}, {})
nc = self.cm.fetch_json_resource("/config/node/%s" % urllib.quote_plus(self.name), {"commit":new}, {})
ock = set(oc)
nck = set(nc)
params = set([p for p in (ock | nck) if p not in ock or p not in nck or oc[p] != nc[p]]) - set(["WALLABY_CONFIG_VERSION"])
mc_params = set([p for p in params if store_singleton().getParam(p).must_change])
subsystems = [store_singleton().getSubsys(sub) for sub in self.cm.list_objects("subsystem")]
restart, reconfig = [], []
for ss in subsystems:
ss.refresh
ssp = set(ss.parameters)
if ssp.intersection(mc_params):
restart.append(ss.name)
elif ssp.intersection(params):
reconfig.append(ss.name)
return [list(params), restart, reconfig]
# labeling support below
def | (self):
memberships = self.memberships
if not PARTITION_GROUP in memberships:
return []
else:
partition = memberships.index(PARTITION_GROUP)
return memberships[partition+1:]
labels=property(getLabels)
def modifyLabels(self, op, labels, **options):
thestore = store_singleton()
memberships = self.memberships
current_labels = self.getLabels()
label_set = set(current_labels + [PARTITION_GROUP])
new_labels = []
if op == "ADD":
new_labels = current_labels + labels
pass
elif op == "REPLACE":
new_labels = labels
pass
elif op == "REMOVE":
new_labels = [label for label in current_labels if label not in labels]
else:
raise NotImplementedError("modifyLabels: operation " + op + " not understood")
just_memberships = [grp for grp in memberships if grp not in label_set]
new_memberships = uniq(just_memberships + [PARTITION_GROUP] + new_labels)
if "ensure_partition_group" in options and options["ensure_partition_group"] is not False:
if thestore is None:
raise RuntimeError("store singleton must be initialized before using the ensure_partition_group option")
thestore.getPartitionGroup()
if "create_missing_labels" in options and options["create_missing_labels"] is not False:
if thestore is None:
raise RuntimeError("store singleton must be initialized before using the create_missing_labels option")
for missing_label in thestore.checkGroupValidity(new_labels):
thestore.addLabel(missing_label)
return self.modifyMemberships("REPLACE", new_memberships, {})
proxied_attr(node, "name")
proxied_attr(node, "memberships")
proxied_attr(node, "identity_group")
proxied_attr(node, "provisioned") | getLabels | identifier_name |
previous.rs | use std::path::Path;
use serde::{
Deserialize,
Serialize,
};
use anyhow::Result;
use rnc_core::grouper;
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct | {
pub id: usize,
pub urs_id: usize,
pub urs_taxid: String,
upi: String,
taxid: usize,
databases: Option<String>,
description: Option<String>,
has_coordinates: Option<bool>,
is_active: Option<bool>,
last_release: Option<usize>,
rna_type: Option<String>,
short_description: Option<String>,
so_rna_type: Option<String>,
}
impl grouper::HasIndex for Previous {
fn index(&self) -> usize {
self.id
}
}
pub fn group(path: &Path, max: usize, output: &Path) -> Result<()> {
grouper::group::<Previous>(grouper::Criteria::ZeroOrOne, &path, 1, max, &output)
}
| Previous | identifier_name |
previous.rs | use std::path::Path;
use serde::{
Deserialize,
Serialize,
};
use anyhow::Result;
use rnc_core::grouper;
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct Previous {
pub id: usize,
pub urs_id: usize,
pub urs_taxid: String,
upi: String,
taxid: usize,
databases: Option<String>,
description: Option<String>,
has_coordinates: Option<bool>,
is_active: Option<bool>,
last_release: Option<usize>,
rna_type: Option<String>,
short_description: Option<String>,
so_rna_type: Option<String>,
}
impl grouper::HasIndex for Previous {
fn index(&self) -> usize |
}
pub fn group(path: &Path, max: usize, output: &Path) -> Result<()> {
grouper::group::<Previous>(grouper::Criteria::ZeroOrOne, &path, 1, max, &output)
}
| {
self.id
} | identifier_body |
previous.rs | use std::path::Path;
use serde::{
Deserialize,
Serialize,
};
use anyhow::Result;
use rnc_core::grouper;
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct Previous {
pub id: usize,
pub urs_id: usize,
pub urs_taxid: String,
upi: String,
taxid: usize,
databases: Option<String>,
description: Option<String>,
has_coordinates: Option<bool>,
is_active: Option<bool>, | }
impl grouper::HasIndex for Previous {
fn index(&self) -> usize {
self.id
}
}
pub fn group(path: &Path, max: usize, output: &Path) -> Result<()> {
grouper::group::<Previous>(grouper::Criteria::ZeroOrOne, &path, 1, max, &output)
} | last_release: Option<usize>,
rna_type: Option<String>,
short_description: Option<String>,
so_rna_type: Option<String>, | random_line_split |
tomorrow-night-bright.js | 'use strict';
module.exports = {
colors: {
black: '#000000',
red: '#D54E53',
green: '#B9CA4A',
yellow: '#E7C547',
blue: '#7AA6DA',
magenta: '#C397D8',
cyan: '#70C0B1',
white: '#EAEAEA',
lightBlack: '#969896',
lightRed: '#D54E53',
lightGreen: '#B9CA4A',
lightYellow: '#E7C547',
lightBlue: '#7AA6DA',
lightMagenta: '#C397D8',
lightCyan: '#70C0B1',
lightWhite: '#EAEAEA',
},
// Default
backgroundColor: '#000000',
foregroundColor: '#EAEAEA',
cursorColor: '#EAEAEA',
borderColor: '#171717',
// Accent color
accentColor: '#7AA6DA', | }; |
// Other
tabTitleColor: 'rgba(255, 255, 255, 0.2)',
selectedTabTitleColor: '#EAEAEA', | random_line_split |
parse.py | import sys, math
from test import goertzel
import wave
import pyaudio
import Queue
import numpy as np
if len(sys.argv) < 2:
print "Usage: %s <filename> " % sys.argv[0]
sys.exit(1)
filename = sys.argv[1]
w = wave.open(filename)
fs = w.getframerate()
width = w.getsampwidth()
chunkDuration = .2 #.2 second chunks
chunk = int(chunkDuration*fs)
window = np.blackman(chunk)
p = pyaudio.PyAudio()
stream = p.open(format = p.get_format_from_width(w.getsampwidth()), channels = w.getnchannels(),rate = fs, output=True)
#read .2 second chunk
data = w.readframes(chunk)
chunk_data = []
#find the frequencies of each chunk
print "Running calculations on wav file"
num = 0
while data != '':
print "Calculating Chunk " + str(num)
stream.write(data)
indata = np.array(wave.struct.unpack("%dh"%(len(data)/width),\
data))
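#run the Goertzel filter over three bands centered near 1047 Hz (lo), 1568 Hz (mid) and 2093 Hz (hi)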
freqs , results = goertzel(indata,fs, (1036,1058), (1567,1569), (2082,2104))
chunk_data.append((freqs,results))
data = w.readframes(chunk)
num+=.2
stream.close()
p.terminate()
#finished getting data from chunks, now to parse the data
hi = []
lo = []
mid = []
#average first second of audio to get frequency baselines
for i in range (5):
a = chunk_data[i][0]
b = chunk_data[i][1]
for j in range(len(a)):
if a[j] > 1700:
hi.append(b[j])
elif a[j] < 1300:
lo.append(b[j])
else:
mid.append(b[j])
hi_average = sum(hi)/float(len(hi))
lo_average = sum(lo)/float(len(lo))
mid_average = sum(mid)/float(len(mid))
"""
Determine the frequency in each .2 second chunk that has the highest amplitude increase from its average, then determine the frequency
of that second of data by the median frequency of its 5 chunks
"""
#looks for start signal in last 3 seconds of audio
def signal_found(arr):
|
#gets freq of 1 second of audio
def get_freq(arr):
lo_count = 0
hi_count = 0
mid_count = 0
for i in arr:
if i=="lo":
lo_count+=1
if i=="hi":
hi_count+=1
if i=="mid":
mid_count+=1
if mid_count > hi_count and mid_count > lo_count:
return 2
if lo_count>hi_count:
return 0
else:
return 1
start = False
freq_list = []
offset = 0
bits = []
for i in range(5,len(chunk_data)):
a = chunk_data[i][0]
b = chunk_data[i][1]
hi_amp = []
lo_amp = []
mid_amp = []
#get averages for each freq
for j in range(len(a)):
if a[j] > 1700:
hi_amp.append(b[j])
elif a[j] < 1300:
lo_amp.append(b[j])
else:
mid_amp.append(b[j])
hi_av = sum(hi_amp)/float(len(hi_amp))
lo_av = sum(lo_amp)/float(len(lo_amp))
mid_av = sum(mid_amp)/float(len(mid_amp))
#get freq of this chunk
diff = [lo_av-lo_average,mid_av-mid_average,hi_av-hi_average]
index = diff.index(max(diff))
if(index==0):
freq_list.append("lo")
if(index==1):
freq_list.append("mid")
if(index==2):
freq_list.append("hi")
print(freq_list[len(freq_list)-1])
if len(freq_list) > 5:
if start:
if len(freq_list)%5 == offset:
bit = get_freq(freq_list[-5:])
if bit != 2:
bits.append(bit)
else:
print "Stop Signal Detected"
break
elif len(freq_list) >= 15:
if signal_found(freq_list):
print "signal found"
start = True
offset = len(freq_list)%5
print bits
| lst = arr[-15:]
first = 0
second = 0
third = 0
for i in range(0,5):
if lst[i]=="mid":
first += 1
for i in range(5,10):
if lst[i]=="mid":
second += 1
for i in range(10,15):
if lst[i]=="mid":
third += 1
if first >= 5 and second >= 5 and third >= 5:
return True
else:
return False | identifier_body |
parse.py | import sys, math
from test import goertzel
import wave
import pyaudio
import Queue
import numpy as np
if len(sys.argv) < 2:
print "Usage: %s <filename> " % sys.argv[0]
sys.exit(1)
filename = sys.argv[1]
w = wave.open(filename)
fs = w.getframerate()
width = w.getsampwidth()
chunkDuration = .2 #.2 second chunks
chunk = int(chunkDuration*fs)
window = np.blackman(chunk)
p = pyaudio.PyAudio()
stream = p.open(format = p.get_format_from_width(w.getsampwidth()), channels = w.getnchannels(),rate = fs, output=True)
#read .2 second chunk
data = w.readframes(chunk)
chunk_data = []
#find the frequencies of each chunk
print "Running calculations on wav file"
num = 0
while data != '':
print "Calculating Chunk " + str(num)
stream.write(data)
indata = np.array(wave.struct.unpack("%dh"%(len(data)/width),\
data))
freqs , results = goertzel(indata,fs, (1036,1058), (1567,1569), (2082,2104))
chunk_data.append((freqs,results))
data = w.readframes(chunk)
num+=.2
stream.close()
p.terminate()
#finished getting data from chunks, now to parse the data
hi = []
lo = []
mid = []
#average first second of audio to get frequency baselines
for i in range (5):
a = chunk_data[i][0]
b = chunk_data[i][1]
for j in range(len(a)):
if a[j] > 1700:
hi.append(b[j])
elif a[j] < 1300:
lo.append(b[j])
else:
mid.append(b[j])
hi_average = sum(hi)/float(len(hi))
lo_average = sum(lo)/float(len(lo))
mid_average = sum(mid)/float(len(mid))
"""
Determine the frequency in each .2 second chunk that has the highest amplitude increase from its average, then determine the frequency
of that second of data by the median frequency of its 5 chunks
"""
#looks for start signal in last 3 seconds of audio
def signal_found(arr):
lst = arr[-15:]
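#start marker: three consecutive 1-second windows (5 chunks each) must all be dominated by the mid tone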
first = 0
second = 0
third = 0
for i in range(0,5):
if lst[i]=="mid":
first += 1
for i in range(5,10):
if lst[i]=="mid":
second += 1
for i in range(10,15):
if lst[i]=="mid":
third += 1
if first >= 5 and second >= 5 and third >= 5:
return True
else:
return False
#gets freq of 1 second of audio
def | (arr):
lo_count = 0
hi_count = 0
mid_count = 0
for i in arr:
if i=="lo":
lo_count+=1
if i=="hi":
hi_count+=1
if i=="mid":
mid_count+=1
if mid_count > hi_count and mid_count > lo_count:
return 2
if lo_count>hi_count:
return 0
else:
return 1
start = False
freq_list = []
offset = 0
bits = []
for i in range(5,len(chunk_data)):
a = chunk_data[i][0]
b = chunk_data[i][1]
hi_amp = []
lo_amp = []
mid_amp = []
#get averages for each freq
for j in range(len(a)):
if a[j] > 1700:
hi_amp.append(b[j])
elif a[j] < 1300:
lo_amp.append(b[j])
else:
mid_amp.append(b[j])
hi_av = sum(hi_amp)/float(len(hi_amp))
lo_av = sum(lo_amp)/float(len(lo_amp))
mid_av = sum(mid_amp)/float(len(mid_amp))
#get freq of this chunk
diff = [lo_av-lo_average,mid_av-mid_average,hi_av-hi_average]
index = diff.index(max(diff))
if(index==0):
freq_list.append("lo")
if(index==1):
freq_list.append("mid")
if(index==2):
freq_list.append("hi")
print(freq_list[len(freq_list)-1])
if len(freq_list) > 5:
if start:
if len(freq_list)%5 == offset:
bit = get_freq(freq_list[-5:])
if bit != 2:
bits.append(bit)
else:
print "Stop Signal Detected"
break
elif len(freq_list) >= 15:
if signal_found(freq_list):
print "signal found"
start = True
offset = len(freq_list)%5
print bits
| get_freq | identifier_name |
parse.py | import sys, math
from test import goertzel
import wave
import pyaudio
import Queue
import numpy as np
if len(sys.argv) < 2:
print "Usage: %s <filename> " % sys.argv[0]
sys.exit(1)
filename = sys.argv[1]
w = wave.open(filename)
fs = w.getframerate()
width = w.getsampwidth()
chunkDuration = .2 #.2 second chunks
chunk = int(chunkDuration*fs)
window = np.blackman(chunk)
p = pyaudio.PyAudio() | stream = p.open(format = p.get_format_from_width(w.getsampwidth()), channels = w.getnchannels(),rate = fs, output=True)
#read .2 second chunk
data = w.readframes(chunk)
chunk_data = []
#find the frequencies of each chunk
print "Running calculations on wav file"
num = 0
while data != '':
print "Calculating Chunk " + str(num)
stream.write(data)
indata = np.array(wave.struct.unpack("%dh" % (len(data)/width), data))
freqs , results = goertzel(indata,fs, (1036,1058), (1567,1569), (2082,2104))
chunk_data.append((freqs,results))
data = w.readframes(chunk)
num+=.2
stream.close()
p.terminate()
#finished getting data from chunks, now to parse the data
hi = []
lo = []
mid = []
#average first second of audio to get frequency baselines
for i in range (5):
a = chunk_data[i][0]
b = chunk_data[i][1]
for j in range(len(a)):
if a[j] > 1700:
hi.append(b[j])
elif a[j] < 1300:
lo.append(b[j])
else:
mid.append(b[j])
hi_average = sum(hi)/float(len(hi))
lo_average = sum(lo)/float(len(lo))
mid_average = sum(mid)/float(len(mid))
"""
Determine the frequency in each .2 second chunk that has the highest amplitude increase from its average, then determine the frequency
of that second of data by the median frequency of its 5 chunks
"""
#looks for start signal in last 3 seconds of audio
def signal_found(arr):
lst = arr[-15:]
first = 0
second = 0
third = 0
for i in range(0,5):
if lst[i]=="mid":
first += 1
for i in range(5,10):
if lst[i]=="mid":
second += 1
for i in range(10,15):
if lst[i]=="mid":
third += 1
if first >= 5 and second >= 5 and third >= 5:
return True
else:
return False
#gets freq of 1 second of audio
def get_freq(arr):
lo_count = 0
hi_count = 0
mid_count = 0
for i in arr:
if i=="lo":
lo_count+=1
if i=="hi":
hi_count+=1
if i=="mid":
mid_count+=1
if mid_count > hi_count and mid_count > lo_count:
return 2
if lo_count>hi_count:
return 0
else:
return 1
start = False
freq_list = []
offset = 0
bits = []
for i in range(5,len(chunk_data)):
a = chunk_data[i][0]
b = chunk_data[i][1]
hi_amp = []
lo_amp = []
mid_amp = []
#get averages for each freq
for j in range(len(a)):
if a[j] > 1700:
hi_amp.append(b[j])
elif a[j] < 1300:
lo_amp.append(b[j])
else:
mid_amp.append(b[j])
hi_av = sum(hi_amp)/float(len(hi_amp))
lo_av = sum(lo_amp)/float(len(lo_amp))
mid_av = sum(mid_amp)/float(len(mid_amp))
#get freq of this chunk
diff = [lo_av-lo_average,mid_av-mid_average,hi_av-hi_average]
index = diff.index(max(diff))
if(index==0):
freq_list.append("lo")
if(index==1):
freq_list.append("mid")
if(index==2):
freq_list.append("hi")
print(freq_list[len(freq_list)-1])
if len(freq_list) > 5:
if start:
if len(freq_list)%5 == offset:
bit = get_freq(freq_list[-5:])
if bit != 2:
bits.append(bit)
else:
print "Stop Signal Detected"
break
elif len(freq_list) >= 15:
if signal_found(freq_list):
print "signal found"
start = True
offset = len(freq_list)%5
print bits | random_line_split |
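# --- Illustrative only: the imported goertzel() lives in test.py, which is
# not shown in this dump. A minimal sketch of the assumed interface: it scans
# each (lo, hi) frequency range and returns the probed frequencies alongside
# one squared magnitude per frequency.
import math
def goertzel_sketch(samples, fs, *freq_ranges):
    freqs, results = [], []
    n = len(samples)
    for lo, hi in freq_ranges:
        for k in range(int(lo * n / fs), int(hi * n / fs) + 1):
            f = float(k) * fs / n
            w = 2.0 * math.pi * k / n
            coeff = 2.0 * math.cos(w)
            s_prev, s_prev2 = 0.0, 0.0
            for x in samples:
                s = x + coeff * s_prev - s_prev2
                s_prev2, s_prev = s_prev, s
            freqs.append(f)
            results.append(s_prev2 ** 2 + s_prev ** 2 - coeff * s_prev * s_prev2)
    return freqs, results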
|
parse.py | import sys, math
from test import goertzel
import wave
import pyaudio
import Queue
import numpy as np
if len(sys.argv) < 2:
print "Usage: %s <filename> " % sys.argv[0]
sys.exit(1)
filename = sys.argv[1]
w = wave.open(filename)
fs = w.getframerate()
width = w.getsampwidth()
chunkDuration = .2 #.2 second chunks
chunk = int(chunkDuration*fs)
window = np.blackman(chunk)
p = pyaudio.PyAudio()
stream = p.open(format = p.get_format_from_width(w.getsampwidth()), channels = w.getnchannels(),rate = fs, output=True)
#read .2 second chunk
data = w.readframes(chunk)
chunk_data = []
#find the frequencies of each chunk
print "Running calculations on wav file"
num = 0
while data != '':
print "Calculating Chunk " + str(num)
stream.write(data)
indata = np.array(wave.struct.unpack("%dh" % (len(data)/width), data))
freqs , results = goertzel(indata,fs, (1036,1058), (1567,1569), (2082,2104))
chunk_data.append((freqs,results))
data = w.readframes(chunk)
num+=.2
stream.close()
p.terminate()
#finished getting data from chunks, now to parse the data
hi = []
lo = []
mid = []
#average first second of audio to get frequency baselines
for i in range (5):
a = chunk_data[i][0]
b = chunk_data[i][1]
for j in range(len(a)):
if a[j] > 1700:
hi.append(b[j])
elif a[j] < 1300:
lo.append(b[j])
else:
mid.append(b[j])
hi_average = sum(hi)/float(len(hi))
lo_average = sum(lo)/float(len(lo))
mid_average = sum(mid)/float(len(mid))
"""
Determine the frequency in each .2 second chunk that has the highest amplitude increase from its average, then determine the frequency
of that second of data by the median frequency of its 5 chunks
"""
#looks for start signal in last 3 seconds of audio
def signal_found(arr):
lst = arr[-15:]
first = 0
second = 0
third = 0
for i in range(0,5):
if lst[i]=="mid":
first += 1
for i in range(5,10):
if lst[i]=="mid":
second += 1
for i in range(10,15):
if lst[i]=="mid":
third += 1
if first >= 5 and second >= 5 and third >= 5:
return True
else:
return False
#gets freq of 1 second of audio
def get_freq(arr):
lo_count = 0
hi_count = 0
mid_count = 0
for i in arr:
if i=="lo":
lo_count+=1
if i=="hi":
hi_count+=1
if i=="mid":
|
if mid_count > hi_count and mid_count > lo_count:
return 2
if lo_count>hi_count:
return 0
else:
return 1
start = False
freq_list = []
offset = 0
bits = []
for i in range(5,len(chunk_data)):
a = chunk_data[i][0]
b = chunk_data[i][1]
hi_amp = []
lo_amp = []
mid_amp = []
#get averages for each freq
for j in range(len(a)):
if a[j] > 1700:
hi_amp.append(b[j])
elif a[j] < 1300:
lo_amp.append(b[j])
else:
mid_amp.append(b[j])
hi_av = sum(hi_amp)/float(len(hi_amp))
lo_av = sum(lo_amp)/float(len(lo_amp))
mid_av = sum(mid_amp)/float(len(mid_amp))
#get freq of this chunk
diff = [lo_av-lo_average,mid_av-mid_average,hi_av-hi_average]
index = diff.index(max(diff))
if(index==0):
freq_list.append("lo")
if(index==1):
freq_list.append("mid")
if(index==2):
freq_list.append("hi")
print(freq_list[len(freq_list)-1])
if len(freq_list) > 5:
if start:
if len(freq_list)%5 == offset:
bit = get_freq(freq_list[-5:])
if bit != 2:
bits.append(bit)
else:
print "Stop Signal Detected"
break
elif len(freq_list) >= 15:
if signal_found(freq_list):
print "signal found"
start = True
offset = len(freq_list)%5
print bits
| mid_count+=1 | conditional_block |
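# --- Illustrative only: the timing assumptions baked into the loop above.
# Chunks are 0.2 s and each bit is decided over 5 chunks, so the effective
# data rate is 1 bit per second:
CHUNK_SECONDS = 0.2
CHUNKS_PER_BIT = 5
assert CHUNK_SECONDS * CHUNKS_PER_BIT == 1.0  # i.e. 1 baud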
1Prelude.js | // Copyright (c) 2012, Event Store LLP
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// Redistributions in binary form must reproduce the above copyright | // contributors may be used to endorse or promote products derived from
// this software without specific prior written permission
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
"use strict";
// these $ globals are defined by external environment
// they are redefined here to make R# like tools understand them
var _log = $log;
var _load_module = $load_module;
function log(message) {
_log("PROJECTIONS (JS): " + message);
}
function initializeModules() {
// $load_module loads a new instance of the given module every time;
// it is the prelude's responsibility to manage module instances
var modules = _load_module('Modules');
// TODO: replace with createRequire($load_module)
modules.$load_module = _load_module;
return modules;
}
function initializeProjections() {
var projections = _load_module('Projections');
return projections;
}
var modules = initializeModules();
var projections = initializeProjections();
var eventProcessor;
function scope($on, $notify) {
eventProcessor = projections.createEventProcessor(log, $notify);
eventProcessor.register_command_handlers($on);
function queryLog(message) {
if (typeof message === "string")
_log(message);
else
_log(JSON.stringify(message));
}
function translateOn(handlers) {
for (var name in handlers) {
if (name == 0 || name === "$init") {
eventProcessor.on_init_state(handlers[name]);
} else if (name === "$initShared") {
eventProcessor.on_init_shared_state(handlers[name]);
} else if (name === "$any") {
eventProcessor.on_any(handlers[name]);
} else if (name === "$deleted") {
eventProcessor.on_deleted_notification(handlers[name]);
} else if (name === "$created") {
eventProcessor.on_created_notification(handlers[name]);
} else {
eventProcessor.on_event(name, handlers[name]);
}
}
}
function $defines_state_transform() {
eventProcessor.$defines_state_transform();
}
function transformBy(by) {
eventProcessor.chainTransformBy(by);
return {
transformBy: transformBy,
filterBy: filterBy,
outputState: outputState,
outputTo: outputTo,
};
}
function filterBy(by) {
eventProcessor.chainTransformBy(function (s) {
var result = by(s);
return result ? s : null;
});
return {
transformBy: transformBy,
filterBy: filterBy,
outputState: outputState,
outputTo: outputTo,
};
}
function outputTo(resultStream, partitionResultStreamPattern) {
eventProcessor.$defines_state_transform();
eventProcessor.options({
resultStreamName: resultStream,
partitionResultStreamNamePattern: partitionResultStreamPattern,
});
}
function outputState() {
eventProcessor.$outputState();
return {
transformBy: transformBy,
filterBy: filterBy,
outputTo: outputTo,
};
}
function when(handlers) {
translateOn(handlers);
return {
$defines_state_transform: $defines_state_transform,
transformBy: transformBy,
filterBy: filterBy,
outputTo: outputTo,
outputState: outputState,
};
}
function foreachStream() {
eventProcessor.byStream();
return {
when: when,
};
}
function partitionBy(byHandler) {
eventProcessor.partitionBy(byHandler);
return {
when: when,
};
}
function fromCategory(category) {
eventProcessor.fromCategory(category);
return {
partitionBy: partitionBy,
foreachStream: foreachStream,
when: when,
outputState: outputState,
};
}
function fromAll() {
eventProcessor.fromAll();
return {
partitionBy: partitionBy,
when: when,
foreachStream: foreachStream,
outputState: outputState,
};
}
function fromStream(stream) {
eventProcessor.fromStream(stream);
return {
partitionBy: partitionBy,
when: when,
outputState: outputState,
};
}
function fromStreamCatalog(streamCatalog, transformer) {
eventProcessor.fromStreamCatalog(streamCatalog, transformer ? transformer : null);
return {
foreachStream: foreachStream,
};
}
function fromStreamsMatching(filter) {
eventProcessor.fromStreamsMatching(filter);
return {
when: when,
};
}
function fromStreams(streams) {
var arr = Array.isArray(streams) ? streams : arguments;
for (var i = 0; i < arr.length; i++)
eventProcessor.fromStream(arr[i]);
return {
partitionBy: partitionBy,
when: when,
outputState: outputState,
};
}
function emit(streamId, eventName, eventBody, metadata) {
var message = { streamId: streamId, eventName: eventName , body: JSON.stringify(eventBody), metadata: metadata, isJson: true };
eventProcessor.emit(message);
}
function linkTo(streamId, event, metadata) {
var message = { streamId: streamId, eventName: "$>", body: event.sequenceNumber + "@" + event.streamId, metadata: metadata, isJson: false };
eventProcessor.emit(message);
}
function copyTo(streamId, event, metadata) {
var m = {};
var em = event.metadata;
if (em)
for (var p1 in em)
if (p1.indexOf("$") !== 0 || p1 === "$correlationId")
m[p1] = em[p1];
if (metadata)
for (var p2 in metadata)
if (p2.indexOf("$") !== 0)
m[p2] = metadata[p2];
var message = { streamId: streamId, eventName: event.eventType, body: event.bodyRaw, metadata: m };
eventProcessor.emit(message);
}
function linkStreamTo(streamId, linkedStreamId, metadata) {
var message = { streamId: streamId, eventName: "$@", body: linkedStreamId, metadata: metadata, isJson: false };
eventProcessor.emit(message);
}
function options(options_object) {
eventProcessor.options(options_object);
}
return {
log: queryLog,
on_any: eventProcessor.on_any,
on_raw: eventProcessor.on_raw,
fromAll: fromAll,
fromCategory: fromCategory,
fromStream: fromStream,
fromStreams: fromStreams,
fromStreamCatalog: fromStreamCatalog,
fromStreamsMatching: fromStreamsMatching,
options: options,
emit: emit,
linkTo: linkTo,
copyTo: copyTo,
linkStreamTo: linkStreamTo,
require: modules.require,
};
};
scope; | // notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// Neither the name of the Event Store LLP nor the names of its | random_line_split |
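// --- Illustrative only: a projection written against the DSL defined above.
// The stream and event names ('orders', 'OrderPlaced') are invented here.
fromStream('orders')
    .when({
        $init: function () { return { placed: 0 }; },
        OrderPlaced: function (state, event) {
            state.placed += 1;
            linkTo('placed-orders', event);
            return state;
        }
    })
    .outputState();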
1Prelude.js | // Copyright (c) 2012, Event Store LLP
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// Neither the name of the Event Store LLP nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
"use strict";
// these $ globals are defined by external environment
// they are redefined here to make R# like tools understand them
var _log = $log;
var _load_module = $load_module;
function log(message) {
| function initializeModules() {
// $load_module loads a new instance of the given module every time;
// it is the prelude's responsibility to manage module instances
var modules = _load_module('Modules');
// TODO: replace with createRequire($load_module)
modules.$load_module = _load_module;
return modules;
}
function initializeProjections() {
var projections = _load_module('Projections');
return projections;
}
var modules = initializeModules();
var projections = initializeProjections();
var eventProcessor;
function scope($on, $notify) {
eventProcessor = projections.createEventProcessor(log, $notify);
eventProcessor.register_command_handlers($on);
function queryLog(message) {
if (typeof message === "string")
_log(message);
else
_log(JSON.stringify(message));
}
function translateOn(handlers) {
for (var name in handlers) {
if (name == 0 || name === "$init") {
eventProcessor.on_init_state(handlers[name]);
} else if (name === "$initShared") {
eventProcessor.on_init_shared_state(handlers[name]);
} else if (name === "$any") {
eventProcessor.on_any(handlers[name]);
} else if (name === "$deleted") {
eventProcessor.on_deleted_notification(handlers[name]);
} else if (name === "$created") {
eventProcessor.on_created_notification(handlers[name]);
} else {
eventProcessor.on_event(name, handlers[name]);
}
}
}
function $defines_state_transform() {
eventProcessor.$defines_state_transform();
}
function transformBy(by) {
eventProcessor.chainTransformBy(by);
return {
transformBy: transformBy,
filterBy: filterBy,
outputState: outputState,
outputTo: outputTo,
};
}
function filterBy(by) {
eventProcessor.chainTransformBy(function (s) {
var result = by(s);
return result ? s : null;
});
return {
transformBy: transformBy,
filterBy: filterBy,
outputState: outputState,
outputTo: outputTo,
};
}
function outputTo(resultStream, partitionResultStreamPattern) {
eventProcessor.$defines_state_transform();
eventProcessor.options({
resultStreamName: resultStream,
partitionResultStreamNamePattern: partitionResultStreamPattern,
});
}
function outputState() {
eventProcessor.$outputState();
return {
transformBy: transformBy,
filterBy: filterBy,
outputTo: outputTo,
};
}
function when(handlers) {
translateOn(handlers);
return {
$defines_state_transform: $defines_state_transform,
transformBy: transformBy,
filterBy: filterBy,
outputTo: outputTo,
outputState: outputState,
};
}
function foreachStream() {
eventProcessor.byStream();
return {
when: when,
};
}
function partitionBy(byHandler) {
eventProcessor.partitionBy(byHandler);
return {
when: when,
};
}
function fromCategory(category) {
eventProcessor.fromCategory(category);
return {
partitionBy: partitionBy,
foreachStream: foreachStream,
when: when,
outputState: outputState,
};
}
function fromAll() {
eventProcessor.fromAll();
return {
partitionBy: partitionBy,
when: when,
foreachStream: foreachStream,
outputState: outputState,
};
}
function fromStream(stream) {
eventProcessor.fromStream(stream);
return {
partitionBy: partitionBy,
when: when,
outputState: outputState,
};
}
function fromStreamCatalog(streamCatalog, transformer) {
eventProcessor.fromStreamCatalog(streamCatalog, transformer ? transformer : null);
return {
foreachStream: foreachStream,
};
}
function fromStreamsMatching(filter) {
eventProcessor.fromStreamsMatching(filter);
return {
when: when,
};
}
function fromStreams(streams) {
var arr = Array.isArray(streams) ? streams : arguments;
for (var i = 0; i < arr.length; i++)
eventProcessor.fromStream(arr[i]);
return {
partitionBy: partitionBy,
when: when,
outputState: outputState,
};
}
function emit(streamId, eventName, eventBody, metadata) {
var message = { streamId: streamId, eventName: eventName , body: JSON.stringify(eventBody), metadata: metadata, isJson: true };
eventProcessor.emit(message);
}
function linkTo(streamId, event, metadata) {
var message = { streamId: streamId, eventName: "$>", body: event.sequenceNumber + "@" + event.streamId, metadata: metadata, isJson: false };
eventProcessor.emit(message);
}
function copyTo(streamId, event, metadata) {
var m = {};
var em = event.metadata;
if (em)
for (var p1 in em)
if (p1.indexOf("$") !== 0 || p1 === "$correlationId")
m[p1] = em[p1];
if (metadata)
for (var p2 in metadata)
if (p2.indexOf("$") !== 0)
m[p2] = metadata[p2];
var message = { streamId: streamId, eventName: event.eventType, body: event.bodyRaw, metadata: m };
eventProcessor.emit(message);
}
function linkStreamTo(streamId, linkedStreamId, metadata) {
var message = { streamId: streamId, eventName: "$@", body: linkedStreamId, metadata: metadata, isJson: false };
eventProcessor.emit(message);
}
function options(options_object) {
eventProcessor.options(options_object);
}
return {
log: queryLog,
on_any: eventProcessor.on_any,
on_raw: eventProcessor.on_raw,
fromAll: fromAll,
fromCategory: fromCategory,
fromStream: fromStream,
fromStreams: fromStreams,
fromStreamCatalog: fromStreamCatalog,
fromStreamsMatching: fromStreamsMatching,
options: options,
emit: emit,
linkTo: linkTo,
copyTo: copyTo,
linkStreamTo: linkStreamTo,
require: modules.require,
};
};
scope; | _log("PROJECTIONS (JS): " + message);
}
| identifier_body |
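// --- Illustrative only: the handler-object shape translateOn() above walks.
// Keys starting with '$' are routed to dedicated hooks; every other key is
// treated as an event type name ('CheckoutStarted' is invented here).
var exampleHandlers = {
    $init: function () { return { seen: 0 }; },          // on_init_state
    $any: function (state, event) { state.seen += 1; },  // on_any
    CheckoutStarted: function (state, event) {           // on_event
        state.seen += 1;
    }
};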
1Prelude.js | // Copyright (c) 2012, Event Store LLP
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// Neither the name of the Event Store LLP nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
"use strict";
// these $ globals are defined by external environment
// they are redefined here to make R# like tools understand them
var _log = $log;
var _load_module = $load_module;
function log(message) {
_log("PROJECTIONS (JS): " + message);
}
function initializeModules() {
// $load_module loads a new instance of the given module every time;
// it is the prelude's responsibility to manage module instances
var modules = _load_module('Modules');
// TODO: replace with createRequire($load_module)
modules.$load_module = _load_module;
return modules;
}
function initializeProjections() {
var projections = _load_module('Projections');
return projections;
}
var modules = initializeModules();
var projections = initializeProjections();
var eventProcessor;
function scope($on, $notify) {
eventProcessor = projections.createEventProcessor(log, $notify);
eventProcessor.register_command_handlers($on);
function queryLog(message) {
if (typeof message === "string")
_log(message);
else
_log(JSON.stringify(message));
}
function translateOn(handlers) {
for (var name in handlers) {
if (name == 0 || name === "$init") {
eventProcessor.on_init_state(handlers[name]);
} else if (name === "$initShared") {
eventProcessor.on_init_shared_state(handlers[name]);
} else if (name === "$any") {
eventProcessor.on_any(handlers[name]);
} else if (name === "$deleted") {
eventProcessor.on_deleted_notification(handlers[name]);
} else if (name === "$created") {
eventProcessor.on_created_notification(handlers[name]);
} else {
eventProcessor.on_event(name, handlers[name]);
}
}
}
function $defines_state_transform() {
eventProcessor.$defines_state_transform();
}
function transformBy(by) {
eventProcessor.chainTransformBy(by);
return {
transformBy: transformBy,
filterBy: filterBy,
outputState: outputState,
outputTo: outputTo,
};
}
function filterBy(by) {
eventProcessor.chainTransformBy(function (s) {
var result = by(s);
return result ? s : null;
});
return {
transformBy: transformBy,
filterBy: filterBy,
outputState: outputState,
outputTo: outputTo,
};
}
function outputTo(resultStream, partitionResultStreamPattern) {
eventProcessor.$defines_state_transform();
eventProcessor.options({
resultStreamName: resultStream,
partitionResultStreamNamePattern: partitionResultStreamPattern,
});
}
function outputState() {
eventProcessor.$outputState();
return {
transformBy: transformBy,
filterBy: filterBy,
outputTo: outputTo,
};
}
function when(handlers) {
translateOn(handlers);
return {
$defines_state_transform: $defines_state_transform,
transformBy: transformBy,
filterBy: filterBy,
outputTo: outputTo,
outputState: outputState,
};
}
function foreachStream() {
eventProcessor.byStream();
return {
when: when,
};
}
function pa | yHandler) {
eventProcessor.partitionBy(byHandler);
return {
when: when,
};
}
function fromCategory(category) {
eventProcessor.fromCategory(category);
return {
partitionBy: partitionBy,
foreachStream: foreachStream,
when: when,
outputState: outputState,
};
}
function fromAll() {
eventProcessor.fromAll();
return {
partitionBy: partitionBy,
when: when,
foreachStream: foreachStream,
outputState: outputState,
};
}
function fromStream(stream) {
eventProcessor.fromStream(stream);
return {
partitionBy: partitionBy,
when: when,
outputState: outputState,
};
}
function fromStreamCatalog(streamCatalog, transformer) {
eventProcessor.fromStreamCatalog(streamCatalog, transformer ? transformer : null);
return {
foreachStream: foreachStream,
};
}
function fromStreamsMatching(filter) {
eventProcessor.fromStreamsMatching(filter);
return {
when: when,
};
}
function fromStreams(streams) {
var arr = Array.isArray(streams) ? streams : arguments;
for (var i = 0; i < arr.length; i++)
eventProcessor.fromStream(arr[i]);
return {
partitionBy: partitionBy,
when: when,
outputState: outputState,
};
}
function emit(streamId, eventName, eventBody, metadata) {
var message = { streamId: streamId, eventName: eventName , body: JSON.stringify(eventBody), metadata: metadata, isJson: true };
eventProcessor.emit(message);
}
function linkTo(streamId, event, metadata) {
var message = { streamId: streamId, eventName: "$>", body: event.sequenceNumber + "@" + event.streamId, metadata: metadata, isJson: false };
eventProcessor.emit(message);
}
function copyTo(streamId, event, metadata) {
var m = {};
var em = event.metadata;
if (em)
for (var p1 in em)
if (p1.indexOf("$") !== 0 || p1 === "$correlationId")
m[p1] = em[p1];
if (metadata)
for (var p2 in metadata)
if (p2.indexOf("$") !== 0)
m[p2] = metadata[p2];
var message = { streamId: streamId, eventName: event.eventType, body: event.bodyRaw, metadata: m };
eventProcessor.emit(message);
}
function linkStreamTo(streamId, linkedStreamId, metadata) {
var message = { streamId: streamId, eventName: "$@", body: linkedStreamId, metadata: metadata, isJson: false };
eventProcessor.emit(message);
}
function options(options_object) {
eventProcessor.options(options_object);
}
return {
log: queryLog,
on_any: eventProcessor.on_any,
on_raw: eventProcessor.on_raw,
fromAll: fromAll,
fromCategory: fromCategory,
fromStream: fromStream,
fromStreams: fromStreams,
fromStreamCatalog: fromStreamCatalog,
fromStreamsMatching: fromStreamsMatching,
options: options,
emit: emit,
linkTo: linkTo,
copyTo: copyTo,
linkStreamTo: linkStreamTo,
require: modules.require,
};
};
scope; | rtitionBy(b | identifier_name |
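// --- Illustrative only: the message shape emit() above builds before handing
// it to eventProcessor.emit(); stream and event names are invented here.
var exampleEmitMessage = {
    streamId: 'audit',
    eventName: 'NoteAdded',
    body: JSON.stringify({ note: 'hi' }),
    metadata: { userId: 42 },
    isJson: true
};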
lib.rs | #![crate_type = "rlib"]
#![allow(non_snake_case)]
#![deny(unsafe_code)]
#![feature(custom_derive, plugin)]
#![plugin(heapsize_plugin, serde_macros)]
#[macro_use]
extern crate bitflags;
extern crate heapsize;
extern crate hyper;
extern crate ipc_channel;
extern crate msg;
extern crate serde;
extern crate time;
extern crate url;
extern crate util;
use hyper::header::Headers;
use hyper::http::RawStatus;
use hyper::method::Method;
use ipc_channel::ipc::IpcSender;
use msg::constellation_msg::PipelineId;
use std::net::TcpStream;
use time::Duration;
use time::Tm;
use url::Url;
// Information would be attached to NewGlobal to be received and show in devtools.
// Extend these fields if we need more information.
#[derive(Deserialize, Serialize)]
pub struct DevtoolsPageInfo {
pub title: String,
pub url: Url
}
#[derive(Deserialize, HeapSizeOf, Serialize, Clone)]
pub struct CSSError {
pub filename: String,
pub line: usize,
pub column: usize,
pub msg: String
}
/// Messages to instruct the devtools server to update its known actors/state
/// according to changes in the browser.
pub enum DevtoolsControlMsg {
/// Messages from threads in the chrome process (resource/constellation/devtools)
FromChrome(ChromeToDevtoolsControlMsg),
/// Messages from script threads
FromScript(ScriptToDevtoolsControlMsg),
}
/// Events that the devtools server must act upon.
pub enum ChromeToDevtoolsControlMsg {
/// A new client has connected to the server.
AddClient(TcpStream),
/// The browser is shutting down.
ServerExitMsg,
/// A network event occurred (request, reply, etc.). The actor with the
/// provided name should be notified.
NetworkEvent(String, NetworkEvent),
}
#[derive(Deserialize, Serialize)]
/// Events that the devtools server must act upon.
pub enum ScriptToDevtoolsControlMsg {
/// A new global object was created, associated with a particular pipeline.
/// The means of communicating directly with it are provided.
NewGlobal((PipelineId, Option<WorkerId>),
IpcSender<DevtoolScriptControlMsg>,
DevtoolsPageInfo),
/// A particular page has invoked the console API.
ConsoleAPI(PipelineId, ConsoleMessage, Option<WorkerId>),
/// An animation frame with the given timestamp was processed in a script thread.
/// The actor with the provided name should be notified.
FramerateTick(String, f64),
/// Report a CSS parse error for the given pipeline
ReportCSSError(PipelineId, CSSError),
}
/// Serialized JS return values
/// TODO: generalize this beyond the EvaluateJS message?
#[derive(Deserialize, Serialize)]
pub enum EvaluateJSReply {
VoidValue,
NullValue,
BooleanValue(bool),
NumberValue(f64),
StringValue(String),
ActorValue { class: String, uuid: String },
}
#[derive(Deserialize, Serialize)]
pub struct AttrInfo {
pub namespace: String,
pub name: String,
pub value: String,
}
#[derive(Deserialize, Serialize)]
pub struct NodeInfo {
pub uniqueId: String,
pub baseURI: String,
pub parent: String,
pub nodeType: u16,
pub namespaceURI: String,
pub nodeName: String,
pub numChildren: usize,
pub name: String,
pub publicId: String,
pub systemId: String,
pub attrs: Vec<AttrInfo>,
pub isDocumentElement: bool,
pub shortValue: String,
pub incompleteValue: bool,
}
pub struct StartedTimelineMarker {
name: String,
start_time: PreciseTime,
start_stack: Option<Vec<()>>,
}
#[derive(Deserialize, Serialize)]
pub struct TimelineMarker {
pub name: String,
pub start_time: PreciseTime,
pub start_stack: Option<Vec<()>>,
pub end_time: PreciseTime,
pub end_stack: Option<Vec<()>>,
}
#[derive(PartialEq, Eq, Hash, Clone, Deserialize, Serialize)]
pub enum TimelineMarkerType {
Reflow,
DOMEvent,
}
/// The properties of a DOM node as computed by layout.
#[derive(Deserialize, Serialize)]
pub struct ComputedNodeLayout {
pub display: String,
pub position: String,
pub zIndex: String,
pub boxSizing: String,
pub autoMargins: AutoMargins,
pub marginTop: String,
pub marginRight: String,
pub marginBottom: String,
pub marginLeft: String,
pub borderTopWidth: String,
pub borderRightWidth: String,
pub borderBottomWidth: String,
pub borderLeftWidth: String,
pub paddingTop: String,
pub paddingRight: String,
pub paddingBottom: String,
pub paddingLeft: String,
pub width: f32,
pub height: f32,
}
#[derive(Deserialize, Serialize)]
pub struct AutoMargins {
pub top: bool,
pub right: bool,
pub bottom: bool,
pub left: bool,
}
/// Messages to process in a particular script thread, as instructed by a devtools client.
#[derive(Deserialize, Serialize)]
pub enum DevtoolScriptControlMsg {
/// Evaluate a JS snippet in the context of the global for the given pipeline.
EvaluateJS(PipelineId, String, IpcSender<EvaluateJSReply>),
/// Retrieve the details of the root node (ie. the document) for the given pipeline.
GetRootNode(PipelineId, IpcSender<NodeInfo>),
/// Retrieve the details of the document element for the given pipeline.
GetDocumentElement(PipelineId, IpcSender<NodeInfo>),
/// Retrieve the details of the child nodes of the given node in the given pipeline.
GetChildren(PipelineId, String, IpcSender<Vec<NodeInfo>>),
/// Retrieve the computed layout properties of the given node in the given pipeline.
GetLayout(PipelineId, String, IpcSender<ComputedNodeLayout>),
/// Retrieve all stored console messages for the given pipeline.
GetCachedMessages(PipelineId, CachedConsoleMessageTypes, IpcSender<Vec<CachedConsoleMessage>>),
/// Update a given node's attributes with a list of modifications.
ModifyAttribute(PipelineId, String, Vec<Modification>),
/// Request live console messages for a given pipeline (true if desired, false otherwise).
WantsLiveNotifications(PipelineId, bool),
/// Request live notifications for a given set of timeline events for a given pipeline.
SetTimelineMarkers(PipelineId, Vec<TimelineMarkerType>, IpcSender<TimelineMarker>),
/// Withdraw request for live timeline notifications for a given pipeline.
DropTimelineMarkers(PipelineId, Vec<TimelineMarkerType>),
/// Request a callback directed at the given actor name from the next animation frame
/// executed in the given pipeline.
RequestAnimationFrame(PipelineId, String),
}
#[derive(Deserialize, Serialize)]
pub struct Modification {
pub attributeName: String,
pub newValue: Option<String>,
}
#[derive(Clone, Deserialize, Serialize)]
pub enum LogLevel {
Log,
Debug,
Info,
Warn,
Error,
}
#[derive(Clone, Deserialize, Serialize)]
pub struct ConsoleMessage {
pub message: String,
pub logLevel: LogLevel,
pub filename: String,
pub lineNumber: usize,
pub columnNumber: usize,
}
bitflags! {
#[derive(Deserialize, Serialize)]
flags CachedConsoleMessageTypes: u8 {
const PAGE_ERROR = 1 << 0,
const CONSOLE_API = 1 << 1,
}
}
#[derive(Deserialize, Serialize)]
pub struct PageError {
#[serde(rename = "type")]
pub type_: String,
pub errorMessage: String,
pub sourceName: String,
pub lineText: String,
pub lineNumber: u32,
pub columnNumber: u32,
pub category: String,
pub timeStamp: u64,
pub error: bool,
pub warning: bool,
pub exception: bool,
pub strict: bool,
pub private: bool,
}
#[derive(Deserialize, Serialize)]
pub struct ConsoleAPI {
#[serde(rename = "type")]
pub type_: String,
pub level: String,
pub filename: String,
pub lineNumber: u32,
pub functionName: String,
pub timeStamp: u64,
pub private: bool,
pub arguments: Vec<String>,
}
#[derive(Deserialize, Serialize)]
pub enum | {
PageError(PageError),
ConsoleAPI(ConsoleAPI),
}
#[derive(Debug, PartialEq)]
pub struct HttpRequest {
pub url: Url,
pub method: Method,
pub headers: Headers,
pub body: Option<Vec<u8>>,
pub pipeline_id: PipelineId,
pub startedDateTime: Tm
}
#[derive(Debug, PartialEq)]
pub struct HttpResponse {
pub headers: Option<Headers>,
pub status: Option<RawStatus>,
pub body: Option<Vec<u8>>,
pub pipeline_id: PipelineId,
}
pub enum NetworkEvent {
HttpRequest(HttpRequest),
HttpResponse(HttpResponse),
}
impl TimelineMarker {
pub fn start(name: String) -> StartedTimelineMarker {
StartedTimelineMarker {
name: name,
start_time: PreciseTime::now(),
start_stack: None,
}
}
}
impl StartedTimelineMarker {
pub fn end(self) -> TimelineMarker {
TimelineMarker {
name: self.name,
start_time: self.start | CachedConsoleMessage | identifier_name |
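// --- Illustrative only: how the start/end pair above is meant to be used;
// PreciseTime itself is defined further down the real file (truncated here).
fn trace_reflow_example() {
    let marker = TimelineMarker::start("Reflow".to_owned());
    // ... the traced work would run here ...
    let _completed: TimelineMarker = marker.end();
}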
lib.rs | #![crate_type = "rlib"]
#![allow(non_snake_case)]
#![deny(unsafe_code)]
#![feature(custom_derive, plugin)]
#![plugin(heapsize_plugin, serde_macros)]
| extern crate ipc_channel;
extern crate msg;
extern crate serde;
extern crate time;
extern crate url;
extern crate util;
use hyper::header::Headers;
use hyper::http::RawStatus;
use hyper::method::Method;
use ipc_channel::ipc::IpcSender;
use msg::constellation_msg::PipelineId;
use std::net::TcpStream;
use time::Duration;
use time::Tm;
use url::Url;
// Information would be attached to NewGlobal to be received and show in devtools.
// Extend these fields if we need more information.
#[derive(Deserialize, Serialize)]
pub struct DevtoolsPageInfo {
pub title: String,
pub url: Url
}
#[derive(Deserialize, HeapSizeOf, Serialize, Clone)]
pub struct CSSError {
pub filename: String,
pub line: usize,
pub column: usize,
pub msg: String
}
/// Messages to instruct the devtools server to update its known actors/state
/// according to changes in the browser.
pub enum DevtoolsControlMsg {
/// Messages from threads in the chrome process (resource/constellation/devtools)
FromChrome(ChromeToDevtoolsControlMsg),
/// Messages from script threads
FromScript(ScriptToDevtoolsControlMsg),
}
/// Events that the devtools server must act upon.
pub enum ChromeToDevtoolsControlMsg {
/// A new client has connected to the server.
AddClient(TcpStream),
/// The browser is shutting down.
ServerExitMsg,
/// A network event occurred (request, reply, etc.). The actor with the
/// provided name should be notified.
NetworkEvent(String, NetworkEvent),
}
#[derive(Deserialize, Serialize)]
/// Events that the devtools server must act upon.
pub enum ScriptToDevtoolsControlMsg {
/// A new global object was created, associated with a particular pipeline.
/// The means of communicating directly with it are provided.
NewGlobal((PipelineId, Option<WorkerId>),
IpcSender<DevtoolScriptControlMsg>,
DevtoolsPageInfo),
/// A particular page has invoked the console API.
ConsoleAPI(PipelineId, ConsoleMessage, Option<WorkerId>),
/// An animation frame with the given timestamp was processed in a script thread.
/// The actor with the provided name should be notified.
FramerateTick(String, f64),
/// Report a CSS parse error for the given pipeline
ReportCSSError(PipelineId, CSSError),
}
/// Serialized JS return values
/// TODO: generalize this beyond the EvaluateJS message?
#[derive(Deserialize, Serialize)]
pub enum EvaluateJSReply {
VoidValue,
NullValue,
BooleanValue(bool),
NumberValue(f64),
StringValue(String),
ActorValue { class: String, uuid: String },
}
#[derive(Deserialize, Serialize)]
pub struct AttrInfo {
pub namespace: String,
pub name: String,
pub value: String,
}
#[derive(Deserialize, Serialize)]
pub struct NodeInfo {
pub uniqueId: String,
pub baseURI: String,
pub parent: String,
pub nodeType: u16,
pub namespaceURI: String,
pub nodeName: String,
pub numChildren: usize,
pub name: String,
pub publicId: String,
pub systemId: String,
pub attrs: Vec<AttrInfo>,
pub isDocumentElement: bool,
pub shortValue: String,
pub incompleteValue: bool,
}
pub struct StartedTimelineMarker {
name: String,
start_time: PreciseTime,
start_stack: Option<Vec<()>>,
}
#[derive(Deserialize, Serialize)]
pub struct TimelineMarker {
pub name: String,
pub start_time: PreciseTime,
pub start_stack: Option<Vec<()>>,
pub end_time: PreciseTime,
pub end_stack: Option<Vec<()>>,
}
#[derive(PartialEq, Eq, Hash, Clone, Deserialize, Serialize)]
pub enum TimelineMarkerType {
Reflow,
DOMEvent,
}
/// The properties of a DOM node as computed by layout.
#[derive(Deserialize, Serialize)]
pub struct ComputedNodeLayout {
pub display: String,
pub position: String,
pub zIndex: String,
pub boxSizing: String,
pub autoMargins: AutoMargins,
pub marginTop: String,
pub marginRight: String,
pub marginBottom: String,
pub marginLeft: String,
pub borderTopWidth: String,
pub borderRightWidth: String,
pub borderBottomWidth: String,
pub borderLeftWidth: String,
pub paddingTop: String,
pub paddingRight: String,
pub paddingBottom: String,
pub paddingLeft: String,
pub width: f32,
pub height: f32,
}
#[derive(Deserialize, Serialize)]
pub struct AutoMargins {
pub top: bool,
pub right: bool,
pub bottom: bool,
pub left: bool,
}
/// Messages to process in a particular script thread, as instructed by a devtools client.
#[derive(Deserialize, Serialize)]
pub enum DevtoolScriptControlMsg {
/// Evaluate a JS snippet in the context of the global for the given pipeline.
EvaluateJS(PipelineId, String, IpcSender<EvaluateJSReply>),
/// Retrieve the details of the root node (ie. the document) for the given pipeline.
GetRootNode(PipelineId, IpcSender<NodeInfo>),
/// Retrieve the details of the document element for the given pipeline.
GetDocumentElement(PipelineId, IpcSender<NodeInfo>),
/// Retrieve the details of the child nodes of the given node in the given pipeline.
GetChildren(PipelineId, String, IpcSender<Vec<NodeInfo>>),
/// Retrieve the computed layout properties of the given node in the given pipeline.
GetLayout(PipelineId, String, IpcSender<ComputedNodeLayout>),
/// Retrieve all stored console messages for the given pipeline.
GetCachedMessages(PipelineId, CachedConsoleMessageTypes, IpcSender<Vec<CachedConsoleMessage>>),
/// Update a given node's attributes with a list of modifications.
ModifyAttribute(PipelineId, String, Vec<Modification>),
/// Request live console messages for a given pipeline (true if desired, false otherwise).
WantsLiveNotifications(PipelineId, bool),
/// Request live notifications for a given set of timeline events for a given pipeline.
SetTimelineMarkers(PipelineId, Vec<TimelineMarkerType>, IpcSender<TimelineMarker>),
/// Withdraw request for live timeline notifications for a given pipeline.
DropTimelineMarkers(PipelineId, Vec<TimelineMarkerType>),
/// Request a callback directed at the given actor name from the next animation frame
/// executed in the given pipeline.
RequestAnimationFrame(PipelineId, String),
}
#[derive(Deserialize, Serialize)]
pub struct Modification {
pub attributeName: String,
pub newValue: Option<String>,
}
#[derive(Clone, Deserialize, Serialize)]
pub enum LogLevel {
Log,
Debug,
Info,
Warn,
Error,
}
#[derive(Clone, Deserialize, Serialize)]
pub struct ConsoleMessage {
pub message: String,
pub logLevel: LogLevel,
pub filename: String,
pub lineNumber: usize,
pub columnNumber: usize,
}
bitflags! {
#[derive(Deserialize, Serialize)]
flags CachedConsoleMessageTypes: u8 {
const PAGE_ERROR = 1 << 0,
const CONSOLE_API = 1 << 1,
}
}
#[derive(Deserialize, Serialize)]
pub struct PageError {
#[serde(rename = "type")]
pub type_: String,
pub errorMessage: String,
pub sourceName: String,
pub lineText: String,
pub lineNumber: u32,
pub columnNumber: u32,
pub category: String,
pub timeStamp: u64,
pub error: bool,
pub warning: bool,
pub exception: bool,
pub strict: bool,
pub private: bool,
}
#[derive(Deserialize, Serialize)]
pub struct ConsoleAPI {
#[serde(rename = "type")]
pub type_: String,
pub level: String,
pub filename: String,
pub lineNumber: u32,
pub functionName: String,
pub timeStamp: u64,
pub private: bool,
pub arguments: Vec<String>,
}
#[derive(Deserialize, Serialize)]
pub enum CachedConsoleMessage {
PageError(PageError),
ConsoleAPI(ConsoleAPI),
}
#[derive(Debug, PartialEq)]
pub struct HttpRequest {
pub url: Url,
pub method: Method,
pub headers: Headers,
pub body: Option<Vec<u8>>,
pub pipeline_id: PipelineId,
pub startedDateTime: Tm
}
#[derive(Debug, PartialEq)]
pub struct HttpResponse {
pub headers: Option<Headers>,
pub status: Option<RawStatus>,
pub body: Option<Vec<u8>>,
pub pipeline_id: PipelineId,
}
pub enum NetworkEvent {
HttpRequest(HttpRequest),
HttpResponse(HttpResponse),
}
impl TimelineMarker {
pub fn start(name: String) -> StartedTimelineMarker {
StartedTimelineMarker {
name: name,
start_time: PreciseTime::now(),
start_stack: None,
}
}
}
impl StartedTimelineMarker {
pub fn end(self) -> TimelineMarker {
TimelineMarker {
name: self.name,
start_time: self.start_time | #[macro_use]
extern crate bitflags;
extern crate heapsize;
extern crate hyper; | random_line_split |
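// --- Illustrative only: constructing one of the protocol values above.
fn example_console_message() -> ConsoleMessage {
    ConsoleMessage {
        message: "hello from script".to_owned(),
        logLevel: LogLevel::Warn,
        filename: "main.js".to_owned(),
        lineNumber: 12,
        columnNumber: 4,
    }
}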
lib.es2015.symbol.d.ts | /*! *****************************************************************************
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of the
License at http://www.apache.org/licenses/LICENSE-2.0
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
MERCHANTABILITY OR NON-INFRINGEMENT.
See the Apache Version 2.0 License for specific language governing permissions
| /** Returns a string representation of an object. */
toString(): string;
/** Returns the primitive value of the specified object. */
valueOf(): Object;
}
interface SymbolConstructor {
/**
* A reference to the prototype.
*/
readonly prototype: Symbol;
/**
* Returns a new unique Symbol value.
* @param description Description of the new Symbol object.
*/
(description?: string|number): symbol;
/**
* Returns a Symbol object from the global symbol registry matching the given key if found.
* Otherwise, returns a new symbol with this key.
* @param key key to search for.
*/
for(key: string): symbol;
/**
* Returns a key from the global symbol registry matching the given Symbol if found.
* Otherwise, returns undefined.
* @param sym Symbol to find the key for.
*/
keyFor(sym: symbol): string | undefined;
}
declare var Symbol: SymbolConstructor; | and limitations under the License.
***************************************************************************** */
/// <reference no-default-lib="true"/>
interface Symbol {
| random_line_split |
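// --- Illustrative only: exercising the declarations above.
const registered: symbol = Symbol.for("app/feature-flag");
const key: string | undefined = Symbol.keyFor(registered); // "app/feature-flag"
const local: symbol = Symbol("not in the registry");
// Symbol.keyFor(local) would be undefined: only Symbol.for() registers keys.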
dev-server.js | var path = require('path')
var fs = require('fs')
var argv = require('optimist').argv;
var express = require('express')
var webpack = require('webpack')
var config = require('../config')
var opn = require('opn')
var proxyMiddleware = require('http-proxy-middleware')
var webpackConfig = process.env.NODE_ENV === 'testing'
? require('./webpack.prod.conf')
: require('./webpack.dev.conf')
// default port where dev server listens for incoming traffic
var port = process.env.PORT || config.dev.port
// Define HTTP proxies to your custom API backend
// https://github.com/chimurai/http-proxy-middleware
var proxyTable = config.dev.proxyTable
var app = express()
var compiler = webpack(webpackConfig)
var devMiddleware = require('webpack-dev-middleware')(compiler, {
publicPath: webpackConfig.output.publicPath,
stats: {
colors: true,
chunks: false
}
})
var hotMiddleware = require('webpack-hot-middleware')(compiler)
// force page reload when html-webpack-plugin template changes
compiler.plugin('compilation', function (compilation) {
compilation.plugin('html-webpack-plugin-after-emit', function (data, cb) {
hotMiddleware.publish({ action: 'reload' })
cb()
})
})
// proxy api requests
// Object.keys(proxyTable).forEach(function (context) {
// var options = proxyTable[context]
// if (typeof options === 'string') {
// options = { target: options }
// }
// app.use(proxyMiddleware(context, options))
// });
// mock/proxy api requests
var mockDir = path.resolve(__dirname, '../mock');
(function setMock(mockDir) {
fs.readdirSync(mockDir).forEach(function (file) {
var filePath = path.resolve(mockDir, file);
var mock;
if (fs.statSync(filePath).isDirectory()) {
setMock(filePath);
}
else |
});
})(mockDir);
// handle fallback for HTML5 history API
app.use(require('connect-history-api-fallback')({
index: '/index.html'
}))
// serve webpack bundle output
app.use(devMiddleware)
// enable hot-reload and state-preserving
// compilation error display
app.use(hotMiddleware)
// serve pure static assets
var staticPath = path.posix.join(config.dev.assetsPublicPath, config.dev.assetsSubDirectory)
app.use(staticPath, express.static('./static'))
module.exports = app.listen(port, function (err) {
if (err) {
console.log(err)
return
}
var uri = 'http://localhost:' + port
console.log('Dev server listening at ' + uri + '\n')
// opn(uri)
})
| {
mock = require(filePath);
app.use(mock.api, argv.proxy ? proxyMiddleware({target: 'http://' + argv.proxy}) : mock.response);
} | conditional_block |
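// --- Illustrative only: the module shape setMock() above expects from each
// file under ../mock -- an api path plus an express-style handler
// (path and payload are invented here):
var exampleMock = {
  api: '/api/user/info',
  response: function (req, res) {
    res.json({ code: 0, data: { name: 'demo' } })
  }
}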
dev-server.js | var path = require('path')
var fs = require('fs')
var argv = require('optimist').argv;
var express = require('express')
var webpack = require('webpack')
var config = require('../config')
var opn = require('opn')
var proxyMiddleware = require('http-proxy-middleware')
var webpackConfig = process.env.NODE_ENV === 'testing'
? require('./webpack.prod.conf')
: require('./webpack.dev.conf')
// default port where dev server listens for incoming traffic
var port = process.env.PORT || config.dev.port
// Define HTTP proxies to your custom API backend
// https://github.com/chimurai/http-proxy-middleware
var proxyTable = config.dev.proxyTable
var app = express()
var compiler = webpack(webpackConfig)
var devMiddleware = require('webpack-dev-middleware')(compiler, {
publicPath: webpackConfig.output.publicPath,
stats: {
colors: true, | // force page reload when html-webpack-plugin template changes
compiler.plugin('compilation', function (compilation) {
compilation.plugin('html-webpack-plugin-after-emit', function (data, cb) {
hotMiddleware.publish({ action: 'reload' })
cb()
})
})
// proxy api requests
// Object.keys(proxyTable).forEach(function (context) {
// var options = proxyTable[context]
// if (typeof options === 'string') {
// options = { target: options }
// }
// app.use(proxyMiddleware(context, options))
// });
// mock/proxy api requests
var mockDir = path.resolve(__dirname, '../mock');
(function setMock(mockDir) {
fs.readdirSync(mockDir).forEach(function (file) {
var filePath = path.resolve(mockDir, file);
var mock;
if (fs.statSync(filePath).isDirectory()) {
setMock(filePath);
}
else {
mock = require(filePath);
app.use(mock.api, argv.proxy ? proxyMiddleware({target: 'http://' + argv.proxy}) : mock.response);
}
});
})(mockDir);
// handle fallback for HTML5 history API
app.use(require('connect-history-api-fallback')({
index: '/index.html'
}))
// serve webpack bundle output
app.use(devMiddleware)
// enable hot-reload and state-preserving
// compilation error display
app.use(hotMiddleware)
// serve pure static assets
var staticPath = path.posix.join(config.dev.assetsPublicPath, config.dev.assetsSubDirectory)
app.use(staticPath, express.static('./static'))
module.exports = app.listen(port, function (err) {
if (err) {
console.log(err)
return
}
var uri = 'http://localhost:' + port
console.log('Dev server listening at ' + uri + '\n')
// opn(uri)
}) | chunks: false
}
})
var hotMiddleware = require('webpack-hot-middleware')(compiler) | random_line_split |
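// --- Illustrative only: a proxyTable entry (from ../config) that the
// commented-out proxy block above would consume; the target is invented:
var exampleProxyTable = {
  '/api': {
    target: 'http://localhost:8081',
    changeOrigin: true
  }
}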
sha1Hash_test.py | #!/usr/bin/env python
""" sha1Hash_test.py
Unit tests for sha1.py
"""
from crypto.hash.sha1Hash import SHA1
import unittest
import struct
assert struct.calcsize('!IIIII') == 20, '5 integers should be 20 bytes'
class SHA1_FIPS180_TestCases(unittest.TestCase):
""" SHA-1 tests from FIPS180-1 Appendix A, B and C """
def testFIPS180_1_Appendix_A(self):
""" APPENDIX A. A SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
hashAlg = SHA1()
message = 'abc'
message_digest = 0xA9993E36L, 0x4706816AL, 0xBA3E2571L, 0x7850C26CL, 0x9CD0D89DL
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix A test Failed'
def testFIPS180_1_Appendix_B(self):
""" APPENDIX B. A SECOND SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
hashAlg = SHA1()
message = 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'
message_digest = 0x84983E44L, 0x1C3BD26EL, 0xBAAE4AA1L, 0xF95129E5L, 0xE54670F1L
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix B test Failed'
def testFIPS180_1_Appendix_C(self):
""" APPENDIX C. A THIRD SAMPLE MESSAGE AND ITS MESSAGE DIGEST
Let the message be the binary-coded form of the ASCII string which consists
of 1,000,000 repetitions of "a". """
hashAlg = SHA1()
message = 1000000*'a'
message_digest = 0x34AA973CL, 0xD4C4DAA4L, 0xF61EEB2BL, 0xDBAD2731L, 0x6534016FL
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix C test Failed'
def _toBlock(binaryString):
""" Convert binary string to blocks of 5 words of uint32() """
return [uint32(word) for word in struct.unpack('!IIIII', binaryString)]
def _toBString(block):
""" Convert block (5 words of 32 bits to binary string """
return ''.join([struct.pack('!I',word) for word in block])
if __name__ == '__main__':
# Run the tests from the command line
| unittest.main() | conditional_block |
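# --- Illustrative only: the Appendix A vector cross-checked against the
# stdlib; both should yield the same 20 raw bytes:
import hashlib
assert hashlib.sha1('abc').digest() == _toBString(
    (0xA9993E36L, 0x4706816AL, 0xBA3E2571L, 0x7850C26CL, 0x9CD0D89DL))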
|
sha1Hash_test.py | #!/usr/bin/env python
""" sha1Hash_test.py
Unit tests for sha1.py
"""
from crypto.hash.sha1Hash import SHA1
import unittest
import struct
assert struct.calcsize('!IIIII') == 20, '5 integers should be 20 bytes'
class SHA1_FIPS180_TestCases(unittest.TestCase):
""" SHA-1 tests from FIPS180-1 Appendix A, B and C """
def testFIPS180_1_Appendix_A(self):
""" APPENDIX A. A SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
hashAlg = SHA1()
message = 'abc'
message_digest = 0xA9993E36L, 0x4706816AL, 0xBA3E2571L, 0x7850C26CL, 0x9CD0D89DL
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix A test Failed'
def testFIPS180_1_Appendix_B(self):
|
def testFIPS180_1_Appendix_C(self):
""" APPENDIX C. A THIRD SAMPLE MESSAGE AND ITS MESSAGE DIGEST
Let the message be the binary-coded form of the ASCII string which consists
of 1,000,000 repetitions of "a". """
hashAlg = SHA1()
message = 1000000*'a'
message_digest = 0x34AA973CL, 0xD4C4DAA4L, 0xF61EEB2BL, 0xDBAD2731L, 0x6534016FL
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix C test Failed'
def _toBlock(binaryString):
""" Convert binary string to blocks of 5 words of uint32() """
return [uint32(word) for word in struct.unpack('!IIIII', binaryString)]
def _toBString(block):
""" Convert block (5 words of 32 bits to binary string """
return ''.join([struct.pack('!I',word) for word in block])
if __name__ == '__main__':
# Run the tests from the command line
unittest.main()
| """ APPENDIX B. A SECOND SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
hashAlg = SHA1()
message = 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'
message_digest = 0x84983E44L, 0x1C3BD26EL, 0xBAAE4AA1L, 0xF95129E5L, 0xE54670F1L
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix B test Failed' | identifier_body |
sha1Hash_test.py | #!/usr/bin/env python
""" sha1Hash_test.py
Unit tests for sha1.py
"""
from crypto.hash.sha1Hash import SHA1
import unittest
import struct
assert struct.calcsize('!IIIII') == 20, '5 integers should be 20 bytes'
class SHA1_FIPS180_TestCases(unittest.TestCase):
""" SHA-1 tests from FIPS180-1 Appendix A, B and C """
def testFIPS180_1_Appendix_A(self):
""" APPENDIX A. A SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
hashAlg = SHA1()
message = 'abc'
message_digest = 0xA9993E36L, 0x4706816AL, 0xBA3E2571L, 0x7850C26CL, 0x9CD0D89DL
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix A test Failed'
def | (self):
""" APPENDIX B. A SECOND SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
hashAlg = SHA1()
message = 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'
message_digest = 0x84983E44L, 0x1C3BD26EL, 0xBAAE4AA1L, 0xF95129E5L, 0xE54670F1L
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix B test Failed'
def testFIPS180_1_Appendix_C(self):
""" APPENDIX C. A THIRD SAMPLE MESSAGE AND ITS MESSAGE DIGEST
Let the message be the binary-coded form of the ASCII string which consists
of 1,000,000 repetitions of "a". """
hashAlg = SHA1()
message = 1000000*'a'
message_digest = 0x34AA973CL, 0xD4C4DAA4L, 0xF61EEB2BL, 0xDBAD2731L, 0x6534016FL
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix C test Failed'
def _toBlock(binaryString):
""" Convert binary string to blocks of 5 words of uint32() """
return [uint32(word) for word in struct.unpack('!IIIII', binaryString)]
def _toBString(block):
""" Convert block (5 words of 32 bits to binary string """
return ''.join([struct.pack('!I',word) for word in block])
if __name__ == '__main__':
# Run the tests from the command line
unittest.main()
| testFIPS180_1_Appendix_B | identifier_name |
sha1Hash_test.py | #!/usr/bin/env python
""" sha1Hash_test.py
Unit tests for sha1.py
"""
from crypto.hash.sha1Hash import SHA1
import unittest
import struct
assert struct.calcsize('!IIIII') == 20, '5 integers should be 20 bytes'
class SHA1_FIPS180_TestCases(unittest.TestCase):
""" SHA-1 tests from FIPS180-1 Appendix A, B and C """
def testFIPS180_1_Appendix_A(self):
""" APPENDIX A. A SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
hashAlg = SHA1()
message = 'abc'
message_digest = 0xA9993E36L, 0x4706816AL, 0xBA3E2571L, 0x7850C26CL, 0x9CD0D89DL
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix A test Failed'
def testFIPS180_1_Appendix_B(self):
""" APPENDIX B. A SECOND SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
hashAlg = SHA1()
message = 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'
message_digest = 0x84983E44L, 0x1C3BD26EL, 0xBAAE4AA1L, 0xF95129E5L, 0xE54670F1L
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix B test Failed'
def testFIPS180_1_Appendix_C(self):
""" APPENDIX C. A THIRD SAMPLE MESSAGE AND ITS MESSAGE DIGEST
Let the message be the binary-coded form of the ASCII string which consists
of 1,000,000 repetitions of "a". """
hashAlg = SHA1()
message = 1000000*'a'
message_digest = 0x34AA973CL, 0xD4C4DAA4L, 0xF61EEB2BL, 0xDBAD2731L, 0x6534016FL
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix C test Failed'
def _toBlock(binaryString):
""" Convert binary string to blocks of 5 words of uint32() """
return [uint32(word) for word in struct.unpack('!IIIII', binaryString)]
def _toBString(block):
""" Convert block (5 words of 32 bits to binary string """
return ''.join([struct.pack('!I',word) for word in block])
if __name__ == '__main__': | # Run the tests from the command line
unittest.main() | random_line_split |
|
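The message digests in the three test cases are the published FIPS 180-1 vectors, so they can be cross-checked against any independent SHA-1 implementation; a minimal sketch using Python 3's `hashlib` (independent of the `SHA1` class under test):

import hashlib

# Appendix A: SHA-1("abc")
assert hashlib.sha1(b'abc').hexdigest() == \
    'a9993e364706816aba3e25717850c26c9cd0d89d'
# Appendix C: one million repetitions of "a"
assert hashlib.sha1(b'a' * 1000000).hexdigest() == \
    '34aa973cd4c4daa4f61eeb2bdbad27316534016f'
print('FIPS 180-1 vectors verified')
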
adaboost.py | #!/usr/bin/python2 |
from util import get_split_training_dataset
from metrics import suite
import feature_selection_trees as fclassify
from sklearn.grid_search import GridSearchCV
from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier
def train(Xtrain, Ytrain):
""" Use entirety of provided X, Y to predict
Default Arguments
Xtrain -- Training data
Ytrain -- Training prediction
Named Arguments
C -- regularization parameter
Returns
classifier -- a tree fitted to Xtrain and Ytrain
"""
# Initialize classifier parameters for adaboost
# For adaboost, this means the number of estimators for now
ada = AdaBoostClassifier(DecisionTreeClassifier(max_depth=1))
parameters = {'n_estimators': [150]}
# Classify over grid of parameters
classifier = GridSearchCV(ada, parameters)
classifier.fit(Xtrain, Ytrain)
return classifier
if __name__ == "__main__":
# Let's take our training data and train a decision tree
# on a subset. Scikit-learn provides a good module for cross-
# validation.
Xt, Xv, Yt, Yv = get_split_training_dataset()
Classifier = train(Xt, Yt)
print "Adaboost Classifier"
suite(Yv, Classifier.predict(Xv))
# smaller feature set
Xtimp, features = fclassify.get_important_data_features(Xt, Yt, max_features=25)
Xvimp = fclassify.compress_data_to_important_features(Xv, features)
ClassifierImp = train(Xtimp,Yt)
print "Adaboosts Classiifer, 25 important features"
suite(Yv, ClassifierImp.predict(Xvimp)) |
# This is an Adaboost classifier
import sys | random_line_split |
adaboost.py | #!/usr/bin/python2
# This is an Adaboost classifier
import sys
from util import get_split_training_dataset
from metrics import suite
import feature_selection_trees as fclassify
from sklearn.grid_search import GridSearchCV
from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier
def train(Xtrain, Ytrain):
| return classifier
if __name__ == "__main__":
# Let's take our training data and train a decision tree
# on a subset. Scikit-learn provides a good module for cross-
# validation.
Xt, Xv, Yt, Yv = get_split_training_dataset()
Classifier = train(Xt, Yt)
print "Adaboost Classifier"
suite(Yv, Classifier.predict(Xv))
# smaller feature set
Xtimp, features = fclassify.get_important_data_features(Xt, Yt, max_features=25)
Xvimp = fclassify.compress_data_to_important_features(Xv, features)
ClassifierImp = train(Xtimp,Yt)
print "Adaboosts Classiifer, 25 important features"
suite(Yv, ClassifierImp.predict(Xvimp))
| """ Use entirety of provided X, Y to predict
Default Arguments
Xtrain -- Training data
Ytrain -- Training prediction
Named Arguments
C -- regularization parameter
Returns
classifier -- a tree fitted to Xtrain and Ytrain
"""
# Initialize classifier parameters for adaboost
# For adaboost, this means the number of estimators for now
ada = AdaBoostClassifier(DecisionTreeClassifier(max_depth=1))
parameters = {'n_estimators': [150]}
# Classify over grid of parameters
classifier = GridSearchCV(ada, parameters)
classifier.fit(Xtrain, Ytrain) | identifier_body |
adaboost.py | #!/usr/bin/python2
# This is an Adaboost classifier
import sys
from util import get_split_training_dataset
from metrics import suite
import feature_selection_trees as fclassify
from sklearn.grid_search import GridSearchCV
from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier
def | (Xtrain, Ytrain):
""" Use entirety of provided X, Y to predict
Default Arguments
Xtrain -- Training data
Ytrain -- Training prediction
Named Arguments
C -- regularization parameter
Returns
classifier -- a tree fitted to Xtrain and Ytrain
"""
# Initialize classifier parameters for adaboost
# For adaboost, this means the number of estimators for now
ada = AdaBoostClassifier(DecisionTreeClassifier(max_depth=1))
parameters = {'n_estimators': [150]}
# Classify over grid of parameters
classifier = GridSearchCV(ada, parameters)
classifier.fit(Xtrain, Ytrain)
return classifier
if __name__ == "__main__":
# Let's take our training data and train a decision tree
# on a subset. Scikit-learn provides a good module for cross-
# validation.
Xt, Xv, Yt, Yv = get_split_training_dataset()
Classifier = train(Xt, Yt)
print "Adaboost Classifier"
suite(Yv, Classifier.predict(Xv))
# smaller feature set
Xtimp, features = fclassify.get_important_data_features(Xt, Yt, max_features=25)
Xvimp = fclassify.compress_data_to_important_features(Xv, features)
ClassifierImp = train(Xtimp,Yt)
print "Adaboosts Classiifer, 25 important features"
suite(Yv, ClassifierImp.predict(Xvimp))
| train | identifier_name |
adaboost.py | #!/usr/bin/python2
# This is an Adaboost classifier
import sys
from util import get_split_training_dataset
from metrics import suite
import feature_selection_trees as fclassify
from sklearn.grid_search import GridSearchCV
from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier
def train(Xtrain, Ytrain):
""" Use entirety of provided X, Y to predict
Default Arguments
Xtrain -- Training data
Ytrain -- Training prediction
Named Arguments
C -- regularization parameter
Returns
classifier -- a tree fitted to Xtrain and Ytrain
"""
# Initialize classifier parameters for adaboost
# For adaboost, this means the number of estimators for now
ada = AdaBoostClassifier(DecisionTreeClassifier(max_depth=1))
parameters = {'n_estimators': [150]}
# Classify over grid of parameters
classifier = GridSearchCV(ada, parameters)
classifier.fit(Xtrain, Ytrain)
return classifier
if __name__ == "__main__":
# Let's take our training data and train a decision tree
# on a subset. Scikit-learn provides a good module for cross-
# validation.
| Xt, Xv, Yt, Yv = get_split_training_dataset()
Classifier = train(Xt, Yt)
print "Adaboost Classifier"
suite(Yv, Classifier.predict(Xv))
# smaller feature set
Xtimp, features = fclassify.get_important_data_features(Xt, Yt, max_features=25)
Xvimp = fclassify.compress_data_to_important_features(Xv, features)
ClassifierImp = train(Xtimp,Yt)
print "Adaboosts Classiifer, 25 important features"
suite(Yv, ClassifierImp.predict(Xvimp)) | conditional_block |
|
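All four splits above come from the same adaboost.py, whose core idea is wrapping an AdaBoost ensemble of depth-1 stumps in a grid search over `n_estimators`. A self-contained sketch on synthetic data (modern scikit-learn assumed, where `GridSearchCV` moved from the legacy `sklearn.grid_search` to `sklearn.model_selection`):

from sklearn.datasets import make_classification
from sklearn.ensemble import AdaBoostClassifier
from sklearn.model_selection import GridSearchCV, train_test_split
from sklearn.tree import DecisionTreeClassifier

# Synthetic stand-in for get_split_training_dataset().
X, y = make_classification(n_samples=500, n_features=20, random_state=0)
Xt, Xv, Yt, Yv = train_test_split(X, y, random_state=0)
# Depth-1 stumps boosted by AdaBoost, grid-searched over the ensemble size
# (several candidate values instead of the single [150] used above).
ada = AdaBoostClassifier(DecisionTreeClassifier(max_depth=1))
classifier = GridSearchCV(ada, {'n_estimators': [50, 100, 150]})
classifier.fit(Xt, Yt)
print(classifier.best_params_, classifier.score(Xv, Yv))
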
setup.py | '''
pyttsx setup script.
Copyright (c) 2009, 2013 Peter Parente
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED 'AS IS' AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'''
from setuptools import setup, find_packages
setup(name='pyttsx',
version='1.2',
description='pyttsx - cross platform text-to-speech',
long_description='pyttsx is a Python package supporting common text-to-speech engines on Mac OS X, Windows, and Linux.',
author='Peter Parente',
author_email='[email protected]',
url='https://github.com/parente/pyttsx',
download_url='http://pypi.python.org/pypi/pyttsx',
license='BSD License', | packages=['pyttsx', 'pyttsx.drivers']
) | random_line_split |
|
two-weakrefs.js | // Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --harmony-weak-refs --expose-gc --noincremental-marking --allow-natives-syntax
let o1 = {};
let o2 = {};
let wr1;
let wr2;
(function() {
wr1 = new WeakRef(o1);
wr2 = new WeakRef(o2);
})();
// Since the WeakRefs were created during this turn, they're not cleared by GC.
gc();
(function() {
assertNotEquals(undefined, wr1.deref());
assertNotEquals(undefined, wr2.deref());
})();
%PerformMicrotaskCheckpoint();
// New turn.
wr1.deref();
o1 = null;
gc(); // deref makes sure we don't clean up wr1
%PerformMicrotaskCheckpoint();
// New turn.
wr2.deref();
o2 = null;
gc(); // deref makes sure we don't clean up wr2
|
gc();
%PerformMicrotaskCheckpoint();
// New turn.
assertEquals(undefined, wr2.deref()); | %PerformMicrotaskCheckpoint();
// New turn.
assertEquals(undefined, wr1.deref()); | random_line_split |
fields.py | import os
from .provider_manager import ProviderManager
from .resolvers import resolver_registry
field_registry = {}
def register_field(name):
"""add resolver class to registry"""
def add_class(clazz):
field_registry[name] = clazz
return clazz
return add_class
class Field:
def __init__(self, key, props):
self.key = key
self.props = props
@staticmethod
def create(key, context):
# normalize
if isinstance(context, str):
|
field_type = context["lookup"]
return field_registry[field_type](key, context["prop"])
@register_field("literal")
class LiteralField(Field):
def __init__(self, key, props):
super().__init__(key, props)
@property
def value(self):
return self.props["value"]
@register_field("environment")
class EnvironmentField(Field):
def __init__(self, key, props):
super().__init__(key, props)
@property
def value(self):
return os.environ[self.props["variable"]]
@register_field("secretsmanager")
class SecretsManagerField(Field):
def __init__(self, key, props):
super().__init__(key, props)
@property
def value(self):
return self.resolver.get(
self.props["value"],
self.props.get("index", None)
)
@property
def provider(self):
provider_manager = ProviderManager.instance()
return provider_manager.get(self.props["provider"])
@property
def resolver(self):
clazz = resolver_registry["secretsmanager"]
return clazz(self.provider, None, None)
| context = {
"lookup": "literal",
"prop": {
"value": context
}
} | conditional_block |
fields.py | import os
from .provider_manager import ProviderManager
from .resolvers import resolver_registry
field_registry = {}
def | (name):
"""add resolver class to registry"""
def add_class(clazz):
field_registry[name] = clazz
return clazz
return add_class
class Field:
def __init__(self, key, props):
self.key = key
self.props = props
@staticmethod
def create(key, context):
# normalize
if isinstance(context, str):
context = {
"lookup": "literal",
"prop": {
"value": context
}
}
field_type = context["lookup"]
return field_registry[field_type](key, context["prop"])
@register_field("literal")
class LiteralField(Field):
def __init__(self, key, props):
super().__init__(key, props)
@property
def value(self):
return self.props["value"]
@register_field("environment")
class EnvironmentField(Field):
def __init__(self, key, props):
super().__init__(key, props)
@property
def value(self):
return os.environ[self.props["variable"]]
@register_field("secretsmanager")
class SecretsManagerField(Field):
def __init__(self, key, props):
super().__init__(key, props)
@property
def value(self):
return self.resolver.get(
self.props["value"],
self.props.get("index", None)
)
@property
def provider(self):
provider_manager = ProviderManager.instance()
return provider_manager.get(self.props["provider"])
@property
def resolver(self):
clazz = resolver_registry["secretsmanager"]
return clazz(self.provider, None, None)
| register_field | identifier_name |
fields.py | import os
from .provider_manager import ProviderManager
from .resolvers import resolver_registry
field_registry = {}
def register_field(name):
"""add resolver class to registry"""
def add_class(clazz):
field_registry[name] = clazz
return clazz
return add_class
class Field:
def __init__(self, key, props):
self.key = key
self.props = props
@staticmethod
def create(key, context):
# normalize
if isinstance(context, str):
context = {
"lookup": "literal",
"prop": {
"value": context
}
}
field_type = context["lookup"]
return field_registry[field_type](key, context["prop"])
@register_field("literal")
class LiteralField(Field):
def __init__(self, key, props):
super().__init__(key, props)
@property
def value(self):
|
@register_field("environment")
class EnvironmentField(Field):
def __init__(self, key, props):
super().__init__(key, props)
@property
def value(self):
return os.environ[self.props["variable"]]
@register_field("secretsmanager")
class SecretsManagerField(Field):
def __init__(self, key, props):
super().__init__(key, props)
@property
def value(self):
return self.resolver.get(
self.props["value"],
self.props.get("index", None)
)
@property
def provider(self):
provider_manager = ProviderManager.instance()
return provider_manager.get(self.props["provider"])
@property
def resolver(self):
clazz = resolver_registry["secretsmanager"]
return clazz(self.provider, None, None)
| return self.props["value"] | identifier_body |
fields.py | import os
from .provider_manager import ProviderManager
from .resolvers import resolver_registry
field_registry = {}
def register_field(name): | def add_class(clazz):
field_registry[name] = clazz
return clazz
return add_class
class Field:
def __init__(self, key, props):
self.key = key
self.props = props
@staticmethod
def create(key, context):
# normalize
if isinstance(context, str):
context = {
"lookup": "literal",
"prop": {
"value": context
}
}
field_type = context["lookup"]
return field_registry[field_type](key, context["prop"])
@register_field("literal")
class LiteralField(Field):
def __init__(self, key, props):
super().__init__(key, props)
@property
def value(self):
return self.props["value"]
@register_field("environment")
class EnvironmentField(Field):
def __init__(self, key, props):
super().__init__(key, props)
@property
def value(self):
return os.environ[self.props["variable"]]
@register_field("secretsmanager")
class SecretsManagerField(Field):
def __init__(self, key, props):
super().__init__(key, props)
@property
def value(self):
return self.resolver.get(
self.props["value"],
self.props.get("index", None)
)
@property
def provider(self):
provider_manager = ProviderManager.instance()
return provider_manager.get(self.props["provider"])
@property
def resolver(self):
clazz = resolver_registry["secretsmanager"]
return clazz(self.provider, None, None) | """add field class to registry""" | random_line_split
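
The decorator registry plus the normalization step in `Field.create` means a bare string and an explicit `{"lookup": ..., "prop": ...}` mapping both resolve through `field_registry`; a self-contained sketch of that flow, with the provider/resolver machinery omitted:

field_registry = {}

def register_field(name):
    def add_class(clazz):
        field_registry[name] = clazz
        return clazz
    return add_class

@register_field('literal')
class LiteralField:
    def __init__(self, key, props):
        self.key = key
        self.props = props

    @property
    def value(self):
        return self.props['value']

def create(key, context):
    # Mirror of Field.create: a bare string is shorthand for a literal lookup.
    if isinstance(context, str):
        context = {'lookup': 'literal', 'prop': {'value': context}}
    return field_registry[context['lookup']](key, context['prop'])

print(create('greeting', 'hello').value)  # hello (string shorthand)
print(create('greeting', {'lookup': 'literal',
                          'prop': {'value': 'hi'}}).value)  # hi (explicit form)
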
entry_point.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import * as ts from 'typescript';
import {AbsoluteFsPath} from '../../../src/ngtsc/path';
import {FileSystem} from '../file_system/file_system';
import {parseStatementForUmdModule} from '../host/umd_host';
import {Logger} from '../logging/logger';
/**
* The possible values for the format of an entry-point.
*/
export type EntryPointFormat = 'esm5' | 'esm2015' | 'umd' | 'commonjs';
/**
* An object containing information about an entry-point, including paths
* to each of the possible entry-point formats.
*/
export interface EntryPoint {
/** The name of the package (e.g. `@angular/core`). */
name: string;
/** The parsed package.json file for this entry-point. */
packageJson: EntryPointPackageJson;
/** The path to the package that contains this entry-point. */
package: AbsoluteFsPath;
/** The path to this entry point. */
path: AbsoluteFsPath;
/** The path to a typings (.d.ts) file for this entry-point. */
typings: AbsoluteFsPath;
/** Is this EntryPoint compiled with the Angular View Engine compiler? */
compiledByAngular: boolean;
}
interface PackageJsonFormatProperties {
fesm2015?: string;
fesm5?: string;
es2015?: string; // if exists then it is actually FESM2015
esm2015?: string;
esm5?: string;
main?: string; // UMD
module?: string; // if exists then it is actually FESM5
types?: string; // Synonymous with `typings` property - see https://bit.ly/2OgWp2H
typings?: string; // TypeScript .d.ts files
}
/**
* The properties that may be loaded from the `package.json` file.
*/
export interface EntryPointPackageJson extends PackageJsonFormatProperties {
name: string;
__processed_by_ivy_ngcc__?: {[key: string]: string};
}
export type EntryPointJsonProperty = keyof(PackageJsonFormatProperties);
// We need to keep the elements of this const and the `EntryPointJsonProperty` type in sync.
export const SUPPORTED_FORMAT_PROPERTIES: EntryPointJsonProperty[] =
['fesm2015', 'fesm5', 'es2015', 'esm2015', 'esm5', 'main', 'module'];
/**
* Try to create an entry-point from the given paths and properties.
*
* @param packagePath the absolute path to the containing npm package
* @param entryPointPath the absolute path to the potential entry-point.
* @returns An entry-point if it is valid, `null` otherwise.
*/
export function getEntryPointInfo(
fs: FileSystem, logger: Logger, packagePath: AbsoluteFsPath,
entryPointPath: AbsoluteFsPath): EntryPoint|null {
const packageJsonPath = AbsoluteFsPath.resolve(entryPointPath, 'package.json');
if (!fs.exists(packageJsonPath)) {
return null;
}
const entryPointPackageJson = loadEntryPointPackage(fs, logger, packageJsonPath);
if (!entryPointPackageJson) {
return null;
}
// We must have a typings property
const typings = entryPointPackageJson.typings || entryPointPackageJson.types;
if (!typings) {
return null;
}
// Also there must exist a `metadata.json` file next to the typings entry-point.
const metadataPath =
AbsoluteFsPath.resolve(entryPointPath, typings.replace(/\.d\.ts$/, '') + '.metadata.json');
const entryPointInfo: EntryPoint = {
name: entryPointPackageJson.name,
packageJson: entryPointPackageJson,
package: packagePath,
path: entryPointPath,
typings: AbsoluteFsPath.resolve(entryPointPath, typings),
compiledByAngular: fs.exists(metadataPath),
};
return entryPointInfo;
}
/**
* Convert a package.json property into an entry-point format.
*
* @param property The property to convert to a format.
* @returns An entry-point format or `undefined` if none match the given property.
*/
export function getEntryPointFormat(
fs: FileSystem, entryPoint: EntryPoint, property: string): EntryPointFormat|undefined {
switch (property) {
case 'fesm2015':
return 'esm2015';
case 'fesm5':
return 'esm5';
case 'es2015':
return 'esm2015';
case 'esm2015':
return 'esm2015';
case 'esm5':
return 'esm5';
case 'main':
const pathToMain = AbsoluteFsPath.join(entryPoint.path, entryPoint.packageJson['main'] !);
return isUmdModule(fs, pathToMain) ? 'umd' : 'commonjs';
case 'module':
return 'esm5';
default:
return undefined;
}
}
/**
* Parses the JSON from a package.json file.
* @param packageJsonPath the absolute path to the package.json file.
* @returns JSON from the package.json file if it is valid, `null` otherwise.
*/
function loadEntryPointPackage(
fs: FileSystem, logger: Logger, packageJsonPath: AbsoluteFsPath): EntryPointPackageJson|null {
try {
return JSON.parse(fs.readFile(packageJsonPath));
} catch (e) {
// We may have run into a package.json with unexpected symbols
logger.warn(`Failed to read entry point info from ${packageJsonPath} with error ${e}.`);
return null;
}
}
function isUmdModule(fs: FileSystem, sourceFilePath: AbsoluteFsPath): boolean { | return sourceFile.statements.length > 0 &&
parseStatementForUmdModule(sourceFile.statements[0]) !== null;
} | const sourceFile =
ts.createSourceFile(sourceFilePath, fs.readFile(sourceFilePath), ts.ScriptTarget.ES5); | random_line_split |
entry_point.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import * as ts from 'typescript';
import {AbsoluteFsPath} from '../../../src/ngtsc/path';
import {FileSystem} from '../file_system/file_system';
import {parseStatementForUmdModule} from '../host/umd_host';
import {Logger} from '../logging/logger';
/**
* The possible values for the format of an entry-point.
*/
export type EntryPointFormat = 'esm5' | 'esm2015' | 'umd' | 'commonjs';
/**
* An object containing information about an entry-point, including paths
* to each of the possible entry-point formats.
*/
export interface EntryPoint {
/** The name of the package (e.g. `@angular/core`). */
name: string;
/** The parsed package.json file for this entry-point. */
packageJson: EntryPointPackageJson;
/** The path to the package that contains this entry-point. */
package: AbsoluteFsPath;
/** The path to this entry point. */
path: AbsoluteFsPath;
/** The path to a typings (.d.ts) file for this entry-point. */
typings: AbsoluteFsPath;
/** Is this EntryPoint compiled with the Angular View Engine compiler? */
compiledByAngular: boolean;
}
interface PackageJsonFormatProperties {
fesm2015?: string;
fesm5?: string;
es2015?: string; // if exists then it is actually FESM2015
esm2015?: string;
esm5?: string;
main?: string; // UMD
module?: string; // if exists then it is actually FESM5
types?: string; // Synonymous with `typings` property - see https://bit.ly/2OgWp2H
typings?: string; // TypeScript .d.ts files
}
/**
* The properties that may be loaded from the `package.json` file.
*/
export interface EntryPointPackageJson extends PackageJsonFormatProperties {
name: string;
__processed_by_ivy_ngcc__?: {[key: string]: string};
}
export type EntryPointJsonProperty = keyof(PackageJsonFormatProperties);
// We need to keep the elements of this const and the `EntryPointJsonProperty` type in sync.
export const SUPPORTED_FORMAT_PROPERTIES: EntryPointJsonProperty[] =
['fesm2015', 'fesm5', 'es2015', 'esm2015', 'esm5', 'main', 'module'];
/**
* Try to create an entry-point from the given paths and properties.
*
* @param packagePath the absolute path to the containing npm package
* @param entryPointPath the absolute path to the potential entry-point.
* @returns An entry-point if it is valid, `null` otherwise.
*/
export function getEntryPointInfo(
fs: FileSystem, logger: Logger, packagePath: AbsoluteFsPath,
entryPointPath: AbsoluteFsPath): EntryPoint|null {
const packageJsonPath = AbsoluteFsPath.resolve(entryPointPath, 'package.json');
if (!fs.exists(packageJsonPath)) {
return null;
}
const entryPointPackageJson = loadEntryPointPackage(fs, logger, packageJsonPath);
if (!entryPointPackageJson) {
return null;
}
// We must have a typings property
const typings = entryPointPackageJson.typings || entryPointPackageJson.types;
if (!typings) {
return null;
}
// Also there must exist a `metadata.json` file next to the typings entry-point.
const metadataPath =
AbsoluteFsPath.resolve(entryPointPath, typings.replace(/\.d\.ts$/, '') + '.metadata.json');
const entryPointInfo: EntryPoint = {
name: entryPointPackageJson.name,
packageJson: entryPointPackageJson,
package: packagePath,
path: entryPointPath,
typings: AbsoluteFsPath.resolve(entryPointPath, typings),
compiledByAngular: fs.exists(metadataPath),
};
return entryPointInfo;
}
/**
* Convert a package.json property into an entry-point format.
*
* @param property The property to convert to a format.
* @returns An entry-point format or `undefined` if none match the given property.
*/
export function getEntryPointFormat(
fs: FileSystem, entryPoint: EntryPoint, property: string): EntryPointFormat|undefined {
switch (property) {
case 'fesm2015':
return 'esm2015';
case 'fesm5':
return 'esm5';
case 'es2015':
return 'esm2015';
case 'esm2015':
return 'esm2015';
case 'esm5':
return 'esm5';
case 'main':
const pathToMain = AbsoluteFsPath.join(entryPoint.path, entryPoint.packageJson['main'] !);
return isUmdModule(fs, pathToMain) ? 'umd' : 'commonjs';
case 'module':
return 'esm5';
default:
return undefined;
}
}
/**
* Parses the JSON from a package.json file.
* @param packageJsonPath the absolute path to the package.json file.
* @returns JSON from the package.json file if it is valid, `null` otherwise.
*/
function loadEntryPointPackage(
fs: FileSystem, logger: Logger, packageJsonPath: AbsoluteFsPath): EntryPointPackageJson|null {
try {
return JSON.parse(fs.readFile(packageJsonPath));
} catch (e) {
// We may have run into a package.json with unexpected symbols
logger.warn(`Failed to read entry point info from ${packageJsonPath} with error ${e}.`);
return null;
}
}
function isUmdModule(fs: FileSystem, sourceFilePath: AbsoluteFsPath): boolean | {
const sourceFile =
ts.createSourceFile(sourceFilePath, fs.readFile(sourceFilePath), ts.ScriptTarget.ES5);
return sourceFile.statements.length > 0 &&
parseStatementForUmdModule(sourceFile.statements[0]) !== null;
} | identifier_body |
|
entry_point.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import * as ts from 'typescript';
import {AbsoluteFsPath} from '../../../src/ngtsc/path';
import {FileSystem} from '../file_system/file_system';
import {parseStatementForUmdModule} from '../host/umd_host';
import {Logger} from '../logging/logger';
/**
* The possible values for the format of an entry-point.
*/
export type EntryPointFormat = 'esm5' | 'esm2015' | 'umd' | 'commonjs';
/**
* An object containing information about an entry-point, including paths
* to each of the possible entry-point formats.
*/
export interface EntryPoint {
/** The name of the package (e.g. `@angular/core`). */
name: string;
/** The parsed package.json file for this entry-point. */
packageJson: EntryPointPackageJson;
/** The path to the package that contains this entry-point. */
package: AbsoluteFsPath;
/** The path to this entry point. */
path: AbsoluteFsPath;
/** The path to a typings (.d.ts) file for this entry-point. */
typings: AbsoluteFsPath;
/** Is this EntryPoint compiled with the Angular View Engine compiler? */
compiledByAngular: boolean;
}
interface PackageJsonFormatProperties {
fesm2015?: string;
fesm5?: string;
es2015?: string; // if exists then it is actually FESM2015
esm2015?: string;
esm5?: string;
main?: string; // UMD
module?: string; // if exists then it is actually FESM5
types?: string; // Synonymous with `typings` property - see https://bit.ly/2OgWp2H
typings?: string; // TypeScript .d.ts files
}
/**
* The properties that may be loaded from the `package.json` file.
*/
export interface EntryPointPackageJson extends PackageJsonFormatProperties {
name: string;
__processed_by_ivy_ngcc__?: {[key: string]: string};
}
export type EntryPointJsonProperty = keyof(PackageJsonFormatProperties);
// We need to keep the elements of this const and the `EntryPointJsonProperty` type in sync.
export const SUPPORTED_FORMAT_PROPERTIES: EntryPointJsonProperty[] =
['fesm2015', 'fesm5', 'es2015', 'esm2015', 'esm5', 'main', 'module'];
/**
* Try to create an entry-point from the given paths and properties.
*
* @param packagePath the absolute path to the containing npm package
* @param entryPointPath the absolute path to the potential entry-point.
* @returns An entry-point if it is valid, `null` otherwise.
*/
export function getEntryPointInfo(
fs: FileSystem, logger: Logger, packagePath: AbsoluteFsPath,
entryPointPath: AbsoluteFsPath): EntryPoint|null {
const packageJsonPath = AbsoluteFsPath.resolve(entryPointPath, 'package.json');
if (!fs.exists(packageJsonPath)) {
return null;
}
const entryPointPackageJson = loadEntryPointPackage(fs, logger, packageJsonPath);
if (!entryPointPackageJson) {
return null;
}
// We must have a typings property
const typings = entryPointPackageJson.typings || entryPointPackageJson.types;
if (!typings) {
return null;
}
// Also there must exist a `metadata.json` file next to the typings entry-point.
const metadataPath =
AbsoluteFsPath.resolve(entryPointPath, typings.replace(/\.d\.ts$/, '') + '.metadata.json');
const entryPointInfo: EntryPoint = {
name: entryPointPackageJson.name,
packageJson: entryPointPackageJson,
package: packagePath,
path: entryPointPath,
typings: AbsoluteFsPath.resolve(entryPointPath, typings),
compiledByAngular: fs.exists(metadataPath),
};
return entryPointInfo;
}
/**
* Convert a package.json property into an entry-point format.
*
* @param property The property to convert to a format.
* @returns An entry-point format or `undefined` if none match the given property.
*/
export function getEntryPointFormat(
fs: FileSystem, entryPoint: EntryPoint, property: string): EntryPointFormat|undefined {
switch (property) {
case 'fesm2015':
return 'esm2015';
case 'fesm5':
return 'esm5';
case 'es2015':
return 'esm2015';
case 'esm2015':
return 'esm2015';
case 'esm5':
return 'esm5';
case 'main':
const pathToMain = AbsoluteFsPath.join(entryPoint.path, entryPoint.packageJson['main'] !);
return isUmdModule(fs, pathToMain) ? 'umd' : 'commonjs';
case 'module':
return 'esm5';
default:
return undefined;
}
}
/**
* Parses the JSON from a package.json file.
* @param packageJsonPath the absolute path to the package.json file.
* @returns JSON from the package.json file if it is valid, `null` otherwise.
*/
function | (
fs: FileSystem, logger: Logger, packageJsonPath: AbsoluteFsPath): EntryPointPackageJson|null {
try {
return JSON.parse(fs.readFile(packageJsonPath));
} catch (e) {
// We may have run into a package.json with unexpected symbols
logger.warn(`Failed to read entry point info from ${packageJsonPath} with error ${e}.`);
return null;
}
}
function isUmdModule(fs: FileSystem, sourceFilePath: AbsoluteFsPath): boolean {
const sourceFile =
ts.createSourceFile(sourceFilePath, fs.readFile(sourceFilePath), ts.ScriptTarget.ES5);
return sourceFile.statements.length > 0 &&
parseStatementForUmdModule(sourceFile.statements[0]) !== null;
}
| loadEntryPointPackage | identifier_name |
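
The `switch` in `getEntryPointFormat` is essentially a static property-to-format table, with `main` as the one dynamic case (it sniffs the file for a UMD wrapper). The same mapping, sketched in Python with hypothetical names:

# Static part of the mapping from package.json properties to bundle formats.
FORMAT_BY_PROPERTY = {
    'fesm2015': 'esm2015',
    'fesm5': 'esm5',
    'es2015': 'esm2015',   # if present, actually a flat ESM2015 bundle
    'esm2015': 'esm2015',
    'esm5': 'esm5',
    'module': 'esm5',      # if present, actually a flat ESM5 bundle
}

def entry_point_format(prop, looks_like_umd=False):
    if prop == 'main':
        # The real code parses the file and checks for a UMD wrapper here.
        return 'umd' if looks_like_umd else 'commonjs'
    return FORMAT_BY_PROPERTY.get(prop)

print(entry_point_format('fesm5'))        # esm5
print(entry_point_format('main', True))   # umd
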
job.py | __author__ = 'Christof Pieloth'
import logging
from packbacker.errors import ParameterError
from packbacker.installers import installer_prototypes
from packbacker.utils import UtilsUI
class Job(object):
log = logging.getLogger(__name__)
def __init__(self):
self._installers = []
def add_installer(self, installer):
self._installers.append(installer)
def execute(self):
errors = 0
for i in self._installers:
if not UtilsUI.ask_for_execute('Install ' + i.label):
continue
try:
if i.install():
Job.log.info(i.name + ' executed.')
else:
errors += 1
Job.log.error('Error on executing ' + i.name + '!')
except Exception as ex:
errors += 1
Job.log.error('Unknown error:\n' + str(ex))
return errors
@staticmethod
def read_job(fname):
prototypes = []
prototypes.extend(installer_prototypes())
job = None
try:
job_file = open(fname, 'r')
except IOError as err:
Job.log.critical('Error on reading job file:\n' + str(err))
else:
with job_file:
job = Job()
for line in job_file:
|
return job
@staticmethod
def read_parameter(line):
params = {}
i = line.find(': ') + 2
line = line[i:]
pairs = line.split(';')
for pair in pairs:
pair = pair.strip()
par = pair.split('=')
if len(par) == 2:
params[par[0]] = par[1]
return params | if line[0] == '#':
continue
for p in prototypes:
if p.matches(line):
try:
params = Job.read_parameter(line)
cmd = p.instance(params)
job.add_installer(cmd)
except ParameterError as err:
Job.log.error("Installer '" + p.name + "' is skipped: " + str(err))
except Exception as ex:
Job.log.critical('Unknown error: \n' + str(ex))
continue | conditional_block |
job.py | __author__ = 'Christof Pieloth'
import logging
from packbacker.errors import ParameterError
from packbacker.installers import installer_prototypes
from packbacker.utils import UtilsUI
class Job(object):
log = logging.getLogger(__name__)
def __init__(self):
self._installers = [] | errors = 0
for i in self._installers:
if not UtilsUI.ask_for_execute('Install ' + i.label):
continue
try:
if i.install():
Job.log.info(i.name + ' executed.')
else:
errors += 1
Job.log.error('Error on executing ' + i.name + '!')
except Exception as ex:
errors += 1
Job.log.error('Unknown error:\n' + str(ex))
return errors
@staticmethod
def read_job(fname):
prototypes = []
prototypes.extend(installer_prototypes())
job = None
try:
job_file = open(fname, 'r')
except IOError as err:
Job.log.critical('Error on reading job file:\n' + str(err))
else:
with job_file:
job = Job()
for line in job_file:
if line[0] == '#':
continue
for p in prototypes:
if p.matches(line):
try:
params = Job.read_parameter(line)
cmd = p.instance(params)
job.add_installer(cmd)
except ParameterError as err:
Job.log.error("Installer '" + p.name + "' is skipped: " + str(err))
except Exception as ex:
Job.log.critical('Unknown error: \n' + str(ex))
continue
return job
@staticmethod
def read_parameter(line):
params = {}
i = line.find(': ') + 2
line = line[i:]
pairs = line.split(';')
for pair in pairs:
pair = pair.strip()
par = pair.split('=')
if len(par) == 2:
params[par[0]] = par[1]
return params |
def add_installer(self, installer):
self._installers.append(installer)
def execute(self): | random_line_split |
job.py | __author__ = 'Christof Pieloth'
import logging
from packbacker.errors import ParameterError
from packbacker.installers import installer_prototypes
from packbacker.utils import UtilsUI
class Job(object):
log = logging.getLogger(__name__)
def __init__(self):
self._installers = []
def add_installer(self, installer):
self._installers.append(installer)
def execute(self):
errors = 0
for i in self._installers:
if not UtilsUI.ask_for_execute('Install ' + i.label):
continue
try:
if i.install():
Job.log.info(i.name + ' executed.')
else:
errors += 1
Job.log.error('Error on executing ' + i.name + '!')
except Exception as ex:
errors += 1
Job.log.error('Unknown error:\n' + str(ex))
return errors
@staticmethod
def read_job(fname):
prototypes = []
prototypes.extend(installer_prototypes())
job = None
try:
job_file = open(fname, 'r')
except IOError as err:
Job.log.critical('Error on reading job file:\n' + str(err))
else:
with job_file:
job = Job()
for line in job_file:
if line[0] == '#':
continue
for p in prototypes:
if p.matches(line):
try:
params = Job.read_parameter(line)
cmd = p.instance(params)
job.add_installer(cmd)
except ParameterError as err:
Job.log.error("Installer '" + p.name + "' is skipped: " + str(err))
except Exception as ex:
Job.log.critical('Unknown error: \n' + str(ex))
continue
return job
@staticmethod
def | (line):
params = {}
i = line.find(': ') + 2
line = line[i:]
pairs = line.split(';')
for pair in pairs:
pair = pair.strip()
par = pair.split('=')
if len(par) == 2:
params[par[0]] = par[1]
return params | read_parameter | identifier_name |
job.py | __author__ = 'Christof Pieloth'
import logging
from packbacker.errors import ParameterError
from packbacker.installers import installer_prototypes
from packbacker.utils import UtilsUI
class Job(object):
log = logging.getLogger(__name__)
def __init__(self):
self._installers = []
def add_installer(self, installer):
self._installers.append(installer)
def execute(self):
errors = 0
for i in self._installers:
if not UtilsUI.ask_for_execute('Install ' + i.label):
continue
try:
if i.install():
Job.log.info(i.name + ' executed.')
else:
errors += 1
Job.log.error('Error on executing ' + i.name + '!')
except Exception as ex:
errors += 1
Job.log.error('Unknown error:\n' + str(ex))
return errors
@staticmethod
def read_job(fname):
prototypes = []
prototypes.extend(installer_prototypes())
job = None
try:
job_file = open(fname, 'r')
except IOError as err:
Job.log.critical('Error on reading job file:\n' + str(err))
else:
with job_file:
job = Job()
for line in job_file:
if line[0] == '#':
continue
for p in prototypes:
if p.matches(line):
try:
params = Job.read_parameter(line)
cmd = p.instance(params)
job.add_installer(cmd)
except ParameterError as err:
Job.log.error("Installer '" + p.name + "' is skipped: " + str(err))
except Exception as ex:
Job.log.critical('Unknown error: \n' + str(ex))
continue
return job
@staticmethod
def read_parameter(line):
| params = {}
i = line.find(': ') + 2
line = line[i:]
pairs = line.split(';')
for pair in pairs:
pair = pair.strip()
par = pair.split('=')
if len(par) == 2:
params[par[0]] = par[1]
return params | identifier_body |
|
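`read_parameter` implies the job-file line syntax: an installer name, then `': '`, then a `';'`-separated list of `key=value` pairs. A standalone sketch with a hypothetical installer line:

def read_parameter(line):
    # Everything after ': ' is a ';'-separated list of key=value pairs.
    params = {}
    start = line.find(': ') + 2
    for pair in line[start:].split(';'):
        key_value = pair.strip().split('=')
        if len(key_value) == 2:
            params[key_value[0]] = key_value[1]
    return params

# Hypothetical job-file line; the installer name and keys are illustrative only.
print(read_parameter('some_installer: dest_dir=/opt/src;mode=release'))
# {'dest_dir': '/opt/src', 'mode': 'release'}
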
zero.rs | #![feature(core, zero_one)]
extern crate core;
#[cfg(test)]
mod tests {
use core::num::Zero;
// pub trait Zero {
// /// The "zero" (usually, additive identity) for this type.
// fn zero() -> Self;
// }
// pub trait One {
// /// The "one" (usually, multiplicative identity) for this type.
// fn one() -> Self;
// }
// macro_rules! zero_one_impl {
// ($($t:ty)*) => ($(
// impl Zero for $t {
// #[inline]
// fn zero() -> Self { 0 }
// }
// impl One for $t {
// #[inline]
// fn one() -> Self { 1 }
// }
// )*)
// }
// zero_one_impl! { u8 u16 u32 u64 usize i8 i16 i32 i64 isize }
// macro_rules! zero_one_impl_float {
// ($($t:ty)*) => ($(
// impl Zero for $t {
// #[inline]
// fn zero() -> Self { 0.0 }
// }
// impl One for $t {
// #[inline]
// fn one() -> Self { 1.0 }
// }
// )*)
// }
// zero_one_impl_float! { f32 f64 }
type T = u64;
#[test]
fn | () {
let value: T = T::zero();
assert_eq!(value, 0x0000000000000000);
}
}
| zero_test1 | identifier_name |
zero.rs | #![feature(core, zero_one)]
extern crate core;
#[cfg(test)]
mod tests {
use core::num::Zero;
// pub trait Zero {
// /// The "zero" (usually, additive identity) for this type.
// fn zero() -> Self;
// }
// pub trait One {
// /// The "one" (usually, multiplicative identity) for this type.
// fn one() -> Self;
// }
// macro_rules! zero_one_impl {
// ($($t:ty)*) => ($(
// impl Zero for $t {
// #[inline]
// fn zero() -> Self { 0 } | // impl One for $t {
// #[inline]
// fn one() -> Self { 1 }
// }
// )*)
// }
// zero_one_impl! { u8 u16 u32 u64 usize i8 i16 i32 i64 isize }
// macro_rules! zero_one_impl_float {
// ($($t:ty)*) => ($(
// impl Zero for $t {
// #[inline]
// fn zero() -> Self { 0.0 }
// }
// impl One for $t {
// #[inline]
// fn one() -> Self { 1.0 }
// }
// )*)
// }
// zero_one_impl_float! { f32 f64 }
type T = u64;
#[test]
fn zero_test1() {
let value: T = T::zero();
assert_eq!(value, 0x0000000000000000);
}
} | // } | random_line_split |
zero.rs | #![feature(core, zero_one)]
extern crate core;
#[cfg(test)]
mod tests {
use core::num::Zero;
// pub trait Zero {
// /// The "zero" (usually, additive identity) for this type.
// fn zero() -> Self;
// }
// pub trait One {
// /// The "one" (usually, multiplicative identity) for this type.
// fn one() -> Self;
// }
// macro_rules! zero_one_impl {
// ($($t:ty)*) => ($(
// impl Zero for $t {
// #[inline]
// fn zero() -> Self { 0 }
// }
// impl One for $t {
// #[inline]
// fn one() -> Self { 1 }
// }
// )*)
// }
// zero_one_impl! { u8 u16 u32 u64 usize i8 i16 i32 i64 isize }
// macro_rules! zero_one_impl_float {
// ($($t:ty)*) => ($(
// impl Zero for $t {
// #[inline]
// fn zero() -> Self { 0.0 }
// }
// impl One for $t {
// #[inline]
// fn one() -> Self { 1.0 }
// }
// )*)
// }
// zero_one_impl_float! { f32 f64 }
type T = u64;
#[test]
fn zero_test1() |
}
| {
let value: T = T::zero();
assert_eq!(value, 0x0000000000000000);
} | identifier_body |
nzbToGamez.py | #!/usr/bin/env python2
#
##############################################################################
### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to CouchPotato, SickBeard, NzbDrone, Mylar, Gamez, HeadPhones.
#
# This script sends the download to your automated media management servers.
#
# NOTE: This script requires Python to be installed on your system.
##############################################################################
#
### OPTIONS ###
## General
# Auto Update nzbToMedia (0, 1).
#
# Set to 1 if you want nzbToMedia to automatically check for and update to the latest version
#auto_update=0
# Safe Mode protection of DestDir (0, 1).
#
# Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake.
#safe_mode=1
## Gamez
# Gamez script category.
# |
# Gamez host.
#
# The ipaddress for your Gamez server. e.g For the Same system use localhost or 127.0.0.1
#gzhost=localhost
# Gamez port.
#gzport=8085
# Gamez uses ssl (0, 1).
#
# Set to 1 if using ssl, else set to 0.
#gzssl=0
# Gamez library
#
# move downloaded games here.
#gzlibrary
# Gamez web_root
#
# set this if using a reverse proxy.
#gzweb_root=
# Gamez watch directory.
#
# set this to where your Gamez completed downloads are.
#gzwatch_dir=
## Posix
# Niceness for external tasks Extractor and Transcoder.
#
# Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
#niceness=10
# ionice scheduling class (0, 1, 2, 3).
#
# Set the ionice scheduling class. 0 for none, 1 for real time, 2 for best-effort, 3 for idle.
#ionice_class=2
# ionice scheduling class data.
#
# Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data.
#ionice_classdata=4
## WakeOnLan
# use WOL (0, 1).
#
# set to 1 to send WOL broadcast to the mac and test the server (e.g. xbmc) on the host and port specified.
#wolwake=0
# WOL MAC
#
# enter the mac address of the system to be woken.
#wolmac=00:01:2e:2D:64:e1
# Set the Host and Port of a server to verify system has woken.
#wolhost=192.168.1.37
#wolport=80
### NZBGET POST-PROCESSING SCRIPT ###
##############################################################################
import sys
import nzbToMedia
section = "Gamez"
result = nzbToMedia.main(sys.argv, section)
sys.exit(result) | # category that gets called for post-processing with Gamez.
#gzCategory=games
# Gamez api key.
#gzapikey= | random_line_split |
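
The `#option=value` lines in the header above are not dead comments: NZBGet parses them into its settings UI and hands the chosen values to the script as environment variables, conventionally `NZBPO_` plus the upper-cased option name. A minimal sketch of reading them (assuming that documented convention):

import os

def get_option(name, default=None):
    # NZBGet exposes '#gzhost=localhost' (for example) as NZBPO_GZHOST.
    return os.environ.get('NZBPO_' + name.upper(), default)

gz_host = get_option('gzhost', 'localhost')
gz_port = get_option('gzport', '8085')
print('Gamez server expected at %s:%s' % (gz_host, gz_port))
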
selectionRendererFactory.ts |
module awk.grid {
export class SelectionRendererFactory {
angularGrid: any;
selectionController: any;
init(angularGrid: any, selectionController: any) {
this.angularGrid = angularGrid;
this.selectionController = selectionController;
}
createCheckboxColDef() {
return {
width: 30,
suppressMenu: true,
suppressSorting: true,
headerCellRenderer: function () {
var eCheckbox = document.createElement('input');
eCheckbox.type = 'checkbox';
eCheckbox.name = 'name';
return eCheckbox;
},
cellRenderer: this.createCheckboxRenderer()
};
}
createCheckboxRenderer() {
var that = this;
return function (params: any) {
return that.createSelectionCheckbox(params.node, params.rowIndex);
};
}
createSelectionCheckbox(node: any, rowIndex: any) {
var eCheckbox = document.createElement('input');
eCheckbox.type = "checkbox";
eCheckbox.name = "name";
eCheckbox.className = 'ag-selection-checkbox';
setCheckboxState(eCheckbox, this.selectionController.isNodeSelected(node));
var that = this;
eCheckbox.onclick = function (event) {
event.stopPropagation();
};
eCheckbox.onchange = function () {
var newValue = eCheckbox.checked;
if (newValue) {
that.selectionController.selectIndex(rowIndex, true);
} else {
that.selectionController.deselectIndex(rowIndex);
}
};
this.angularGrid.addVirtualRowListener(rowIndex, {
rowSelected: function (selected: any) {
setCheckboxState(eCheckbox, selected);
},
rowRemoved: function () {
}
});
return eCheckbox;
}
}
function setCheckboxState(eCheckbox: any, state: any) {
if (typeof state === 'boolean') | else {
// isNodeSelected returns back undefined if it's a group and the children
// are a mix of selected and unselected
eCheckbox.indeterminate = true;
}
}
}
| {
eCheckbox.checked = state;
eCheckbox.indeterminate = false;
} | conditional_block |
selectionRendererFactory.ts | module awk.grid {
export class SelectionRendererFactory {
angularGrid: any;
selectionController: any;
init(angularGrid: any, selectionController: any) {
this.angularGrid = angularGrid;
this.selectionController = selectionController;
}
createCheckboxColDef() {
return {
width: 30,
suppressMenu: true,
suppressSorting: true,
headerCellRenderer: function () {
var eCheckbox = document.createElement('input');
eCheckbox.type = 'checkbox';
eCheckbox.name = 'name';
return eCheckbox;
},
cellRenderer: this.createCheckboxRenderer()
};
}
createCheckboxRenderer() {
var that = this;
return function (params: any) {
return that.createSelectionCheckbox(params.node, params.rowIndex);
};
}
createSelectionCheckbox(node: any, rowIndex: any) {
var eCheckbox = document.createElement('input');
eCheckbox.type = "checkbox";
eCheckbox.name = "name";
eCheckbox.className = 'ag-selection-checkbox';
setCheckboxState(eCheckbox, this.selectionController.isNodeSelected(node));
var that = this;
eCheckbox.onclick = function (event) {
event.stopPropagation();
};
eCheckbox.onchange = function () {
var newValue = eCheckbox.checked;
if (newValue) {
that.selectionController.selectIndex(rowIndex, true);
} else {
that.selectionController.deselectIndex(rowIndex);
}
};
this.angularGrid.addVirtualRowListener(rowIndex, {
rowSelected: function (selected: any) {
setCheckboxState(eCheckbox, selected);
},
rowRemoved: function () {
}
});
return eCheckbox;
}
}
function setCheckboxState(eCheckbox: any, state: any) {
if (typeof state === 'boolean') {
eCheckbox.checked = state;
eCheckbox.indeterminate = false; | }
} | } else {
// isNodeSelected returns back undefined if it's a group and the children
// are a mix of selected and unselected
eCheckbox.indeterminate = true;
} | random_line_split |
selectionRendererFactory.ts |
module awk.grid {
export class SelectionRendererFactory {
angularGrid: any;
selectionController: any;
| (angularGrid: any, selectionController: any) {
this.angularGrid = angularGrid;
this.selectionController = selectionController;
}
createCheckboxColDef() {
return {
width: 30,
suppressMenu: true,
suppressSorting: true,
headerCellRenderer: function () {
var eCheckbox = document.createElement('input');
eCheckbox.type = 'checkbox';
eCheckbox.name = 'name';
return eCheckbox;
},
cellRenderer: this.createCheckboxRenderer()
};
}
createCheckboxRenderer() {
var that = this;
return function (params: any) {
return that.createSelectionCheckbox(params.node, params.rowIndex);
};
}
createSelectionCheckbox(node: any, rowIndex: any) {
var eCheckbox = document.createElement('input');
eCheckbox.type = "checkbox";
eCheckbox.name = "name";
eCheckbox.className = 'ag-selection-checkbox';
setCheckboxState(eCheckbox, this.selectionController.isNodeSelected(node));
var that = this;
eCheckbox.onclick = function (event) {
event.stopPropagation();
};
eCheckbox.onchange = function () {
var newValue = eCheckbox.checked;
if (newValue) {
that.selectionController.selectIndex(rowIndex, true);
} else {
that.selectionController.deselectIndex(rowIndex);
}
};
this.angularGrid.addVirtualRowListener(rowIndex, {
rowSelected: function (selected: any) {
setCheckboxState(eCheckbox, selected);
},
rowRemoved: function () {
}
});
return eCheckbox;
}
}
function setCheckboxState(eCheckbox: any, state: any) {
if (typeof state === 'boolean') {
eCheckbox.checked = state;
eCheckbox.indeterminate = false;
} else {
// isNodeSelected returns back undefined if it's a group and the children
// are a mix of selected and unselected
eCheckbox.indeterminate = true;
}
}
}
| init | identifier_name |
selectionRendererFactory.ts |
module awk.grid {
export class SelectionRendererFactory {
angularGrid: any;
selectionController: any;
init(angularGrid: any, selectionController: any) {
this.angularGrid = angularGrid;
this.selectionController = selectionController;
}
createCheckboxColDef() {
return {
width: 30,
suppressMenu: true,
suppressSorting: true,
headerCellRenderer: function () {
var eCheckbox = document.createElement('input');
eCheckbox.type = 'checkbox';
eCheckbox.name = 'name';
return eCheckbox;
},
cellRenderer: this.createCheckboxRenderer()
};
}
createCheckboxRenderer() {
var that = this;
return function (params: any) {
return that.createSelectionCheckbox(params.node, params.rowIndex);
};
}
createSelectionCheckbox(node: any, rowIndex: any) {
var eCheckbox = document.createElement('input');
eCheckbox.type = "checkbox";
eCheckbox.name = "name";
eCheckbox.className = 'ag-selection-checkbox';
setCheckboxState(eCheckbox, this.selectionController.isNodeSelected(node));
var that = this;
eCheckbox.onclick = function (event) {
event.stopPropagation();
};
eCheckbox.onchange = function () {
var newValue = eCheckbox.checked;
if (newValue) {
that.selectionController.selectIndex(rowIndex, true);
} else {
that.selectionController.deselectIndex(rowIndex);
}
};
this.angularGrid.addVirtualRowListener(rowIndex, {
rowSelected: function (selected: any) {
setCheckboxState(eCheckbox, selected);
},
rowRemoved: function () {
}
});
return eCheckbox;
}
}
function setCheckboxState(eCheckbox: any, state: any) |
}
| {
if (typeof state === 'boolean') {
eCheckbox.checked = state;
eCheckbox.indeterminate = false;
} else {
// isNodeSelected returns back undefined if it's a group and the children
// are a mix of selected and unselected
eCheckbox.indeterminate = true;
}
} | identifier_body |
sock_serv.py | # -*- coding: utf-8 -*-
"""
Simple sockjs-tornado chat application. By default it will listen on port 8888.
"""
import sys
import os
import json
from urllib import urlencode
import tornado.ioloop
import tornado.web
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
import sockjs.tornado
sys.path.append( os.path.join(os.path.dirname(__file__), '..') )
os.environ['DJANGO_SETTINGS_MODULE'] = 'dj_backend.settings'
from importlib import import_module
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
DJ_BACKEND = 'http://localhost:8000'
try:
from .local_sock_settings import *
except ImportError:
pass
class IndexHandler(tornado.web.RequestHandler):
"""Regular HTTP handler to serve the chatroom page"""
def get(self):
self.render('index.html')
class ChatConnection(sockjs.tornado.SockJSConnection):
"""Chat connection implementation""" | def on_open(self, info):
self.joined_session = []
@gen.coroutine
def on_message(self, message):
recv_data = json.loads(message)
if recv_data['type']=='session_join':
session_name = recv_data['data']
if self.session_blocks.get(session_name, False) == False:
self.session_blocks[session_name] = set()
session_id = self.session.conn_info.cookies['sessionid'].value
dj_session = SessionStore(session_key=session_id)
dj_user_id = dj_session.get('_auth_user_id')
if dj_user_id:
user_response = yield self.http_client.fetch(DJ_BACKEND + '/api/users/%d/?format=json' % dj_user_id)
dj_user = json.loads(user_response.body)
self.dj_user = dj_user
self.user_name = self.dj_user['username']
else:
self.user_name = dj_session.get('insta_name')
self.dj_user = None
self.session_blocks[session_name].add(self)
self.joined_session.append(session_name)
self.session_list_update(session_name)
send_data = {}
send_data['type'] = 'code_update'
send_data['data'] = self.session_code_storage[session_name]
self.send(json.dumps(send_data))
elif recv_data['type']=='code_update':
session_name = recv_data['session']
send_data = {}
send_data['type'] = 'code_update'
send_data['data'] = recv_data['data']
self.session_code_storage[session_name] = recv_data['data']
self.broadcast(self.session_blocks[session_name] - set([self]), json.dumps(send_data))
elif recv_data['type']=='chat_message_send':
session_name = recv_data['session']
send_data = {}
send_data['type'] = 'chat_message_receive'
chat_data = {'username': self.user_name}
if self.dj_user:
chat_data['avatar_url'] = self.dj_user['avatar_url']
chat_data['message'] = recv_data['data']
send_data['data'] = json.dumps(chat_data)
self.broadcast(self.session_blocks[session_name], json.dumps(send_data))
elif recv_data['type']=='run_code':
session_name = recv_data['session']
code = recv_data['data']
language = recv_data['language']
post_data = urlencode({'code': code, 'language': language})
result = yield self.http_client.fetch(DJ_BACKEND + '/pair_session/run_code/',
method='POST', body=post_data)
result_data = {'result': result.body}
send_data = {}
send_data['type'] = 'run_code_result'
send_data['data'] = json.dumps(result_data)
self.broadcast(self.session_blocks[session_name], json.dumps(send_data))
def on_close(self):
for session in self.joined_session:
self.session_blocks[session].remove(self)
self.session_list_update(session)
def session_list_update(self, session_name):
session_list = []
for session in self.session_blocks[session_name]:
user = {'username': session.user_name}
if session.dj_user:
user['avatar_url'] = session.dj_user['avatar_url']
session_list.append(user)
send_data = {}
send_data['type'] = 'session_list_update'
send_data['data'] = json.dumps(session_list)
self.broadcast(self.session_blocks[session_name], json.dumps(send_data))
if __name__ == "__main__":
import logging
logging.getLogger().setLevel(logging.INFO)
# 1. Create chat router
ChatRouter = sockjs.tornado.SockJSRouter(ChatConnection, '/chat')
# 2. Create Tornado application
app = tornado.web.Application(
[(r"/", IndexHandler)] + ChatRouter.urls,
debug=True
)
# 3. Make Tornado app listen on port 8888
app.listen(8888)
# 4. Start IOLoop
tornado.ioloop.IOLoop.instance().start() | session_blocks = dict()
session_code_storage = dict()
http_client = AsyncHTTPClient()
| random_line_split |
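The `on_message` dispatch above defines a small JSON protocol keyed on `type`, with `session` and `data` fields; the client-side payloads it accepts look like this (session name and code are hypothetical):

import json

# One payload per branch of the on_message handler.
messages = [
    {'type': 'session_join', 'data': 'demo-session'},
    {'type': 'code_update', 'session': 'demo-session',
     'data': "print('hello')"},
    {'type': 'chat_message_send', 'session': 'demo-session',
     'data': 'hi everyone'},
    {'type': 'run_code', 'session': 'demo-session',
     'language': 'python', 'data': 'print(1 + 1)'},
]
for message in messages:
    payload = json.dumps(message)
    print(payload)  # each would be sent over the SockJS connection
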
sock_serv.py | # -*- coding: utf-8 -*-
"""
Simple sockjs-tornado chat application. By default it will listen on port 8888.
"""
import sys
import os
import json
from urllib import urlencode
import tornado.ioloop
import tornado.web
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
import sockjs.tornado
sys.path.append( os.path.join(os.path.dirname(__file__), '..') )
os.environ['DJANGO_SETTINGS_MODULE'] = 'dj_backend.settings'
from importlib import import_module
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
DJ_BACKEND = 'http://localhost:8000'
try:
from .local_sock_settings import *
except ImportError:
pass
class IndexHandler(tornado.web.RequestHandler):
"""Regular HTTP handler to serve the chatroom page"""
def get(self):
self.render('index.html')
class ChatConnection(sockjs.tornado.SockJSConnection):
"""Chat connection implementation"""
session_blocks = dict()
session_code_storage = dict()
http_client = AsyncHTTPClient()
def on_open(self, info):
self.joined_session = []
@gen.coroutine
def | (self, message):
recv_data = json.loads(message)
if recv_data['type']=='session_join':
session_name = recv_data['data']
if self.session_blocks.get(session_name, False) == False:
self.session_blocks[session_name] = set()
session_id = self.session.conn_info.cookies['sessionid'].value
dj_session = SessionStore(session_key=session_id)
dj_user_id = dj_session.get('_auth_user_id')
if dj_user_id:
user_response = yield self.http_client.fetch(DJ_BACKEND + '/api/users/%d/?format=json' % dj_user_id)
dj_user = json.loads(user_response.body)
self.dj_user = dj_user
self.user_name = self.dj_user['username']
else:
self.user_name = dj_session.get('insta_name')
self.dj_user = None
self.session_blocks[session_name].add(self)
self.joined_session.append(session_name)
self.session_list_update(session_name)
send_data = {}
send_data['type'] = 'code_update'
send_data['data'] = self.session_code_storage[session_name]
self.send(json.dumps(send_data))
elif recv_data['type']=='code_update':
session_name = recv_data['session']
send_data = {}
send_data['type'] = 'code_update'
send_data['data'] = recv_data['data']
self.session_code_storage[session_name] = recv_data['data']
self.broadcast(self.session_blocks[session_name] - set([self]), json.dumps(send_data))
elif recv_data['type']=='chat_message_send':
session_name = recv_data['session']
send_data = {}
send_data['type'] = 'chat_message_receive'
chat_data = {'username': self.user_name}
if self.dj_user:
chat_data['avatar_url'] = self.dj_user['avatar_url']
chat_data['message'] = recv_data['data']
send_data['data'] = json.dumps(chat_data)
self.broadcast(self.session_blocks[session_name], json.dumps(send_data))
elif recv_data['type']=='run_code':
session_name = recv_data['session']
code = recv_data['data']
language = recv_data['language']
post_data = urlencode({'code': code, 'language': language})
result = yield self.http_client.fetch(DJ_BACKEND + '/pair_session/run_code/',
method='POST', body=post_data)
result_data = {'result': result.body}
send_data = {}
send_data['type'] = 'run_code_result'
send_data['data'] = json.dumps(result_data)
self.broadcast(self.session_blocks[session_name], json.dumps(send_data))
def on_close(self):
for session in self.joined_session:
self.session_blocks[session].remove(self)
self.session_list_update(session)
def session_list_update(self, session_name):
session_list = []
for session in self.session_blocks[session_name]:
user = {'username': session.user_name}
if session.dj_user:
user['avatar_url'] = session.dj_user['avatar_url']
session_list.append(user)
send_data = {}
send_data['type'] = 'session_list_update'
send_data['data'] = json.dumps(session_list)
self.broadcast(self.session_blocks[session_name], json.dumps(send_data))
if __name__ == "__main__":
import logging
logging.getLogger().setLevel(logging.INFO)
# 1. Create chat router
ChatRouter = sockjs.tornado.SockJSRouter(ChatConnection, '/chat')
# 2. Create Tornado application
app = tornado.web.Application(
[(r"/", IndexHandler)] + ChatRouter.urls,
debug=True
)
# 3. Make Tornado app listen on port 8888
app.listen(8888)
# 4. Start IOLoop
tornado.ioloop.IOLoop.instance().start()
| on_message | identifier_name |
sock_serv.py | # -*- coding: utf-8 -*-
"""
Simple sockjs-tornado chat application. By default will listen on port 8888.
"""
import sys
import os
import json
from urllib import urlencode
import tornado.ioloop
import tornado.web
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
import sockjs.tornado
sys.path.append( os.path.join(os.path.dirname(__file__), '..') )
os.environ['DJANGO_SETTINGS_MODULE'] = 'dj_backend.settings'
from importlib import import_module
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
DJ_BACKEND = 'http://localhost:8000'
try:
from .local_sock_settings import *
except ImportError:
pass
class IndexHandler(tornado.web.RequestHandler):
"""Regular HTTP handler to serve the chatroom page"""
def get(self):
self.render('index.html')
class ChatConnection(sockjs.tornado.SockJSConnection):
"""Chat connection implementation"""
session_blocks = dict()
session_code_storage = dict()
http_client = AsyncHTTPClient()
def on_open(self, info):
self.joined_session = []
@gen.coroutine
def on_message(self, message):
recv_data = json.loads(message)
if recv_data['type']=='session_join':
session_name = recv_data['data']
            if session_name not in self.session_blocks:
self.session_blocks[session_name] = set()
session_id = self.session.conn_info.cookies['sessionid'].value
dj_session = SessionStore(session_key=session_id)
dj_user_id = dj_session.get('_auth_user_id')
if dj_user_id:
user_response = yield self.http_client.fetch(DJ_BACKEND + '/api/users/%d/?format=json' % dj_user_id)
dj_user = json.loads(user_response.body)
self.dj_user = dj_user
self.user_name = self.dj_user['username']
else:
self.user_name = dj_session.get('insta_name')
self.dj_user = None
self.session_blocks[session_name].add(self)
self.joined_session.append(session_name)
self.session_list_update(session_name)
send_data = {}
send_data['type'] = 'code_update'
            send_data['data'] = self.session_code_storage.get(session_name, '')  # avoid a KeyError for a brand-new session
self.send(json.dumps(send_data))
elif recv_data['type']=='code_update':
session_name = recv_data['session']
send_data = {}
send_data['type'] = 'code_update'
send_data['data'] = recv_data['data']
self.session_code_storage[session_name] = recv_data['data']
self.broadcast(self.session_blocks[session_name] - set([self]), json.dumps(send_data))
elif recv_data['type']=='chat_message_send':
session_name = recv_data['session']
send_data = {}
send_data['type'] = 'chat_message_receive'
chat_data = {'username': self.user_name}
if self.dj_user:
|
chat_data['message'] = recv_data['data']
send_data['data'] = json.dumps(chat_data)
self.broadcast(self.session_blocks[session_name], json.dumps(send_data))
elif recv_data['type']=='run_code':
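            # Hand the submitted code to the Django backend for execution
            # and broadcast the result to everyone in the session.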
session_name = recv_data['session']
code = recv_data['data']
language = recv_data['language']
post_data = urlencode({'code': code, 'language': language})
result = yield self.http_client.fetch(DJ_BACKEND + '/pair_session/run_code/',
method='POST', body=post_data)
result_data = {'result': result.body}
send_data = {}
send_data['type'] = 'run_code_result'
send_data['data'] = json.dumps(result_data)
self.broadcast(self.session_blocks[session_name], json.dumps(send_data))
def on_close(self):
for session in self.joined_session:
self.session_blocks[session].remove(self)
self.session_list_update(session)
def session_list_update(self, session_name):
session_list = []
for session in self.session_blocks[session_name]:
user = {'username': session.user_name}
if session.dj_user:
user['avatar_url'] = session.dj_user['avatar_url']
session_list.append(user)
send_data = {}
send_data['type'] = 'session_list_update';
send_data['data'] = json.dumps(session_list)
self.broadcast(self.session_blocks[session_name], json.dumps(send_data))
if __name__ == "__main__":
import logging
logging.getLogger().setLevel(logging.INFO)
# 1. Create chat router
ChatRouter = sockjs.tornado.SockJSRouter(ChatConnection, '/chat')
# 2. Create Tornado application
app = tornado.web.Application(
[(r"/", IndexHandler)] + ChatRouter.urls,
debug=True
)
    # 3. Make Tornado app listen on port 8888
app.listen(8888)
# 4. Start IOLoop
tornado.ioloop.IOLoop.instance().start()
| chat_data['avatar_url'] = self.dj_user['avatar_url'] | conditional_block |
sock_serv.py | # -*- coding: utf-8 -*-
"""
Simple sockjs-tornado chat application. By default will listen on port 8888.
"""
import sys
import os
import json
from urllib import urlencode
import tornado.ioloop
import tornado.web
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
import sockjs.tornado
sys.path.append( os.path.join(os.path.dirname(__file__), '..') )
os.environ['DJANGO_SETTINGS_MODULE'] = 'dj_backend.settings'
from importlib import import_module
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
DJ_BACKEND = 'http://localhost:8000'
try:
from .local_sock_settings import *
except ImportError:
pass
class IndexHandler(tornado.web.RequestHandler):
"""Regular HTTP handler to serve the chatroom page"""
def get(self):
self.render('index.html')
class ChatConnection(sockjs.tornado.SockJSConnection):
"""Chat connection implementation"""
session_blocks = dict()
session_code_storage = dict()
http_client = AsyncHTTPClient()
def on_open(self, info):
self.joined_session = []
@gen.coroutine
def on_message(self, message):
| send_data = {}
send_data['type'] = 'code_update'
            send_data['data'] = self.session_code_storage.get(session_name, '')  # avoid a KeyError for a brand-new session
self.send(json.dumps(send_data))
elif recv_data['type']=='code_update':
session_name = recv_data['session']
send_data = {}
send_data['type'] = 'code_update'
send_data['data'] = recv_data['data']
self.session_code_storage[session_name] = recv_data['data']
self.broadcast(self.session_blocks[session_name] - set([self]), json.dumps(send_data))
elif recv_data['type']=='chat_message_send':
session_name = recv_data['session']
send_data = {}
send_data['type'] = 'chat_message_receive'
chat_data = {'username': self.user_name}
if self.dj_user:
chat_data['avatar_url'] = self.dj_user['avatar_url']
chat_data['message'] = recv_data['data']
send_data['data'] = json.dumps(chat_data)
self.broadcast(self.session_blocks[session_name], json.dumps(send_data))
elif recv_data['type']=='run_code':
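            # Hand the submitted code to the Django backend for execution
            # and broadcast the result to everyone in the session.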
session_name = recv_data['session']
code = recv_data['data']
language = recv_data['language']
post_data = urlencode({'code': code, 'language': language})
result = yield self.http_client.fetch(DJ_BACKEND + '/pair_session/run_code/',
method='POST', body=post_data)
result_data = {'result': result.body}
send_data = {}
send_data['type'] = 'run_code_result'
send_data['data'] = json.dumps(result_data)
self.broadcast(self.session_blocks[session_name], json.dumps(send_data))
def on_close(self):
for session in self.joined_session:
self.session_blocks[session].remove(self)
self.session_list_update(session)
def session_list_update(self, session_name):
session_list = []
for session in self.session_blocks[session_name]:
user = {'username': session.user_name}
if session.dj_user:
user['avatar_url'] = session.dj_user['avatar_url']
session_list.append(user)
send_data = {}
send_data['type'] = 'session_list_update';
send_data['data'] = json.dumps(session_list)
self.broadcast(self.session_blocks[session_name], json.dumps(send_data))
if __name__ == "__main__":
import logging
logging.getLogger().setLevel(logging.INFO)
# 1. Create chat router
ChatRouter = sockjs.tornado.SockJSRouter(ChatConnection, '/chat')
# 2. Create Tornado application
app = tornado.web.Application(
[(r"/", IndexHandler)] + ChatRouter.urls,
debug=True
)
    # 3. Make Tornado app listen on port 8888
app.listen(8888)
# 4. Start IOLoop
tornado.ioloop.IOLoop.instance().start()
| recv_data = json.loads(message)
if recv_data['type']=='session_join':
session_name = recv_data['data']
            if session_name not in self.session_blocks:
self.session_blocks[session_name] = set()
session_id = self.session.conn_info.cookies['sessionid'].value
dj_session = SessionStore(session_key=session_id)
dj_user_id = dj_session.get('_auth_user_id')
if dj_user_id:
user_response = yield self.http_client.fetch(DJ_BACKEND + '/api/users/%d/?format=json' % dj_user_id)
dj_user = json.loads(user_response.body)
self.dj_user = dj_user
self.user_name = self.dj_user['username']
else:
self.user_name = dj_session.get('insta_name')
self.dj_user = None
self.session_blocks[session_name].add(self)
self.joined_session.append(session_name)
self.session_list_update(session_name) | identifier_body |
canteenie_v1.py | #!/usr/bin/env python3
"""canteenie.py: A small python script that prints today's canteen/mensa menu for FAU on console."""
import requests
import datetime
import argparse
from lxml import html
from colorama import Fore, Style
import textwrap
import xmascc
# command line arguments
parser = argparse.ArgumentParser(description='A small python script that prints today\'s canteen/mensa menu for FAU on console.')
parser.add_argument('-m','--mensa', help='for which mensa? (lmpl: Erlangen Langemarckplatz (default), sued: Erlangen Süd, isch: Nürnberg Insel Schütt)', required=False, default="lmpl", choices=['lmpl', 'sued', 'isch'])
parser.add_argument('-l','--lite', help='disable ascii art header and color (lite view)', required=False, default=False, action='store_true')
args = vars(parser.parse_args())
# get html content from webpage
page = requests.get('http://www.werkswelt.de/?id=%s' %args['mensa'])
tree = html.fromstring(page.content)
menu = tree.xpath('/html/body/div[3]/div/div[2]/div[2]/text()')
# join to string and tidy up the text
menu_str = ' '.join(menu) # join list to one string
menu_str = menu_str.replace('\xa0', ' ') # remove no break space
menu_str = menu_str.replace('\n', ' ') # remove line feed
menu_str = menu_str.replace('\r', ' ') # remove carriage return
menu_str = " ".join(menu_str.split()) # remove more than one space
# count amount of meals
meal_count = menu_str.count("Essen")
meal_special_count = menu_str.count("Aktionsessen")
# print header
now = datetime.datetime.now()
if not args['lite']: print(Fore.YELLOW + '', end="")
if not args['lite']:
print("\t ")
print("\t __ __ ")
print("\t| \/ | ___ _ __ ___ __ _ ")
print("\t| |\/| |/ _ \ '_ \/ __|/ _` |")
print("\t| | | | __/ | | \__ \ (_| |")
print("\t|_| |_|\___|_| |_|___/\__,_|")
print("\t ")
if not args['lite']: print(Style.RESET_ALL + '', end="")
if not args['lite']: print(Fore.GREEN + '', end="")
if not args['lite']: print("\t", end='')
print("////////", now.strftime("%d.%m.%Y"),"/////////")
if not args['lite']: print(Style.RESET_ALL + '', end="")
print("")
def wra | al_string):
prefix = "\t\t"
preferredWidth = 105
wrapper = textwrap.TextWrapper(subsequent_indent=prefix, width=preferredWidth)
print(wrapper.fill(meal_string))
return
# print normal meals
i = 1
while i < meal_count +1:
if "Essen %d" %i in menu_str: # check for missing menu
slice_amount = -8
if "- €" in menu_str.split("Essen %d" %i,1)[1].split("(Gäste)",1)[0][:-8]: # check for missing price
slice_amount = -5
if not args['lite']: print(Fore.CYAN + '', end="")
if not args['lite']: print("\t", end='')
meal_string = "%d\t" %i + menu_str.split("Essen %d" %i,1)[1].split("(Gäste)",1)[0][:slice_amount]
if not args['lite']:
wrap(meal_string)
else:
print(meal_string)
if not args['lite']: print(Style.RESET_ALL + '', end="")
i += 1
else:
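        # a gap in the numbering: stretch the loop bound so later meals are still reached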
meal_count += 1
i += 1
# print special meals
if meal_special_count != 0:
print("")
i = 1
while i < meal_special_count + 1:
if "Aktionsessen %d" %i in menu_str: # check for missing menu
slice_amount = -8
if "- €" in menu_str.split("Aktionsessen %d" %i,1)[1].split("(Gäste)",1)[0][:-8]: # check for missing price
slice_amount = -5
if not args['lite']: print(Fore.BLUE + '', end="")
if not args['lite']: print("\t", end='')
meal_special_string= "A%d\t" %i + menu_str.split("Aktionsessen %d" %i,1)[1].split("(Gäste)",1)[0][:slice_amount]
if not args['lite']:
wrap(meal_special_string)
else:
print(meal_special_string)
if not args['lite']: print(Style.RESET_ALL + '', end="")
i += 1
else:
meal_special_count += 1
i += 1
print("")
if not args['lite']: print("")
#xmascc
#if not args['lite']: print(Fore.MAGENTA + '', end="")
#if not args['lite']: print("\t", end='')
#print(xmascc.get_countdown())
#if not args['lite']: print(Style.RESET_ALL + '', end="")
#print("")
| p(me | identifier_name |
canteenie_v1.py | #!/usr/bin/env python3
"""canteenie.py: A small python script that prints today's canteen/mensa menu for FAU on console."""
import requests
import datetime
import argparse
from lxml import html
from colorama import Fore, Style
import textwrap
import xmascc
# command line arguments
parser = argparse.ArgumentParser(description='A small python script that prints today\'s canteen/mensa menu for FAU on console.')
parser.add_argument('-m','--mensa', help='for which mensa? (lmpl: Erlangen Langemarckplatz (default), sued: Erlangen Süd, isch: Nürnberg Insel Schütt)', required=False, default="lmpl", choices=['lmpl', 'sued', 'isch'])
parser.add_argument('-l','--lite', help='disable ascii art header and color (lite view)', required=False, default=False, action='store_true')
args = vars(parser.parse_args())
# get html content from webpage | # join to string and tidy up the text
menu_str = ' '.join(menu) # join list to one string
menu_str = menu_str.replace('\xa0', ' ') # remove no break space
menu_str = menu_str.replace('\n', ' ') # remove line feed
menu_str = menu_str.replace('\r', ' ') # remove carriage return
menu_str = " ".join(menu_str.split()) # remove more than one space
# count amount of meals
meal_count = menu_str.count("Essen")
meal_special_count = menu_str.count("Aktionsessen")
# print header
now = datetime.datetime.now()
if not args['lite']: print(Fore.YELLOW + '', end="")
if not args['lite']:
print("\t ")
print("\t __ __ ")
print("\t| \/ | ___ _ __ ___ __ _ ")
print("\t| |\/| |/ _ \ '_ \/ __|/ _` |")
print("\t| | | | __/ | | \__ \ (_| |")
print("\t|_| |_|\___|_| |_|___/\__,_|")
print("\t ")
if not args['lite']: print(Style.RESET_ALL + '', end="")
if not args['lite']: print(Fore.GREEN + '', end="")
if not args['lite']: print("\t", end='')
print("////////", now.strftime("%d.%m.%Y"),"/////////")
if not args['lite']: print(Style.RESET_ALL + '', end="")
print("")
def wrap(meal_string):
prefix = "\t\t"
preferredWidth = 105
wrapper = textwrap.TextWrapper(subsequent_indent=prefix, width=preferredWidth)
print(wrapper.fill(meal_string))
return
# print normal meals
i = 1
while i < meal_count +1:
if "Essen %d" %i in menu_str: # check for missing menu
slice_amount = -8
if "- €" in menu_str.split("Essen %d" %i,1)[1].split("(Gäste)",1)[0][:-8]: # check for missing price
slice_amount = -5
if not args['lite']: print(Fore.CYAN + '', end="")
if not args['lite']: print("\t", end='')
meal_string = "%d\t" %i + menu_str.split("Essen %d" %i,1)[1].split("(Gäste)",1)[0][:slice_amount]
if not args['lite']:
wrap(meal_string)
else:
print(meal_string)
if not args['lite']: print(Style.RESET_ALL + '', end="")
i += 1
else:
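        # a gap in the numbering: stretch the loop bound so later meals are still reached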
meal_count += 1
i += 1
# print special meals
if meal_special_count != 0:
print("")
i = 1
while i < meal_special_count + 1:
if "Aktionsessen %d" %i in menu_str: # check for missing menu
slice_amount = -8
if "- €" in menu_str.split("Aktionsessen %d" %i,1)[1].split("(Gäste)",1)[0][:-8]: # check for missing price
slice_amount = -5
if not args['lite']: print(Fore.BLUE + '', end="")
if not args['lite']: print("\t", end='')
meal_special_string= "A%d\t" %i + menu_str.split("Aktionsessen %d" %i,1)[1].split("(Gäste)",1)[0][:slice_amount]
if not args['lite']:
wrap(meal_special_string)
else:
print(meal_special_string)
if not args['lite']: print(Style.RESET_ALL + '', end="")
i += 1
else:
meal_special_count += 1
i += 1
print("")
if not args['lite']: print("")
#xmascc
#if not args['lite']: print(Fore.MAGENTA + '', end="")
#if not args['lite']: print("\t", end='')
#print(xmascc.get_countdown())
#if not args['lite']: print(Style.RESET_ALL + '', end="")
#print("") | page = requests.get('http://www.werkswelt.de/?id=%s' %args['mensa'])
tree = html.fromstring(page.content)
menu = tree.xpath('/html/body/div[3]/div/div[2]/div[2]/text()')
| random_line_split |
canteenie_v1.py | #!/usr/bin/env python3
"""canteenie.py: A small python script that prints today's canteen/mensa menu for FAU on console."""
import requests
import datetime
import argparse
from lxml import html
from colorama import Fore, Style
import textwrap
import xmascc
# command line arguments
parser = argparse.ArgumentParser(description='A small python script that prints today\'s canteen/mensa menu for FAU on console.')
parser.add_argument('-m','--mensa', help='for which mensa? (lmpl: Erlangen Langemarckplatz (default), sued: Erlangen Süd, isch: Nürnberg Insel Schütt)', required=False, default="lmpl", choices=['lmpl', 'sued', 'isch'])
parser.add_argument('-l','--lite', help='disable ascii art header and color (lite view)', required=False, default=False, action='store_true')
args = vars(parser.parse_args())
# get html content from webpage
page = requests.get('http://www.werkswelt.de/?id=%s' %args['mensa'])
tree = html.fromstring(page.content)
menu = tree.xpath('/html/body/div[3]/div/div[2]/div[2]/text()')
# join to string and tidy up the text
menu_str = ' '.join(menu) # join list to one string
menu_str = menu_str.replace('\xa0', ' ') # remove no break space
menu_str = menu_str.replace('\n', ' ') # remove line feed
menu_str = menu_str.replace('\r', ' ') # remove carriage return
menu_str = " ".join(menu_str.split()) # remove more than one space
# count amount of meals
meal_count = menu_str.count("Essen")
meal_special_count = menu_str.count("Aktionsessen")
# print header
now = datetime.datetime.now()
if not args['lite']: print(Fore.YELLOW + '', end="")
if not args['lite']:
print("\t ")
print("\t __ __ ")
print("\t| \/ | ___ _ __ ___ __ _ ")
print("\t| |\/| |/ _ \ '_ \/ __|/ _` |")
print("\t| | | | __/ | | \__ \ (_| |")
print("\t|_| |_|\___|_| |_|___/\__,_|")
print("\t ")
if not args['lite']: print(Style.RESET_ALL + '', end="")
if not args['lite']: print(Fore.GREEN + '', end="")
if not args['lite']: print("\t", end='')
print("////////", now.strftime("%d.%m.%Y"),"/////////")
if not args['lite']: print(Style.RESET_ALL + '', end="")
print("")
def wrap(meal_string):
prefix = "\t\t"
preferredWidth = 105
wrapper = textwrap.TextWrapper(subsequent_indent=prefix, width=preferredWidth)
print(wrapper.fill(meal_string))
return
# print normal meals
i = 1
while i < meal_count +1:
if "Essen %d" %i in menu_str: # check for missing menu
slice_amount = -8
if "- €" in menu_str.split("Essen %d" %i,1)[1].split("(Gäste)",1)[0][:-8]: # check for missing price
slice_amount = -5
if not args['lite']: print(Fore.CYAN + '', end="")
if not args['lite']: print("\t", end='')
meal_string = "%d\t" %i + menu_str.split("Essen %d" %i,1)[1].split("(Gäste)",1)[0][:slice_amount]
if not args['lite']:
wrap(meal_string)
else:
print(m | ot args['lite']: print(Style.RESET_ALL + '', end="")
i += 1
else:
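        # a gap in the numbering: stretch the loop bound so later meals are still reached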
meal_count += 1
i += 1
# print special meals
if meal_special_count != 0:
print("")
i = 1
while i < meal_special_count + 1:
if "Aktionsessen %d" %i in menu_str: # check for missing menu
slice_amount = -8
if "- €" in menu_str.split("Aktionsessen %d" %i,1)[1].split("(Gäste)",1)[0][:-8]: # check for missing price
slice_amount = -5
if not args['lite']: print(Fore.BLUE + '', end="")
if not args['lite']: print("\t", end='')
meal_special_string= "A%d\t" %i + menu_str.split("Aktionsessen %d" %i,1)[1].split("(Gäste)",1)[0][:slice_amount]
if not args['lite']:
wrap(meal_special_string)
else:
print(meal_special_string)
if not args['lite']: print(Style.RESET_ALL + '', end="")
i += 1
else:
meal_special_count += 1
i += 1
print("")
if not args['lite']: print("")
#xmascc
#if not args['lite']: print(Fore.MAGENTA + '', end="")
#if not args['lite']: print("\t", end='')
#print(xmascc.get_countdown())
#if not args['lite']: print(Style.RESET_ALL + '', end="")
#print("")
| eal_string)
if n | conditional_block |
canteenie_v1.py | #!/usr/bin/env python3
"""canteenie.py: A small python script that prints today's canteen/mensa menu for FAU on console."""
import requests
import datetime
import argparse
from lxml import html
from colorama import Fore, Style
import textwrap
import xmascc
# command line arguments
parser = argparse.ArgumentParser(description='A small python script that prints today\'s canteen/mensa menu for FAU on console.')
parser.add_argument('-m','--mensa', help='for which mensa? (lmpl: Erlangen Langemarckplatz (default), sued: Erlangen Süd, isch: Nürnberg Insel Schütt)', required=False, default="lmpl", choices=['lmpl', 'sued', 'isch'])
parser.add_argument('-l','--lite', help='disable ascii art header and color (lite view)', required=False, default=False, action='store_true')
args = vars(parser.parse_args())
# get html content from webpage
page = requests.get('http://www.werkswelt.de/?id=%s' %args['mensa'])
tree = html.fromstring(page.content)
menu = tree.xpath('/html/body/div[3]/div/div[2]/div[2]/text()')
# join to string and tidy up the text
menu_str = ' '.join(menu) # join list to one string
menu_str = menu_str.replace('\xa0', ' ') # remove no break space
menu_str = menu_str.replace('\n', ' ') # remove line feed
menu_str = menu_str.replace('\r', ' ') # remove carriage return
menu_str = " ".join(menu_str.split()) # remove more than one space
# count amount of meals
meal_count = menu_str.count("Essen")
meal_special_count = menu_str.count("Aktionsessen")
# print header
now = datetime.datetime.now()
if not args['lite']: print(Fore.YELLOW + '', end="")
if not args['lite']:
print("\t ")
print("\t __ __ ")
print("\t| \/ | ___ _ __ ___ __ _ ")
print("\t| |\/| |/ _ \ '_ \/ __|/ _` |")
print("\t| | | | __/ | | \__ \ (_| |")
print("\t|_| |_|\___|_| |_|___/\__,_|")
print("\t ")
if not args['lite']: print(Style.RESET_ALL + '', end="")
if not args['lite']: print(Fore.GREEN + '', end="")
if not args['lite']: print("\t", end='')
print("////////", now.strftime("%d.%m.%Y"),"/////////")
if not args['lite']: print(Style.RESET_ALL + '', end="")
print("")
def wrap(meal_string):
pre | print normal meals
i = 1
while i < meal_count +1:
if "Essen %d" %i in menu_str: # check for missing menu
slice_amount = -8
if "- €" in menu_str.split("Essen %d" %i,1)[1].split("(Gäste)",1)[0][:-8]: # check for missing price
slice_amount = -5
if not args['lite']: print(Fore.CYAN + '', end="")
if not args['lite']: print("\t", end='')
meal_string = "%d\t" %i + menu_str.split("Essen %d" %i,1)[1].split("(Gäste)",1)[0][:slice_amount]
if not args['lite']:
wrap(meal_string)
else:
print(meal_string)
if not args['lite']: print(Style.RESET_ALL + '', end="")
i += 1
else:
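        # a gap in the numbering: stretch the loop bound so later meals are still reached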
meal_count += 1
i += 1
# print special meals
if meal_special_count != 0:
print("")
i = 1
while i < meal_special_count + 1:
if "Aktionsessen %d" %i in menu_str: # check for missing menu
slice_amount = -8
if "- €" in menu_str.split("Aktionsessen %d" %i,1)[1].split("(Gäste)",1)[0][:-8]: # check for missing price
slice_amount = -5
if not args['lite']: print(Fore.BLUE + '', end="")
if not args['lite']: print("\t", end='')
meal_special_string= "A%d\t" %i + menu_str.split("Aktionsessen %d" %i,1)[1].split("(Gäste)",1)[0][:slice_amount]
if not args['lite']:
wrap(meal_special_string)
else:
print(meal_special_string)
if not args['lite']: print(Style.RESET_ALL + '', end="")
i += 1
else:
meal_special_count += 1
i += 1
print("")
if not args['lite']: print("")
#xmascc
#if not args['lite']: print(Fore.MAGENTA + '', end="")
#if not args['lite']: print("\t", end='')
#print(xmascc.get_countdown())
#if not args['lite']: print(Style.RESET_ALL + '', end="")
#print("")
| fix = "\t\t"
preferredWidth = 105
wrapper = textwrap.TextWrapper(subsequent_indent=prefix, width=preferredWidth)
print(wrapper.fill(meal_string))
return
# | identifier_body |
group-responses.service.ts | import { HttpClient } from '@angular/common/http';
import { Injectable } from '@angular/core';
@Injectable({
providedIn: 'root'
})
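// Thin client for the /group-responses REST endpoints. Hypothetical usage:
//   const responses = await groupResponsesService.list('group-1')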
export class | {
constructor(
private httpClient:HttpClient
) { }
async list(groupId:string) {
return <Array<any>>await this.httpClient.get(`/group-responses/list/${groupId}`).toPromise()
}
async query(groupId:string, query:any) {
return <Array<any>>await this.httpClient.post(`/group-responses/query/${groupId}`, { query }).toPromise()
}
async getResponse(groupId:string, deviceId:string) {
return <any>await this.httpClient.get(`/group-responses/read/${groupId}/${deviceId}`).toPromise()
}
  async updateResponse(groupId:string, device:any) {
return <any>await this.httpClient.post(`/group-responses/update/${groupId}`, {device}).toPromise()
}
async deleteResponse(groupId:string, deviceId:string) {
return <any>await this.httpClient.get(`/group-responses/delete/${groupId}/${deviceId}`).toPromise()
}
  async createResponse(groupId:string, response:any = {}) {
return <any>await this.httpClient.post(`/group-responses/create/${groupId}`, {body: {
responseData: response
}}).toPromise()
}
}
| GroupResponsesService | identifier_name |
group-responses.service.ts | import { HttpClient } from '@angular/common/http';
import { Injectable } from '@angular/core';
@Injectable({
providedIn: 'root'
})
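// Thin client for the /group-responses REST endpoints. Hypothetical usage:
//   const responses = await groupResponsesService.list('group-1')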
export class GroupResponsesService {
constructor(
private httpClient:HttpClient
) { }
async list(groupId:string) {
return <Array<any>>await this.httpClient.get(`/group-responses/list/${groupId}`).toPromise()
}
async query(groupId:string, query:any) {
return <Array<any>>await this.httpClient.post(`/group-responses/query/${groupId}`, { query }).toPromise() | }
  async updateResponse(groupId:string, device:any) {
return <any>await this.httpClient.post(`/group-responses/update/${groupId}`, {device}).toPromise()
}
async deleteResponse(groupId:string, deviceId:string) {
return <any>await this.httpClient.get(`/group-responses/delete/${groupId}/${deviceId}`).toPromise()
}
  async createResponse(groupId:string, response:any = {}) {
return <any>await this.httpClient.post(`/group-responses/create/${groupId}`, {body: {
responseData: response
}}).toPromise()
}
} | }
async getResponse(groupId:string, deviceId:string) {
return <any>await this.httpClient.get(`/group-responses/read/${groupId}/${deviceId}`).toPromise() | random_line_split |
meta.rs | // Copyright (C) 2015 Steven Allen
//
// This file is part of gazetta.
//
// This program is free software: you can redistribute it and/or modify it under the terms of the
// GNU General Public License as published by the Free Software Foundation version 3 of the
// License.
//
// This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
// without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
// the GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along with this program. If
// not, see <http://www.gnu.org/licenses/>.
//
use gazetta_core::model::Meta;
use gazetta_core::yaml::Hash;
use crate::link::Link;
use crate::person::Person;
use crate::util::BubbleResult;
use crate::yaml::*;
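// NAV, AUTHOR, and ABOUT used below are Yaml key constants glob-imported from crate::yaml.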
pub struct SourceMeta {
pub nav: Vec<Link>,
pub author: Person,
}
impl Meta for SourceMeta {
fn from_yaml(mut meta: Hash) -> Result<SourceMeta, &'static str> |
}
pub struct EntryMeta {
pub author: Option<Person>,
pub about: Option<Person>,
}
impl Meta for EntryMeta {
fn from_yaml(mut meta: Hash) -> Result<EntryMeta, &'static str> {
Ok(EntryMeta {
author: meta
.remove(&AUTHOR)
.map(Person::from_yaml)
.bubble_result()?,
about: meta.remove(&ABOUT).map(Person::from_yaml).bubble_result()?,
})
}
}
| {
Ok(SourceMeta {
nav: meta
.remove(&NAV)
.map(Link::many_from_yaml)
.bubble_result()?
.unwrap_or_else(Vec::new),
author: meta
.remove(&AUTHOR)
.map(Person::from_yaml)
.bubble_result()?
.ok_or("websites must have authors")?,
})
} | identifier_body |
meta.rs | // Copyright (C) 2015 Steven Allen
//
// This file is part of gazetta.
//
// This program is free software: you can redistribute it and/or modify it under the terms of the
// GNU General Public License as published by the Free Software Foundation version 3 of the
// License.
//
// This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
// without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
// the GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along with this program. If
// not, see <http://www.gnu.org/licenses/>.
//
use gazetta_core::model::Meta;
use gazetta_core::yaml::Hash;
use crate::link::Link;
use crate::person::Person;
use crate::util::BubbleResult;
use crate::yaml::*;
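// NAV, AUTHOR, and ABOUT used below are Yaml key constants glob-imported from crate::yaml.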
pub struct SourceMeta {
pub nav: Vec<Link>,
pub author: Person,
}
impl Meta for SourceMeta {
fn from_yaml(mut meta: Hash) -> Result<SourceMeta, &'static str> {
Ok(SourceMeta {
nav: meta
.remove(&NAV)
.map(Link::many_from_yaml)
.bubble_result()?
.unwrap_or_else(Vec::new),
author: meta | .remove(&AUTHOR)
.map(Person::from_yaml)
.bubble_result()?
.ok_or("websites must have authors")?,
})
}
}
pub struct EntryMeta {
pub author: Option<Person>,
pub about: Option<Person>,
}
impl Meta for EntryMeta {
fn from_yaml(mut meta: Hash) -> Result<EntryMeta, &'static str> {
Ok(EntryMeta {
author: meta
.remove(&AUTHOR)
.map(Person::from_yaml)
.bubble_result()?,
about: meta.remove(&ABOUT).map(Person::from_yaml).bubble_result()?,
})
}
} | random_line_split |
|
meta.rs | // Copyright (C) 2015 Steven Allen
//
// This file is part of gazetta.
//
// This program is free software: you can redistribute it and/or modify it under the terms of the
// GNU General Public License as published by the Free Software Foundation version 3 of the
// License.
//
// This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
// without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
// the GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along with this program. If
// not, see <http://www.gnu.org/licenses/>.
//
use gazetta_core::model::Meta;
use gazetta_core::yaml::Hash;
use crate::link::Link;
use crate::person::Person;
use crate::util::BubbleResult;
use crate::yaml::*;
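// NAV, AUTHOR, and ABOUT used below are Yaml key constants glob-imported from crate::yaml.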
pub struct SourceMeta {
pub nav: Vec<Link>,
pub author: Person,
}
impl Meta for SourceMeta {
fn from_yaml(mut meta: Hash) -> Result<SourceMeta, &'static str> {
Ok(SourceMeta {
nav: meta
.remove(&NAV)
.map(Link::many_from_yaml)
.bubble_result()?
.unwrap_or_else(Vec::new),
author: meta
.remove(&AUTHOR)
.map(Person::from_yaml)
.bubble_result()?
.ok_or("websites must have authors")?,
})
}
}
pub struct | {
pub author: Option<Person>,
pub about: Option<Person>,
}
impl Meta for EntryMeta {
fn from_yaml(mut meta: Hash) -> Result<EntryMeta, &'static str> {
Ok(EntryMeta {
author: meta
.remove(&AUTHOR)
.map(Person::from_yaml)
.bubble_result()?,
about: meta.remove(&ABOUT).map(Person::from_yaml).bubble_result()?,
})
}
}
| EntryMeta | identifier_name |
anitoonstv.py | # -*- coding: utf-8 -*-
import re
from channels import renumbertools
from channelselector import get_thumb
from core import httptools
from core import scrapertools
from core import servertools
from core import tmdb
from core.item import Item
from platformcode import config, logger
from channels import autoplay
IDIOMAS = {'latino': 'Latino'}
list_language = IDIOMAS.values()
list_servers = ['openload',
'okru',
'netutv',
'rapidvideo'
]
list_quality = ['default']
host = "http://www.anitoonstv.com"
def ma | tem):
logger.info()
thumb_series = get_thumb("channels_tvshow.png")
autoplay.init(item.channel, list_servers, list_quality)
itemlist = list()
itemlist.append(Item(channel=item.channel, action="lista", title="Anime", url=host,
thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, action="lista", title="Series Animadas", url=host,
thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, action="lista", title="Novedades", url=host,
thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, action="lista", title="Pokemon", url=host,
thumbnail=thumb_series))
itemlist = renumbertools.show_option(item.channel, itemlist)
autoplay.show_option(item.channel, itemlist)
return itemlist
def lista(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
if 'Novedades' in item.title:
patron_cat = '<div class="activos"><h3>(.+?)<\/h2><\/a><\/div>'
patron = '<a href="(.+?)"><h2><span>(.+?)<\/span>'
else:
patron_cat = '<li><a href=.+?>'
patron_cat += str(item.title)
patron_cat += '<\/a><div>(.+?)<\/div><\/li>'
patron = "<a href='(.+?)'>(.+?)<\/a>"
data = scrapertools.find_single_match(data, patron_cat)
matches = scrapertools.find_multiple_matches(data, patron)
for link, name in matches:
if "Novedades" in item.title:
url = link
title = name.capitalize()
else:
url = host + link
title = name
if ":" in title:
cad = title.split(":")
show = cad[0]
else:
if "(" in title:
cad = title.split("(")
if "Super" in title:
show = cad[1]
show = show.replace(")", "")
else:
show = cad[0]
else:
show = title
if "&" in show:
cad = title.split("xy")
show = cad[0]
context1=[renumbertools.context(item), autoplay.context]
itemlist.append(
item.clone(title=title, url=url, plot=show, action="episodios", show=show,
context=context1))
tmdb.set_infoLabels(itemlist)
return itemlist
def episodios(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
patron = '<div class="pagina">(.+?)<\/div><div id="fade".+?>'
data = scrapertools.find_single_match(data, patron)
patron_caps = "<a href='(.+?)'>Capitulo: (.+?) - (.+?)<\/a>"
matches = scrapertools.find_multiple_matches(data, patron_caps)
show = scrapertools.find_single_match(data, '<span>Titulo.+?<\/span>(.+?)<br><span>')
scrapedthumbnail = scrapertools.find_single_match(data, "<img src='(.+?)'.+?>")
scrapedplot = scrapertools.find_single_match(data, '<span>Descripcion.+?<\/span>(.+?)<br>')
i = 0
temp = 0
for link, cap, name in matches:
if int(cap) == 1:
temp = temp + 1
if int(cap) < 10:
cap = "0" + cap
season = temp
episode = int(cap)
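        # remap the flat episode counter onto the trakt-style season/episode
        # numbering kept in the channel's renumber table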
season, episode = renumbertools.numbered_for_tratk(
item.channel, item.show, season, episode)
date = name
title = "%sx%s %s (%s)" % (season, str(episode).zfill(2), "Episodio %s" % episode, date)
# title = str(temp)+"x"+cap+" "+name
url = host + "/" + link
if "NO DISPONIBLE" not in name:
itemlist.append(Item(channel=item.channel, action="findvideos", title=title, thumbnail=scrapedthumbnail,
plot=scrapedplot, url=url, show=show))
if config.get_videolibrary_support() and len(itemlist) > 0:
itemlist.append(Item(channel=item.channel, title="Añadir esta serie a la videoteca", url=item.url,
action="add_serie_to_library", extra="episodios", show=show))
return itemlist
def findvideos(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data1 = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
data_vid = scrapertools.find_single_match(data1, '<div class="videos">(.+?)<\/div><div .+?>')
# name = scrapertools.find_single_match(data,'<span>Titulo.+?<\/span>([^<]+)<br>')
scrapedplot = scrapertools.find_single_match(data, '<br><span>Descrip.+?<\/span>([^<]+)<br>')
scrapedthumbnail = scrapertools.find_single_match(data, '<div class="caracteristicas"><img src="([^<]+)">')
itemla = scrapertools.find_multiple_matches(data_vid, '<div class="serv">.+?-(.+?)-(.+?)<\/div><.+? src="(.+?)"')
for server, quality, url in itemla:
if "Calidad Alta" in quality:
quality = quality.replace("Calidad Alta", "HQ")
server = server.lower().strip()
if "ok" == server:
server = 'okru'
if "netu" == server:
continue
itemlist.append(item.clone(url=url, action="play", server=server, contentQuality=quality,
thumbnail=scrapedthumbnail, plot=scrapedplot,
title="Enlace encontrado en %s: [%s]" % (server.capitalize(), quality)))
autoplay.start(itemlist, item)
return itemlist
def play(item):
logger.info()
itemlist = []
    # Look for the video on the item's server ...
devuelve = servertools.findvideosbyserver(item.url, item.server)
if not devuelve:
        # ...if it's not found there, search all the available servers
devuelve = servertools.findvideos(item.url, skip=True)
if devuelve:
# logger.debug(devuelve)
itemlist.append(Item(channel=item.channel, title=item.contentTitle, action="play", server=devuelve[0][2],
url=devuelve[0][1], thumbnail=item.thumbnail))
return itemlist
| inlist(i | identifier_name |
anitoonstv.py | # -*- coding: utf-8 -*-
import re
from channels import renumbertools
from channelselector import get_thumb
from core import httptools
from core import scrapertools
from core import servertools
from core import tmdb
from core.item import Item
from platformcode import config, logger
from channels import autoplay
IDIOMAS = {'latino': 'Latino'}
list_language = IDIOMAS.values()
list_servers = ['openload',
'okru',
'netutv',
'rapidvideo'
]
list_quality = ['default']
host = "http://www.anitoonstv.com"
def mainlist(item):
logger.info()
thumb_series = get_thumb("channels_tvshow.png")
autoplay.init(item.channel, list_servers, list_quality)
itemlist = list()
itemlist.append(Item(channel=item.channel, action="lista", title="Anime", url=host,
thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, action="lista", title="Series Animadas", url=host,
thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, action="lista", title="Novedades", url=host,
thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, action="lista", title="Pokemon", url=host,
thumbnail=thumb_series))
itemlist = renumbertools.show_option(item.channel, itemlist)
autoplay.show_option(item.channel, itemlist)
return itemlist
def lista(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
if 'Novedades' in item.title:
patron_cat = '<div class="activos"><h3>(.+?)<\/h2><\/a><\/div>'
patron = '<a href="(.+?)"><h2><span>(.+?)<\/span>'
else:
patron_cat = '<li><a href=.+?>'
patron_cat += str(item.title)
patron_cat += '<\/a><div>(.+?)<\/div><\/li>'
patron = "<a href='(.+?)'>(.+?)<\/a>"
data = scrapertools.find_single_match(data, patron_cat)
matches = scrapertools.find_multiple_matches(data, patron)
for link, name in matches:
if "Novedades" in item.title:
url = link
title = name.capitalize()
else:
url = host + link
title = name
if ":" in title:
cad = title.split(":")
show = cad[0]
else:
if "(" in title:
cad = title.split("(")
if "Super" in title:
show = cad[1]
show = show.replace(")", "")
else:
show = cad[0]
else:
show = title
if "&" in show:
cad = title.split("xy")
show = cad[0]
context1=[renumbertools.context(item), autoplay.context]
itemlist.append(
item.clone(title=title, url=url, plot=show, action="episodios", show=show,
context=context1))
tmdb.set_infoLabels(itemlist)
return itemlist
def episodios(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
patron = '<div class="pagina">(.+?)<\/div><div id="fade".+?>'
data = scrapertools.find_single_match(data, patron)
patron_caps = "<a href='(.+?)'>Capitulo: (.+?) - (.+?)<\/a>"
matches = scrapertools.find_multiple_matches(data, patron_caps)
show = scrapertools.find_single_match(data, '<span>Titulo.+?<\/span>(.+?)<br><span>')
scrapedthumbnail = scrapertools.find_single_match(data, "<img src='(.+?)'.+?>")
scrapedplot = scrapertools.find_single_match(data, '<span>Descripcion.+?<\/span>(.+?)<br>')
i = 0
temp = 0
for link, cap, name in matches:
if int(cap) == 1:
temp = temp + 1
if int(cap) < 10:
cap = "0" + cap
season = temp
episode = int(cap)
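        # remap the flat episode counter onto the trakt-style season/episode
        # numbering kept in the channel's renumber table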
season, episode = renumbertools.numbered_for_tratk(
item.channel, item.show, season, episode)
date = name
title = "%sx%s %s (%s)" % (season, str(episode).zfill(2), "Episodio %s" % episode, date)
# title = str(temp)+"x"+cap+" "+name
url = host + "/" + link
if "NO DISPONIBLE" not in name:
itemlist.append(Item(channel=item.channel, action="findvideos", title=title, thumbnail=scrapedthumbnail,
plot=scrapedplot, url=url, show=show))
if config.get_videolibrary_support() and len(itemlist) > 0:
itemlist.append(Item(channel=item.channel, title="Añadir esta serie a la videoteca", url=item.url,
action="add_serie_to_library", extra="episodios", show=show))
return itemlist
def findvideos(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data1 = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
data_vid = scrapertools.find_single_match(data1, '<div class="videos">(.+?)<\/div><div .+?>')
# name = scrapertools.find_single_match(data,'<span>Titulo.+?<\/span>([^<]+)<br>')
scrapedplot = scrapertools.find_single_match(data, '<br><span>Descrip.+?<\/span>([^<]+)<br>')
scrapedthumbnail = scrapertools.find_single_match(data, '<div class="caracteristicas"><img src="([^<]+)">')
itemla = scrapertools.find_multiple_matches(data_vid, '<div class="serv">.+?-(.+?)-(.+?)<\/div><.+? src="(.+?)"')
for server, quality, url in itemla:
if "Calidad Alta" in quality:
qua | itemlist.append(item.clone(url=url, action="play", server=server, contentQuality=quality,
thumbnail=scrapedthumbnail, plot=scrapedplot,
title="Enlace encontrado en %s: [%s]" % (server.capitalize(), quality)))
autoplay.start(itemlist, item)
return itemlist
def play(item):
logger.info()
itemlist = []
    # Look for the video on the item's server ...
devuelve = servertools.findvideosbyserver(item.url, item.server)
if not devuelve:
        # ...if it's not found there, search all the available servers
devuelve = servertools.findvideos(item.url, skip=True)
if devuelve:
# logger.debug(devuelve)
itemlist.append(Item(channel=item.channel, title=item.contentTitle, action="play", server=devuelve[0][2],
url=devuelve[0][1], thumbnail=item.thumbnail))
return itemlist
| lity = quality.replace("Calidad Alta", "HQ")
server = server.lower().strip()
if "ok" == server:
server = 'okru'
if "netu" == server:
continue
| conditional_block |
anitoonstv.py | # -*- coding: utf-8 -*-
import re
from channels import renumbertools
from channelselector import get_thumb
from core import httptools
from core import scrapertools
from core import servertools
from core import tmdb
from core.item import Item
from platformcode import config, logger
from channels import autoplay
IDIOMAS = {'latino': 'Latino'}
list_language = IDIOMAS.values()
list_servers = ['openload',
'okru',
'netutv',
'rapidvideo'
]
list_quality = ['default']
host = "http://www.anitoonstv.com"
def mainlist(item):
logger.info()
thumb_series = get_thumb("channels_tvshow.png")
autoplay.init(item.channel, list_servers, list_quality)
itemlist = list()
itemlist.append(Item(channel=item.channel, action="lista", title="Anime", url=host,
thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, action="lista", title="Series Animadas", url=host,
thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, action="lista", title="Novedades", url=host,
thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, action="lista", title="Pokemon", url=host,
thumbnail=thumb_series))
itemlist = renumbertools.show_option(item.channel, itemlist)
autoplay.show_option(item.channel, itemlist)
return itemlist
def lista(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
if 'Novedades' in item.title:
patron_cat = '<div class="activos"><h3>(.+?)<\/h2><\/a><\/div>'
patron = '<a href="(.+?)"><h2><span>(.+?)<\/span>'
else:
patron_cat = '<li><a href=.+?>'
patron_cat += str(item.title)
patron_cat += '<\/a><div>(.+?)<\/div><\/li>'
patron = "<a href='(.+?)'>(.+?)<\/a>"
data = scrapertools.find_single_match(data, patron_cat)
matches = scrapertools.find_multiple_matches(data, patron)
for link, name in matches:
if "Novedades" in item.title:
url = link
title = name.capitalize()
else:
url = host + link
title = name
if ":" in title:
cad = title.split(":")
show = cad[0]
else:
if "(" in title:
cad = title.split("(")
if "Super" in title:
show = cad[1]
show = show.replace(")", "")
else:
show = cad[0]
else:
show = title
if "&" in show:
cad = title.split("xy")
show = cad[0]
context1=[renumbertools.context(item), autoplay.context]
itemlist.append(
item.clone(title=title, url=url, plot=show, action="episodios", show=show,
context=context1))
tmdb.set_infoLabels(itemlist)
return itemlist
def episodios(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
patron = '<div class="pagina">(.+?)<\/div><div id="fade".+?>'
data = scrapertools.find_single_match(data, patron)
patron_caps = "<a href='(.+?)'>Capitulo: (.+?) - (.+?)<\/a>"
matches = scrapertools.find_multiple_matches(data, patron_caps)
show = scrapertools.find_single_match(data, '<span>Titulo.+?<\/span>(.+?)<br><span>')
scrapedthumbnail = scrapertools.find_single_match(data, "<img src='(.+?)'.+?>")
scrapedplot = scrapertools.find_single_match(data, '<span>Descripcion.+?<\/span>(.+?)<br>')
i = 0
temp = 0
for link, cap, name in matches:
if int(cap) == 1:
temp = temp + 1
if int(cap) < 10:
cap = "0" + cap
season = temp
episode = int(cap)
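        # remap the flat episode counter onto the trakt-style season/episode
        # numbering kept in the channel's renumber table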
season, episode = renumbertools.numbered_for_tratk(
item.channel, item.show, season, episode)
date = name
title = "%sx%s %s (%s)" % (season, str(episode).zfill(2), "Episodio %s" % episode, date)
# title = str(temp)+"x"+cap+" "+name
url = host + "/" + link
if "NO DISPONIBLE" not in name:
itemlist.append(Item(channel=item.channel, action="findvideos", title=title, thumbnail=scrapedthumbnail,
plot=scrapedplot, url=url, show=show))
if config.get_videolibrary_support() and len(itemlist) > 0:
itemlist.append(Item(channel=item.channel, title="Añadir esta serie a la videoteca", url=item.url,
action="add_serie_to_library", extra="episodios", show=show))
return itemlist
def findvideos(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data1 = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
data_vid = scrapertools.find_single_match(data1, '<div class="videos">(.+?)<\/div><div .+?>')
# name = scrapertools.find_single_match(data,'<span>Titulo.+?<\/span>([^<]+)<br>')
scrapedplot = scrapertools.find_single_match(data, '<br><span>Descrip.+?<\/span>([^<]+)<br>')
scrapedthumbnail = scrapertools.find_single_match(data, '<div class="caracteristicas"><img src="([^<]+)">')
itemla = scrapertools.find_multiple_matches(data_vid, '<div class="serv">.+?-(.+?)-(.+?)<\/div><.+? src="(.+?)"')
for server, quality, url in itemla:
if "Calidad Alta" in quality:
quality = quality.replace("Calidad Alta", "HQ")
server = server.lower().strip()
if "ok" == server:
server = 'okru'
if "netu" == server:
continue
itemlist.append(item.clone(url=url, action="play", server=server, contentQuality=quality,
thumbnail=scrapedthumbnail, plot=scrapedplot,
title="Enlace encontrado en %s: [%s]" % (server.capitalize(), quality)))
autoplay.start(itemlist, item)
return itemlist
def play(item):
logger.info()
itemlist = []
| # ...sino lo encontramos buscamos en todos los servidores disponibles
devuelve = servertools.findvideos(item.url, skip=True)
if devuelve:
# logger.debug(devuelve)
itemlist.append(Item(channel=item.channel, title=item.contentTitle, action="play", server=devuelve[0][2],
url=devuelve[0][1], thumbnail=item.thumbnail))
    return itemlist | # Look for the video on the item's server ...
devuelve = servertools.findvideosbyserver(item.url, item.server)
if not devuelve: | random_line_split |
anitoonstv.py | # -*- coding: utf-8 -*-
import re
from channels import renumbertools
from channelselector import get_thumb
from core import httptools
from core import scrapertools
from core import servertools
from core import tmdb
from core.item import Item
from platformcode import config, logger
from channels import autoplay
IDIOMAS = {'latino': 'Latino'}
list_language = IDIOMAS.values()
list_servers = ['openload',
'okru',
'netutv',
'rapidvideo'
]
list_quality = ['default']
host = "http://www.anitoonstv.com"
def mainlist(item):
logger.info()
thumb_series = get_thumb("channels_tvshow.png")
autoplay.init(item.channel, list_servers, list_quality)
itemlist = list()
itemlist.append(Item(channel=item.channel, action="lista", title="Anime", url=host,
thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, action="lista", title="Series Animadas", url=host,
thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, action="lista", title="Novedades", url=host,
thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, action="lista", title="Pokemon", url=host,
thumbnail=thumb_series))
itemlist = renumbertools.show_option(item.channel, itemlist)
autoplay.show_option(item.channel, itemlist)
return itemlist
def lista(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
if 'Novedades' in item.title:
patron_cat = '<div class="activos"><h3>(.+?)<\/h2><\/a><\/div>'
patron = '<a href="(.+?)"><h2><span>(.+?)<\/span>'
else:
patron_cat = '<li><a href=.+?>'
patron_cat += str(item.title)
patron_cat += '<\/a><div>(.+?)<\/div><\/li>'
patron = "<a href='(.+?)'>(.+?)<\/a>"
data = scrapertools.find_single_match(data, patron_cat)
matches = scrapertools.find_multiple_matches(data, patron)
for link, name in matches:
if "Novedades" in item.title:
url = link
title = name.capitalize()
else:
url = host + link
title = name
if ":" in title:
cad = title.split(":")
show = cad[0]
else:
if "(" in title:
cad = title.split("(")
if "Super" in title:
show = cad[1]
show = show.replace(")", "")
else:
show = cad[0]
else:
show = title
if "&" in show:
cad = title.split("xy")
show = cad[0]
context1=[renumbertools.context(item), autoplay.context]
itemlist.append(
item.clone(title=title, url=url, plot=show, action="episodios", show=show,
context=context1))
tmdb.set_infoLabels(itemlist)
return itemlist
def episodios(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
patron = '<div class="pagina">(.+?)<\/div><div id="fade".+?>'
data = scrapertools.find_single_match(data, patron)
patron_caps = "<a href='(.+?)'>Capitulo: (.+?) - (.+?)<\/a>"
matches = scrapertools.find_multiple_matches(data, patron_caps)
show = scrapertools.find_single_match(data, '<span>Titulo.+?<\/span>(.+?)<br><span>')
scrapedthumbnail = scrapertools.find_single_match(data, "<img src='(.+?)'.+?>")
scrapedplot = scrapertools.find_single_match(data, '<span>Descripcion.+?<\/span>(.+?)<br>')
i = 0
temp = 0
for link, cap, name in matches:
if int(cap) == 1:
temp = temp + 1
if int(cap) < 10:
cap = "0" + cap
season = temp
episode = int(cap)
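        # remap the flat episode counter onto the trakt-style season/episode
        # numbering kept in the channel's renumber table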
season, episode = renumbertools.numbered_for_tratk(
item.channel, item.show, season, episode)
date = name
title = "%sx%s %s (%s)" % (season, str(episode).zfill(2), "Episodio %s" % episode, date)
# title = str(temp)+"x"+cap+" "+name
url = host + "/" + link
if "NO DISPONIBLE" not in name:
itemlist.append(Item(channel=item.channel, action="findvideos", title=title, thumbnail=scrapedthumbnail,
plot=scrapedplot, url=url, show=show))
if config.get_videolibrary_support() and len(itemlist) > 0:
itemlist.append(Item(channel=item.channel, title="Añadir esta serie a la videoteca", url=item.url,
action="add_serie_to_library", extra="episodios", show=show))
return itemlist
def findvideos(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data1 = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
data_vid = scrapertools.find_single_match(data1, '<div class="videos">(.+?)<\/div><div .+?>')
# name = scrapertools.find_single_match(data,'<span>Titulo.+?<\/span>([^<]+)<br>')
scrapedplot = scrapertools.find_single_match(data, '<br><span>Descrip.+?<\/span>([^<]+)<br>')
scrapedthumbnail = scrapertools.find_single_match(data, '<div class="caracteristicas"><img src="([^<]+)">')
itemla = scrapertools.find_multiple_matches(data_vid, '<div class="serv">.+?-(.+?)-(.+?)<\/div><.+? src="(.+?)"')
for server, quality, url in itemla:
if "Calidad Alta" in quality:
quality = quality.replace("Calidad Alta", "HQ")
server = server.lower().strip()
if "ok" == server:
server = 'okru'
if "netu" == server:
continue
itemlist.append(item.clone(url=url, action="play", server=server, contentQuality=quality,
thumbnail=scrapedthumbnail, plot=scrapedplot,
title="Enlace encontrado en %s: [%s]" % (server.capitalize(), quality)))
autoplay.start(itemlist, item)
return itemlist
def play(item):
log | ger.info()
itemlist = []
    # Look for the video on the item's server ...
devuelve = servertools.findvideosbyserver(item.url, item.server)
if not devuelve:
        # ...if it's not found there, search all the available servers
devuelve = servertools.findvideos(item.url, skip=True)
if devuelve:
# logger.debug(devuelve)
itemlist.append(Item(channel=item.channel, title=item.contentTitle, action="play", server=devuelve[0][2],
url=devuelve[0][1], thumbnail=item.thumbnail))
return itemlist
| identifier_body |
|
autotools_test.py | import os
from conan.tools.files.files import save_toolchain_args
from conan.tools.gnu import Autotools
from conans.test.utils.mocks import ConanFileMock
from conans.test.utils.test_files import temp_folder
def | ():
folder = temp_folder()
os.chdir(folder)
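    # seed the conanbuild args file so Autotools.configure() appends "-foo bar"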
save_toolchain_args({
"configure_args": "-foo bar",
"make_args": ""}
)
conanfile = ConanFileMock()
conanfile.folders.set_base_install(folder)
sources = "/path/to/sources"
conanfile.folders.set_base_source(sources)
autotools = Autotools(conanfile)
autotools.configure(build_script_folder="subfolder")
assert conanfile.command.replace("\\", "/") == '"/path/to/sources/subfolder/configure" -foo bar'
autotools.configure()
assert conanfile.command.replace("\\", "/") == '"/path/to/sources/configure" -foo bar'
| test_source_folder_works | identifier_name |
autotools_test.py | import os
from conan.tools.files.files import save_toolchain_args
from conan.tools.gnu import Autotools
from conans.test.utils.mocks import ConanFileMock
from conans.test.utils.test_files import temp_folder
def test_source_folder_works():
| folder = temp_folder()
os.chdir(folder)
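    # seed the conanbuild args file so Autotools.configure() appends "-foo bar"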
save_toolchain_args({
"configure_args": "-foo bar",
"make_args": ""}
)
conanfile = ConanFileMock()
conanfile.folders.set_base_install(folder)
sources = "/path/to/sources"
conanfile.folders.set_base_source(sources)
autotools = Autotools(conanfile)
autotools.configure(build_script_folder="subfolder")
assert conanfile.command.replace("\\", "/") == '"/path/to/sources/subfolder/configure" -foo bar'
autotools.configure()
assert conanfile.command.replace("\\", "/") == '"/path/to/sources/configure" -foo bar' | identifier_body |
|
autotools_test.py | import os
from conan.tools.files.files import save_toolchain_args
from conan.tools.gnu import Autotools
from conans.test.utils.mocks import ConanFileMock
from conans.test.utils.test_files import temp_folder
def test_source_folder_works():
folder = temp_folder()
os.chdir(folder)
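    # seed the conanbuild args file so Autotools.configure() appends "-foo bar"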
save_toolchain_args({ | sources = "/path/to/sources"
conanfile.folders.set_base_source(sources)
autotools = Autotools(conanfile)
autotools.configure(build_script_folder="subfolder")
assert conanfile.command.replace("\\", "/") == '"/path/to/sources/subfolder/configure" -foo bar'
autotools.configure()
assert conanfile.command.replace("\\", "/") == '"/path/to/sources/configure" -foo bar' | "configure_args": "-foo bar",
"make_args": ""}
)
conanfile = ConanFileMock()
conanfile.folders.set_base_install(folder) | random_line_split |
**`test_mlbam_util.py`** — `fim_type: identifier_name` · `middle`: `test_get_content_404_setting_header`. Reassembled source:

```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from unittest import TestCase, main

from pitchpx.mlbam_util import MlbamUtil, MlbAmHttpNotFound

__author__ = 'Shinichi Nakagawa'


class TestMlbamUtil(TestCase):
    """
    MLBAM Util Class Test
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_get_content_200(self):
        """
        Get html content(status:200, head:default)
        """
        req = MlbamUtil._get_content(
            'http://gd2.mlb.com/components/game/mlb/year_2016/month_04/day_06/gid_2016_04_06_lanmlb_sdnmlb_1/game.xml'
        )
        self.assertEqual(req.status_code, 200)
        self.assertEqual(req.request.headers['Accept'],
                         'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8')
        self.assertEqual(req.request.headers['User-Agent'],
                         ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_4) '
                          'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.86 Safari/537.36'))

    def test_get_content_200_setting_header(self):
        """
        Get html content(status:200, head:original)
        """
        req = MlbamUtil._get_content(
            'http://gd2.mlb.com/components/game/mlb/year_2016/month_04/day_06/gid_2016_04_06_lanmlb_sdnmlb_1/game.xml',
            headers={'Accept': 'text/html', 'User-Agent': 'Python-urllib/3.5'}
        )
        self.assertEqual(req.status_code, 200)
        self.assertEqual(req.request.headers['Accept'], 'text/html')
        self.assertEqual(req.request.headers['User-Agent'], 'Python-urllib/3.5')

    def test_get_content_404_setting_header(self):
        """
        Get html content(status:404, head:original)
        """
        req = MlbamUtil._get_content(
            'http://gd2.mlb.com/components/game/mlb/year_2016/month_04/day_06/gid_2016_04_06_chnmlb_anamlb_1/game.xml',
            headers={'Accept': 'text/html', 'User-Agent': 'Python-urllib/3.5'}
        )
        self.assertEqual(req.status_code, 404)
        self.assertEqual(req.request.headers['Accept'], 'text/html')
        self.assertEqual(req.request.headers['User-Agent'], 'Python-urllib/3.5')

    def test_find_xml_200(self):
        """
        Get xml content(status:200, head:default)
        """
        req = MlbamUtil.find_xml(
            'http://gd2.mlb.com/components/game/mlb/year_2016/month_04/day_06/gid_2016_04_06_lanmlb_sdnmlb_1/game.xml',
            'lxml',
        )
        self.assertIsNotNone(req)

    def test_find_xml_404(self):
        """
        Get xml content(status:404, head:default)
        """
        try:
            _ = MlbamUtil.find_xml(
                'http://gd2.mlb.com/components/game/mlb/year_2016/month_04/day_06/gid_2016_04_06_chnmlb_anamlb_1/game.xml',
                'lxml',
            )
        except MlbAmHttpNotFound as e:
            self.assertEqual(
                e.msg,
                ('HTTP Error '
                 'url: http://gd2.mlb.com/components/game/mlb/year_2016/month_04/day_06/gid_2016_04_06_chnmlb_anamlb_1/game.xml '
                 'status: 404')
            )


if __name__ == '__main__':
    main()
```
**`test_mlbam_util.py`** — `fim_type: random_line_split` · same source file as above; only the split differs. `middle`:

```python
        self.assertEqual(req.status_code, 200)
        self.assertEqual(req.request.headers['Accept'], 'text/html')
        self.assertEqual(req.request.headers['User-Agent'], 'Python-urllib/3.5')

    def test_get_content_404_setting_header(self):
```
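The `test_mlbam_util.py` rows above and below share one source file; only the placement of the masked span changes. For training, a row like these is typically serialized in prefix-suffix-middle (PSM) order between sentinel tokens. The sketch below uses StarCoder-style sentinels purely for illustration — the page does not say which tokenizer this dataset targets:

```python
def to_fim_example(row: dict) -> str:
    """Serialize one row in PSM (prefix-suffix-middle) order.

    The sentinel strings are placeholders; substitute the FIM tokens
    defined by the target tokenizer.
    """
    return (
        "<fim_prefix>" + row["prefix"]
        + "<fim_suffix>" + row["suffix"]
        + "<fim_middle>" + row["middle"]
    )
```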
**`test_mlbam_util.py`** — `fim_type: identifier_body` · same source file as above. `middle`: the opening of the `TestMlbamUtil` class body, from the class docstring through the `Accept`-header assertion of `test_get_content_200`:

```python
    """
    MLBAM Util Class Test
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_get_content_200(self):
        """
        Get html content(status:200, head:default)
        """
        req = MlbamUtil._get_content(
            'http://gd2.mlb.com/components/game/mlb/year_2016/month_04/day_06/gid_2016_04_06_lanmlb_sdnmlb_1/game.xml'
        )
        self.assertEqual(req.status_code, 200)
        self.assertEqual(req.request.headers['Accept'],
                         'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8')
```
**`test_mlbam_util.py`** — `fim_type: conditional_block` · same source file as above. `middle`: the block under the `__main__` guard:

```python
    main()
```
**`test-pvservice.js`** — `fim_type: random_line_split` · `middle`: the opening lines of the `"storage"` block in `GLOBAL.pjconfig`, through `"queryUrl"`. Reassembled source:

```js
var PvService = require('../service/PvService');
var path = require('path');
var StatisticsServicePV = require('../service/StatisticsService_PV');

GLOBAL.pjconfig = {
    "mysql": {
        "url": "mysql://badjs:[email protected]:4250/badjs"
    },
    "storage": {
        "errorMsgTopUrl": "http://10.185.14.28:9000/errorMsgTop",
        "errorMsgTopCacheUrl": "http://10.185.14.28:9000/errorMsgTopCache",
        "queryUrl": "http://10.185.14.28:9000/query",
        "queryCountUrl": "http://10.185.14.28:9000/queryCount",
        "querySvgUrl": "http://10.185.14.28:9000/errorCountSvg"
    },
    "acceptor": {
        "pushProjectUrl": "http://10.185.14.28:9001/getProjects"
    },
    "zmq": {
        "url": "tcp://10.185.14.28:10000",
        "subscribe": "badjs"
    },
    "email": {
        "homepage": "http://badjs.sng.local/user/index.html",
        "from": "[email protected]",
        "emailSuffix": "@tencent.com",
        "time": "09:00:00",
        "top": 20,
        "module": "email_tof"
    },
    "postgreSql": {
        "connString": "postgres://tdw_v_zscai:[email protected]:5432/sng_vas_speedtest_database"
    },
    "fileStorage": {
        "pageid": "./fileStorage/pageid.json",
        "threshold": "./fileStorage/threshold.json"
    }
};

var pvService = PvService.create({
    filePath: '../fileStorage/',
    filePrefix: 'pv_',
    fileSuffix: ''
});

var SSPV = new StatisticsServicePV();
SSPV.getEP('20151116', function (err, data) {
    console.log(err, 'done');
});

//PvService.getPVByDate('201510302220', function (err, data) {
//    pvService.save(data, function (err, data) {
//        console.log(err, data);
//    });
//});

// Fetch several days of data for a single page (at most 20 hours per request)
//PvService.getPVByPageid('169_2122_1', '201510312200', '201511021000', function (err, data) {
//    console.log('数据回来', err, data);
//    pvService.save(data, function (err, data) {
//        console.log(err, data);
//    });
//});

//pvService.updatePVNow();
//console.log(path.resolve(__dirname, '../fileStorage/'));
```
**`CorsCheck.js`** — `fim_type: identifier_name` · `middle`: `CorsWrapper` (the function name). Reassembled source:

```jsx
import React, {useState, useEffect} from 'react'
import {Text, Container, Flex, Spinner, Stack} from '@sanity/ui'

import {versionedClient} from './versionedClient'

const checkCors = () =>
  Promise.all([
    versionedClient.request({uri: '/ping', withCredentials: false}).then(() => true),
    versionedClient
      .request({uri: '/users/me', withCredentials: false})
      .then(() => true)
      .catch(() => false),
  ])
    .then((res) => ({
      isCorsError: res[0] && !res[1],
      pingResponded: res[0],
    }))
    .catch((error) => ({error}))

function CorsWrapper({result, children}) {
  const response = result && result.error && result.error.response
  const message = response && response.body && response.body.message

  if (!message) {
    return <>{children}</>
  }

  return (
    <div>
      <Text>Error message:</Text>
      <pre>
        <code>{response.body.message}</code>
      </pre>
      {children}
    </div>
  )
}

export default function CorsCheck() {
  const [state, setState] = useState({isLoading: true})

  useEffect(() => {
    checkCors().then((res) =>
      setState({
        result: res,
        isLoading: false,
      })
    )
  }, [])

  const {isLoading, result} = state
  const origin =
    window.location.origin ||
    window.location.href.replace(new RegExp(`${window.location.pathname}$`), '')

  if (isLoading) {
    return (
      <Container width={4} padding={4} height="fill">
        <Flex align="center" justify="center" height="fill">
          <Text>
            <Spinner />
          </Text>
        </Flex>
      </Container>
    )
  }

  const tld = versionedClient.config().apiHost.replace(/.*?sanity\.([a-z]+).*/, '$1')
  const projectId = versionedClient.config().projectId
  const corsUrl = `https://manage.sanity.${tld}/projects/${projectId}/settings/api`
  const response = result.error && result.error.response

  if (response) {
    const is404 = response.statusCode === 404
    const errType = response.body.attributes && response.body.attributes.type
    if (is404 && errType === 'project') {
      return (
        <Stack space={4}>
          <Text accent>{response.body.message || response.statusCode}</Text>
          <Text accent>
            Double-check that your <code>sanity.json</code> points to the right project ID!
          </Text>
        </Stack>
      )
    }
  }

  if (result.isCorsError) {
    return (
      <CorsWrapper result={state.result}>
        <Text accent>
          It looks like the error is being caused by the current origin (<code>{origin}</code>) not
          being allowed for this project. If you are a project administrator or developer, you can
          head to{' '}
          <a rel="noopener noreferrer" target="_blank" href={corsUrl}>
            the project management
          </a>{' '}
          interface. Add the origin under the{' '}
          <a
            href="https://www.sanity.io/docs/front-ends/cors"
            target="_blank"
            rel="noopener noreferrer"
          >
            <em>CORS Origins</em>
          </a>{' '}
          section. Do remember to <code>allow credentials</code>!
        </Text>
      </CorsWrapper>
    )
  }

  if (result.pingResponded) {
    return (
      <CorsWrapper result={state.result}>
        <Text accent>
          Our diagnostics cannot quite determine why this happened. If it was a network glitch you
          could try hitting the <strong>Retry</strong> button below. If you are working as a
          developer on this project, you could also have a look at the browser's dev console and see
          if any issues are listed there.
        </Text>
      </CorsWrapper>
    )
  }

  return (
    <CorsWrapper result={state.result}>
      <Text accent>
        It might be that your internet connection is unstable or down. You could try hitting the{' '}
        <strong>Retry</strong> button to see if it was just a temporary glitch.
      </Text>
    </CorsWrapper>
  )
}
```
**`CorsCheck.js`** — `fim_type: conditional_block` · same source file as above. `middle`: the early-return block under `if (!message)`:

```jsx
{
  return <>{children}</>
}
```
**`CorsCheck.js`** — `fim_type: random_line_split` · same source file as above. `middle`: the `</Container>` line that closes the loading branch's markup.
**`CorsCheck.js`** — `fim_type: identifier_body` · same source file as above. `middle`: the body of `CorsWrapper`:

```jsx
{
  const response = result && result.error && result.error.response
  const message = response && response.body && response.body.message

  if (!message) {
    return <>{children}</>
  }

  return (
    <div>
      <Text>Error message:</Text>
      <pre>
        <code>{response.body.message}</code>
      </pre>
      {children}
    </div>
  )
}
```
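To work with these rows outside the viewer, the split can be loaded and filtered by `fim_type`. A sketch using the `datasets` library — the repository id below is a placeholder, since this page does not name the dataset:

```python
from datasets import load_dataset

ds = load_dataset("user/fim-corpus", split="train")  # hypothetical repo id

identifier_rows = ds.filter(lambda row: row["fim_type"] == "identifier_name")
print(len(identifier_rows), identifier_rows[0]["file_name"])
```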