Dataset schema (four parallel string columns per row):

    code             strings, 75 to 104k characters
    code_sememe      strings, 47 to 309k characters
    token_type       strings, 215 to 214k characters
    code_dependency  strings, 75 to 155k characters
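Each row below pairs a Python function (code) with three machine-derived views: an AST-style rendering (code_sememe), a stream of lexical classes (token_type), and the source re-emitted with control/data-dependency comments (code_dependency). The dataset's actual generators are not shown here; as a rough illustration only, the token_type view can be approximated with Python's standard tokenize module. The helper name below is hypothetical, and the simplifications are assumptions.

import io
import keyword
import token
import tokenize

def rough_token_types(source):
    # Approximate the token_type column: tag each token with its lexical
    # class. Illustrative sketch; the dataset's real tagger is unknown.
    out = []
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type == token.NAME:
            kind = 'keyword' if keyword.iskeyword(tok.string) else 'identifier'
            out.append('%s[%s]' % (kind, tok.string))
        elif tok.type == token.STRING:
            out.append('literal[string]')
        elif tok.type == token.NUMBER:
            out.append('literal[int]')  # the column appears to tag all numbers alike
        elif tok.type == token.OP:
            out.append(tok.string)
    return ' '.join(out)

print(rough_token_types('def close(self): os.close(self.in_d)'))
# keyword[def] identifier[close] ( identifier[self] ) : identifier[os] . identifier[close] ( identifier[self] . identifier[in_d] )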
def transit_decrypt_data(self, name, ciphertext, context=None, nonce=None, batch_input=None, mount_point='transit'):
    """POST /<mount_point>/decrypt/<name>

    :param name:
    :type name:
    :param ciphertext:
    :type ciphertext:
    :param context:
    :type context:
    :param nonce:
    :type nonce:
    :param batch_input:
    :type batch_input:
    :param mount_point:
    :type mount_point:
    :return:
    :rtype:
    """
    url = '/v1/{0}/decrypt/{1}'.format(mount_point, name)
    params = {
        'ciphertext': ciphertext
    }
    if context is not None:
        params['context'] = context
    if nonce is not None:
        params['nonce'] = nonce
    if batch_input is not None:
        params['batch_input'] = batch_input
    return self._adapter.post(url, json=params).json()
def function[transit_decrypt_data, parameter[self, name, ciphertext, context, nonce, batch_input, mount_point]]: constant[POST /<mount_point>/decrypt/<name> :param name: :type name: :param ciphertext: :type ciphertext: :param context: :type context: :param nonce: :type nonce: :param batch_input: :type batch_input: :param mount_point: :type mount_point: :return: :rtype: ] variable[url] assign[=] call[constant[/v1/{0}/decrypt/{1}].format, parameter[name[mount_point], name[name]]] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20c992a10>], [<ast.Name object at 0x7da20c993400>]] if compare[name[context] is_not constant[None]] begin[:] call[name[params]][constant[context]] assign[=] name[context] if compare[name[nonce] is_not constant[None]] begin[:] call[name[params]][constant[nonce]] assign[=] name[nonce] if compare[name[batch_input] is_not constant[None]] begin[:] call[name[params]][constant[batch_input]] assign[=] name[batch_input] return[call[call[name[self]._adapter.post, parameter[name[url]]].json, parameter[]]]
keyword[def] identifier[transit_decrypt_data] ( identifier[self] , identifier[name] , identifier[ciphertext] , identifier[context] = keyword[None] , identifier[nonce] = keyword[None] , identifier[batch_input] = keyword[None] , identifier[mount_point] = literal[string] ): literal[string] identifier[url] = literal[string] . identifier[format] ( identifier[mount_point] , identifier[name] ) identifier[params] ={ literal[string] : identifier[ciphertext] } keyword[if] identifier[context] keyword[is] keyword[not] keyword[None] : identifier[params] [ literal[string] ]= identifier[context] keyword[if] identifier[nonce] keyword[is] keyword[not] keyword[None] : identifier[params] [ literal[string] ]= identifier[nonce] keyword[if] identifier[batch_input] keyword[is] keyword[not] keyword[None] : identifier[params] [ literal[string] ]= identifier[batch_input] keyword[return] identifier[self] . identifier[_adapter] . identifier[post] ( identifier[url] , identifier[json] = identifier[params] ). identifier[json] ()
def transit_decrypt_data(self, name, ciphertext, context=None, nonce=None, batch_input=None, mount_point='transit'): """POST /<mount_point>/decrypt/<name> :param name: :type name: :param ciphertext: :type ciphertext: :param context: :type context: :param nonce: :type nonce: :param batch_input: :type batch_input: :param mount_point: :type mount_point: :return: :rtype: """ url = '/v1/{0}/decrypt/{1}'.format(mount_point, name) params = {'ciphertext': ciphertext} if context is not None: params['context'] = context # depends on [control=['if'], data=['context']] if nonce is not None: params['nonce'] = nonce # depends on [control=['if'], data=['nonce']] if batch_input is not None: params['batch_input'] = batch_input # depends on [control=['if'], data=['batch_input']] return self._adapter.post(url, json=params).json()
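The code_sememe view above reads like a depth-first rendering of the CPython AST. A minimal sketch of one way such a rendering could be produced with the ast module — it handles only a few node types, and is an assumption about the pipeline, not the dataset's actual generator:

import ast

def sememe(node):
    # Render a small AST subset in the sememe style seen above.
    # Illustrative only: node coverage and exact spelling are assumptions.
    if isinstance(node, ast.Name):
        return 'name[%s]' % node.id
    if isinstance(node, ast.Constant):
        return 'constant[%s]' % node.value
    if isinstance(node, ast.Attribute):
        return '%s.%s' % (sememe(node.value), node.attr)
    if isinstance(node, ast.Call):
        args = ', '.join(sememe(a) for a in node.args)
        return 'call[%s, parameter[%s]]' % (sememe(node.func), args)
    if isinstance(node, ast.Assign):
        return 'variable[%s] assign[=] %s' % (node.targets[0].id, sememe(node.value))
    return '<%s>' % type(node).__name__

stmt = ast.parse("url = '/v1/{0}/decrypt/{1}'.format(mount_point, name)").body[0]
print(sememe(stmt))
# variable[url] assign[=] call[constant[/v1/{0}/decrypt/{1}].format, parameter[name[mount_point], name[name]]]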
def fit(
    self,
    df,
    id_col,
    event_col,
    start_col="start",
    stop_col="stop",
    weights_col=None,
    show_progress=False,
    step_size=None,
    robust=False,
    strata=None,
    initial_point=None,
):  # pylint: disable=too-many-arguments
    """
    Fit the Cox Proportional Hazard model to a time varying dataset. Tied survival times
    are handled using Efron's tie-method.

    Parameters
    -----------
    df: DataFrame
        a Pandas DataFrame with necessary columns `duration_col` and
        `event_col`, plus other covariates. `duration_col` refers to
        the lifetimes of the subjects. `event_col` refers to whether
        the 'death' event was observed: 1 if observed, 0 else (censored).
    id_col: string
        A subject could have multiple rows in the DataFrame. This column contains
        the unique identifier per subject.
    event_col: string
        the column in DataFrame that contains the subjects' death
        observation. If left as None, assume all individuals are non-censored.
    start_col: string
        the column that contains the start of a subject's time period.
    stop_col: string
        the column that contains the end of a subject's time period.
    weights_col: string, optional
        the column that contains (possibly time-varying) weight of each subject-period row.
    show_progress: boolean, optional
        since the fitter is iterative, show convergence diagnostics.
    robust: boolean, optional (default: False)
        Compute the robust errors using the Huber sandwich estimator, aka Wei-Lin estimate. This does not handle
        ties, so if there is a high number of ties, results may significantly differ. See
        "The Robust Inference for the Cox Proportional Hazards Model", Journal of the American
        Statistical Association, Vol. 84, No. 408 (Dec., 1989), pp. 1074-1078
    step_size: float, optional
        set an initial step size for the fitting algorithm.
    strata: list or string, optional
        specify a column or list of columns to use in stratification. This is useful if a
        categorical covariate does not obey the proportional hazard assumption. This
        is used similarly to the `strata` expression in R.
        See http://courses.washington.edu/b515/l17.pdf.
    initial_point: (d,) numpy array, optional
        initialize the starting point of the iterative
        algorithm. Default is the zero vector.

    Returns
    --------
    self: CoxTimeVaryingFitter
        self, with additional properties like ``hazards_`` and ``print_summary``
    """
    self.strata = coalesce(strata, self.strata)
    self.robust = robust
    if self.robust:
        raise NotImplementedError("Not available yet.")

    self.event_col = event_col
    self.id_col = id_col
    self.stop_col = stop_col
    self.start_col = start_col
    self._time_fit_was_called = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")

    df = df.copy()

    if not (id_col in df and event_col in df and start_col in df and stop_col in df):
        raise KeyError("A column specified in the call to `fit` does not exist in the DataFrame provided.")

    if weights_col is None:
        self.weights_col = None
        assert (
            "__weights" not in df.columns
        ), "__weights is an internal lifelines column, please rename your column first."
        df["__weights"] = 1.0
    else:
        self.weights_col = weights_col
        if (df[weights_col] <= 0).any():
            raise ValueError("values in weights_col must be positive.")

    df = df.rename(
        columns={id_col: "id", event_col: "event", start_col: "start", stop_col: "stop", weights_col: "__weights"}
    )

    if self.strata is None:
        df = df.set_index("id")
    else:
        df = df.set_index(_to_list(self.strata) + ["id"])  # TODO: needs to be a list

    df = df.sort_index()

    events, start, stop = (
        pass_for_numeric_dtypes_or_raise_array(df.pop("event")).astype(bool),
        df.pop("start"),
        df.pop("stop"),
    )
    weights = df.pop("__weights").astype(float)

    df = df.astype(float)
    self._check_values(df, events, start, stop)

    self._norm_mean = df.mean(0)
    self._norm_std = df.std(0)

    hazards_ = self._newton_rhaphson(
        normalize(df, self._norm_mean, self._norm_std),
        events,
        start,
        stop,
        weights,
        initial_point=initial_point,
        show_progress=show_progress,
        step_size=step_size,
    )

    self.hazards_ = pd.Series(hazards_, index=df.columns, name="coef") / self._norm_std
    self.variance_matrix_ = -inv(self._hessian_) / np.outer(self._norm_std, self._norm_std)
    self.standard_errors_ = self._compute_standard_errors(
        normalize(df, self._norm_mean, self._norm_std), events, start, stop, weights
    )
    self.confidence_intervals_ = self._compute_confidence_intervals()
    self.baseline_cumulative_hazard_ = self._compute_cumulative_baseline_hazard(df, events, start, stop, weights)
    self.baseline_survival_ = self._compute_baseline_survival()
    self.event_observed = events
    self.start_stop_and_events = pd.DataFrame({"event": events, "start": start, "stop": stop})
    self.weights = weights

    self._n_examples = df.shape[0]
    self._n_unique = df.index.unique().shape[0]

    return self
def function[fit, parameter[self, df, id_col, event_col, start_col, stop_col, weights_col, show_progress, step_size, robust, strata, initial_point]]: constant[ Fit the Cox Proportional Hazard model to a time varying dataset. Tied survival times are handled using Efron's tie-method. Parameters ----------- df: DataFrame a Pandas DataFrame with necessary columns `duration_col` and `event_col`, plus other covariates. `duration_col` refers to the lifetimes of the subjects. `event_col` refers to whether the 'death' events was observed: 1 if observed, 0 else (censored). id_col: string A subject could have multiple rows in the DataFrame. This column contains the unique identifier per subject. event_col: string the column in DataFrame that contains the subjects' death observation. If left as None, assume all individuals are non-censored. start_col: string the column that contains the start of a subject's time period. stop_col: string the column that contains the end of a subject's time period. weights_col: string, optional the column that contains (possibly time-varying) weight of each subject-period row. show_progress: since the fitter is iterative, show convergence diagnostics. robust: boolean, optional (default: True) Compute the robust errors using the Huber sandwich estimator, aka Wei-Lin estimate. This does not handle ties, so if there are high number of ties, results may significantly differ. See "The Robust Inference for the Cox Proportional Hazards Model", Journal of the American Statistical Association, Vol. 84, No. 408 (Dec., 1989), pp. 1074- 1078 step_size: float, optional set an initial step size for the fitting algorithm. strata: list or string, optional specify a column or list of columns n to use in stratification. This is useful if a categorical covariate does not obey the proportional hazard assumption. This is used similar to the `strata` expression in R. See http://courses.washington.edu/b515/l17.pdf. initial_point: (d,) numpy array, optional initialize the starting point of the iterative algorithm. Default is the zero vector. 
Returns -------- self: CoxTimeVaryingFitter self, with additional properties like ``hazards_`` and ``print_summary`` ] name[self].strata assign[=] call[name[coalesce], parameter[name[strata], name[self].strata]] name[self].robust assign[=] name[robust] if name[self].robust begin[:] <ast.Raise object at 0x7da20c7ca620> name[self].event_col assign[=] name[event_col] name[self].id_col assign[=] name[id_col] name[self].stop_col assign[=] name[stop_col] name[self].start_col assign[=] name[start_col] name[self]._time_fit_was_called assign[=] call[call[name[datetime].utcnow, parameter[]].strftime, parameter[constant[%Y-%m-%d %H:%M:%S]]] variable[df] assign[=] call[name[df].copy, parameter[]] if <ast.UnaryOp object at 0x7da20c7cbdf0> begin[:] <ast.Raise object at 0x7da20c7c8460> if compare[name[weights_col] is constant[None]] begin[:] name[self].weights_col assign[=] constant[None] assert[compare[constant[__weights] <ast.NotIn object at 0x7da2590d7190> name[df].columns]] call[name[df]][constant[__weights]] assign[=] constant[1.0] variable[df] assign[=] call[name[df].rename, parameter[]] if compare[name[self].strata is constant[None]] begin[:] variable[df] assign[=] call[name[df].set_index, parameter[constant[id]]] <ast.Tuple object at 0x7da20e956c80> assign[=] tuple[[<ast.Call object at 0x7da20e954f10>, <ast.Call object at 0x7da20e955a50>, <ast.Call object at 0x7da20e9542b0>]] variable[weights] assign[=] call[call[name[df].pop, parameter[constant[__weights]]].astype, parameter[name[float]]] variable[df] assign[=] call[name[df].astype, parameter[name[float]]] call[name[self]._check_values, parameter[name[df], name[events], name[start], name[stop]]] name[self]._norm_mean assign[=] call[name[df].mean, parameter[constant[0]]] name[self]._norm_std assign[=] call[name[df].std, parameter[constant[0]]] variable[hazards_] assign[=] call[name[self]._newton_rhaphson, parameter[call[name[normalize], parameter[name[df], name[self]._norm_mean, name[self]._norm_std]], name[events], name[start], name[stop], name[weights]]] name[self].hazards_ assign[=] binary_operation[call[name[pd].Series, parameter[name[hazards_]]] / name[self]._norm_std] name[self].variance_matrix_ assign[=] binary_operation[<ast.UnaryOp object at 0x7da20c9939a0> / call[name[np].outer, parameter[name[self]._norm_std, name[self]._norm_std]]] name[self].standard_errors_ assign[=] call[name[self]._compute_standard_errors, parameter[call[name[normalize], parameter[name[df], name[self]._norm_mean, name[self]._norm_std]], name[events], name[start], name[stop], name[weights]]] name[self].confidence_intervals_ assign[=] call[name[self]._compute_confidence_intervals, parameter[]] name[self].baseline_cumulative_hazard_ assign[=] call[name[self]._compute_cumulative_baseline_hazard, parameter[name[df], name[events], name[start], name[stop], name[weights]]] name[self].baseline_survival_ assign[=] call[name[self]._compute_baseline_survival, parameter[]] name[self].event_observed assign[=] name[events] name[self].start_stop_and_events assign[=] call[name[pd].DataFrame, parameter[dictionary[[<ast.Constant object at 0x7da20c992890>, <ast.Constant object at 0x7da20c9934f0>, <ast.Constant object at 0x7da20c9922f0>], [<ast.Name object at 0x7da20c990e80>, <ast.Name object at 0x7da20c9937f0>, <ast.Name object at 0x7da20c991090>]]]] name[self].weights assign[=] name[weights] name[self]._n_examples assign[=] call[name[df].shape][constant[0]] name[self]._n_unique assign[=] call[call[name[df].index.unique, parameter[]].shape][constant[0]] return[name[self]]
keyword[def] identifier[fit] ( identifier[self] , identifier[df] , identifier[id_col] , identifier[event_col] , identifier[start_col] = literal[string] , identifier[stop_col] = literal[string] , identifier[weights_col] = keyword[None] , identifier[show_progress] = keyword[False] , identifier[step_size] = keyword[None] , identifier[robust] = keyword[False] , identifier[strata] = keyword[None] , identifier[initial_point] = keyword[None] , ): literal[string] identifier[self] . identifier[strata] = identifier[coalesce] ( identifier[strata] , identifier[self] . identifier[strata] ) identifier[self] . identifier[robust] = identifier[robust] keyword[if] identifier[self] . identifier[robust] : keyword[raise] identifier[NotImplementedError] ( literal[string] ) identifier[self] . identifier[event_col] = identifier[event_col] identifier[self] . identifier[id_col] = identifier[id_col] identifier[self] . identifier[stop_col] = identifier[stop_col] identifier[self] . identifier[start_col] = identifier[start_col] identifier[self] . identifier[_time_fit_was_called] = identifier[datetime] . identifier[utcnow] (). identifier[strftime] ( literal[string] ) identifier[df] = identifier[df] . identifier[copy] () keyword[if] keyword[not] ( identifier[id_col] keyword[in] identifier[df] keyword[and] identifier[event_col] keyword[in] identifier[df] keyword[and] identifier[start_col] keyword[in] identifier[df] keyword[and] identifier[stop_col] keyword[in] identifier[df] ): keyword[raise] identifier[KeyError] ( literal[string] ) keyword[if] identifier[weights_col] keyword[is] keyword[None] : identifier[self] . identifier[weights_col] = keyword[None] keyword[assert] ( literal[string] keyword[not] keyword[in] identifier[df] . identifier[columns] ), literal[string] identifier[df] [ literal[string] ]= literal[int] keyword[else] : identifier[self] . identifier[weights_col] = identifier[weights_col] keyword[if] ( identifier[df] [ identifier[weights_col] ]<= literal[int] ). identifier[any] (): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[df] = identifier[df] . identifier[rename] ( identifier[columns] ={ identifier[id_col] : literal[string] , identifier[event_col] : literal[string] , identifier[start_col] : literal[string] , identifier[stop_col] : literal[string] , identifier[weights_col] : literal[string] } ) keyword[if] identifier[self] . identifier[strata] keyword[is] keyword[None] : identifier[df] = identifier[df] . identifier[set_index] ( literal[string] ) keyword[else] : identifier[df] = identifier[df] . identifier[set_index] ( identifier[_to_list] ( identifier[self] . identifier[strata] )+[ literal[string] ]) identifier[df] = identifier[df] . identifier[sort_index] () identifier[events] , identifier[start] , identifier[stop] =( identifier[pass_for_numeric_dtypes_or_raise_array] ( identifier[df] . identifier[pop] ( literal[string] )). identifier[astype] ( identifier[bool] ), identifier[df] . identifier[pop] ( literal[string] ), identifier[df] . identifier[pop] ( literal[string] ), ) identifier[weights] = identifier[df] . identifier[pop] ( literal[string] ). identifier[astype] ( identifier[float] ) identifier[df] = identifier[df] . identifier[astype] ( identifier[float] ) identifier[self] . identifier[_check_values] ( identifier[df] , identifier[events] , identifier[start] , identifier[stop] ) identifier[self] . identifier[_norm_mean] = identifier[df] . identifier[mean] ( literal[int] ) identifier[self] . identifier[_norm_std] = identifier[df] . 
identifier[std] ( literal[int] ) identifier[hazards_] = identifier[self] . identifier[_newton_rhaphson] ( identifier[normalize] ( identifier[df] , identifier[self] . identifier[_norm_mean] , identifier[self] . identifier[_norm_std] ), identifier[events] , identifier[start] , identifier[stop] , identifier[weights] , identifier[initial_point] = identifier[initial_point] , identifier[show_progress] = identifier[show_progress] , identifier[step_size] = identifier[step_size] , ) identifier[self] . identifier[hazards_] = identifier[pd] . identifier[Series] ( identifier[hazards_] , identifier[index] = identifier[df] . identifier[columns] , identifier[name] = literal[string] )/ identifier[self] . identifier[_norm_std] identifier[self] . identifier[variance_matrix_] =- identifier[inv] ( identifier[self] . identifier[_hessian_] )/ identifier[np] . identifier[outer] ( identifier[self] . identifier[_norm_std] , identifier[self] . identifier[_norm_std] ) identifier[self] . identifier[standard_errors_] = identifier[self] . identifier[_compute_standard_errors] ( identifier[normalize] ( identifier[df] , identifier[self] . identifier[_norm_mean] , identifier[self] . identifier[_norm_std] ), identifier[events] , identifier[start] , identifier[stop] , identifier[weights] ) identifier[self] . identifier[confidence_intervals_] = identifier[self] . identifier[_compute_confidence_intervals] () identifier[self] . identifier[baseline_cumulative_hazard_] = identifier[self] . identifier[_compute_cumulative_baseline_hazard] ( identifier[df] , identifier[events] , identifier[start] , identifier[stop] , identifier[weights] ) identifier[self] . identifier[baseline_survival_] = identifier[self] . identifier[_compute_baseline_survival] () identifier[self] . identifier[event_observed] = identifier[events] identifier[self] . identifier[start_stop_and_events] = identifier[pd] . identifier[DataFrame] ({ literal[string] : identifier[events] , literal[string] : identifier[start] , literal[string] : identifier[stop] }) identifier[self] . identifier[weights] = identifier[weights] identifier[self] . identifier[_n_examples] = identifier[df] . identifier[shape] [ literal[int] ] identifier[self] . identifier[_n_unique] = identifier[df] . identifier[index] . identifier[unique] (). identifier[shape] [ literal[int] ] keyword[return] identifier[self]
def fit(self, df, id_col, event_col, start_col='start', stop_col='stop', weights_col=None, show_progress=False, step_size=None, robust=False, strata=None, initial_point=None): # pylint: disable=too-many-arguments '\n Fit the Cox Proportional Hazard model to a time varying dataset. Tied survival times\n are handled using Efron\'s tie-method.\n\n Parameters\n -----------\n df: DataFrame\n a Pandas DataFrame with necessary columns `duration_col` and\n `event_col`, plus other covariates. `duration_col` refers to\n the lifetimes of the subjects. `event_col` refers to whether\n the \'death\' events was observed: 1 if observed, 0 else (censored).\n id_col: string\n A subject could have multiple rows in the DataFrame. This column contains\n the unique identifier per subject.\n event_col: string\n the column in DataFrame that contains the subjects\' death\n observation. If left as None, assume all individuals are non-censored.\n start_col: string\n the column that contains the start of a subject\'s time period.\n stop_col: string\n the column that contains the end of a subject\'s time period.\n weights_col: string, optional\n the column that contains (possibly time-varying) weight of each subject-period row.\n show_progress: since the fitter is iterative, show convergence\n diagnostics.\n robust: boolean, optional (default: True)\n Compute the robust errors using the Huber sandwich estimator, aka Wei-Lin estimate. This does not handle\n ties, so if there are high number of ties, results may significantly differ. See\n "The Robust Inference for the Cox Proportional Hazards Model", Journal of the American Statistical Association, Vol. 84, No. 408 (Dec., 1989), pp. 1074- 1078\n step_size: float, optional\n set an initial step size for the fitting algorithm.\n strata: list or string, optional\n specify a column or list of columns n to use in stratification. This is useful if a\n categorical covariate does not obey the proportional hazard assumption. This\n is used similar to the `strata` expression in R.\n See http://courses.washington.edu/b515/l17.pdf.\n initial_point: (d,) numpy array, optional\n initialize the starting point of the iterative\n algorithm. Default is the zero vector.\n\n Returns\n --------\n self: CoxTimeVaryingFitter\n self, with additional properties like ``hazards_`` and ``print_summary``\n\n ' self.strata = coalesce(strata, self.strata) self.robust = robust if self.robust: raise NotImplementedError('Not available yet.') # depends on [control=['if'], data=[]] self.event_col = event_col self.id_col = id_col self.stop_col = stop_col self.start_col = start_col self._time_fit_was_called = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S') df = df.copy() if not (id_col in df and event_col in df and (start_col in df) and (stop_col in df)): raise KeyError('A column specified in the call to `fit` does not exist in the DataFrame provided.') # depends on [control=['if'], data=[]] if weights_col is None: self.weights_col = None assert '__weights' not in df.columns, '__weights is an internal lifelines column, please rename your column first.' 
df['__weights'] = 1.0 # depends on [control=['if'], data=[]] else: self.weights_col = weights_col if (df[weights_col] <= 0).any(): raise ValueError('values in weights_col must be positive.') # depends on [control=['if'], data=[]] df = df.rename(columns={id_col: 'id', event_col: 'event', start_col: 'start', stop_col: 'stop', weights_col: '__weights'}) if self.strata is None: df = df.set_index('id') # depends on [control=['if'], data=[]] else: df = df.set_index(_to_list(self.strata) + ['id']) # TODO: needs to be a list df = df.sort_index() (events, start, stop) = (pass_for_numeric_dtypes_or_raise_array(df.pop('event')).astype(bool), df.pop('start'), df.pop('stop')) weights = df.pop('__weights').astype(float) df = df.astype(float) self._check_values(df, events, start, stop) self._norm_mean = df.mean(0) self._norm_std = df.std(0) hazards_ = self._newton_rhaphson(normalize(df, self._norm_mean, self._norm_std), events, start, stop, weights, initial_point=initial_point, show_progress=show_progress, step_size=step_size) self.hazards_ = pd.Series(hazards_, index=df.columns, name='coef') / self._norm_std self.variance_matrix_ = -inv(self._hessian_) / np.outer(self._norm_std, self._norm_std) self.standard_errors_ = self._compute_standard_errors(normalize(df, self._norm_mean, self._norm_std), events, start, stop, weights) self.confidence_intervals_ = self._compute_confidence_intervals() self.baseline_cumulative_hazard_ = self._compute_cumulative_baseline_hazard(df, events, start, stop, weights) self.baseline_survival_ = self._compute_baseline_survival() self.event_observed = events self.start_stop_and_events = pd.DataFrame({'event': events, 'start': start, 'stop': stop}) self.weights = weights self._n_examples = df.shape[0] self._n_unique = df.index.unique().shape[0] return self
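The code_dependency view re-emits the source with trailing `# depends on [control=..., data=...]` comments that mark which control construct each statement sits under. A rough sketch of how the control part could be recovered with ast — an assumption about the pipeline, not its actual implementation (the data= part, which needs dataflow analysis, is omitted):

import ast

def control_tags(source):
    # Map each statement's line number to its nearest enclosing control
    # construct, mimicking the control= part of the trailers above.
    tags = {}

    def walk(node, ctrl):
        for child in ast.iter_child_nodes(node):
            inner = ctrl
            if isinstance(node, (ast.If, ast.For, ast.While, ast.Try)):
                inner = type(node).__name__.lower()
            if isinstance(child, ast.stmt) and inner:
                tags[child.lineno] = "depends on [control=['%s']]" % inner
            walk(child, inner)

    walk(ast.parse(source), None)
    return tags

src = "if path is None:\n    path = get_path()\n"
print(control_tags(src))
# {2: "depends on [control=['if']]"}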
def handle(self):
    """
    Executes the command.
    """
    if not self.confirm_to_proceed(
        "<question>Are you sure you want to rollback the last migration?:</question> "
    ):
        return

    database = self.option("database")
    repository = DatabaseMigrationRepository(self.resolver, "migrations")

    migrator = Migrator(repository, self.resolver)

    self._prepare_database(migrator, database)

    pretend = self.option("pretend")

    path = self.option("path")

    if path is None:
        path = self._get_migration_path()

    migrator.rollback(path, pretend)

    for note in migrator.get_notes():
        self.line(note)
def function[handle, parameter[self]]: constant[ Executes the command. ] if <ast.UnaryOp object at 0x7da18eb54ca0> begin[:] return[None] variable[database] assign[=] call[name[self].option, parameter[constant[database]]] variable[repository] assign[=] call[name[DatabaseMigrationRepository], parameter[name[self].resolver, constant[migrations]]] variable[migrator] assign[=] call[name[Migrator], parameter[name[repository], name[self].resolver]] call[name[self]._prepare_database, parameter[name[migrator], name[database]]] variable[pretend] assign[=] call[name[self].option, parameter[constant[pretend]]] variable[path] assign[=] call[name[self].option, parameter[constant[path]]] if compare[name[path] is constant[None]] begin[:] variable[path] assign[=] call[name[self]._get_migration_path, parameter[]] call[name[migrator].rollback, parameter[name[path], name[pretend]]] for taget[name[note]] in starred[call[name[migrator].get_notes, parameter[]]] begin[:] call[name[self].line, parameter[name[note]]]
keyword[def] identifier[handle] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[confirm_to_proceed] ( literal[string] ): keyword[return] identifier[database] = identifier[self] . identifier[option] ( literal[string] ) identifier[repository] = identifier[DatabaseMigrationRepository] ( identifier[self] . identifier[resolver] , literal[string] ) identifier[migrator] = identifier[Migrator] ( identifier[repository] , identifier[self] . identifier[resolver] ) identifier[self] . identifier[_prepare_database] ( identifier[migrator] , identifier[database] ) identifier[pretend] = identifier[self] . identifier[option] ( literal[string] ) identifier[path] = identifier[self] . identifier[option] ( literal[string] ) keyword[if] identifier[path] keyword[is] keyword[None] : identifier[path] = identifier[self] . identifier[_get_migration_path] () identifier[migrator] . identifier[rollback] ( identifier[path] , identifier[pretend] ) keyword[for] identifier[note] keyword[in] identifier[migrator] . identifier[get_notes] (): identifier[self] . identifier[line] ( identifier[note] )
def handle(self): """ Executes the command. """ if not self.confirm_to_proceed('<question>Are you sure you want to rollback the last migration?:</question> '): return # depends on [control=['if'], data=[]] database = self.option('database') repository = DatabaseMigrationRepository(self.resolver, 'migrations') migrator = Migrator(repository, self.resolver) self._prepare_database(migrator, database) pretend = self.option('pretend') path = self.option('path') if path is None: path = self._get_migration_path() # depends on [control=['if'], data=['path']] migrator.rollback(path, pretend) for note in migrator.get_notes(): self.line(note) # depends on [control=['for'], data=['note']]
async def getHealth(self, *args, **kwargs):
    """
    Get EC2 account health metrics

    Give some basic stats on the health of our EC2 account

    This method gives output: ``v1/health.json#``

    This method is ``experimental``
    """

    return await self._makeApiCall(self.funcinfo["getHealth"], *args, **kwargs)
<ast.AsyncFunctionDef object at 0x7da1b0400700>
keyword[async] keyword[def] identifier[getHealth] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[return] keyword[await] identifier[self] . identifier[_makeApiCall] ( identifier[self] . identifier[funcinfo] [ literal[string] ],* identifier[args] ,** identifier[kwargs] )
async def getHealth(self, *args, **kwargs): """ Get EC2 account health metrics Give some basic stats on the health of our EC2 account This method gives output: ``v1/health.json#`` This method is ``experimental`` """ return await self._makeApiCall(self.funcinfo['getHealth'], *args, **kwargs)
def close(self):
    """Close port."""
    os.close(self.in_d)
    os.close(self.out_d)
def function[close, parameter[self]]: constant[Close port.] call[name[os].close, parameter[name[self].in_d]] call[name[os].close, parameter[name[self].out_d]]
keyword[def] identifier[close] ( identifier[self] ): literal[string] identifier[os] . identifier[close] ( identifier[self] . identifier[in_d] ) identifier[os] . identifier[close] ( identifier[self] . identifier[out_d] )
def close(self): """Close port.""" os.close(self.in_d) os.close(self.out_d)
def get_user(user_or_username=None):
    """Return Plone User

    :param user_or_username: Plone user or user id
    :type user_or_username: PloneUser/MemberData/str
    :returns: Plone MemberData
    :rtype: object
    """
    if user_or_username is None:
        return None
    if hasattr(user_or_username, "getUserId"):
        return ploneapi.user.get(user_or_username.getUserId())
    return ploneapi.user.get(userid=u.to_string(user_or_username))
def function[get_user, parameter[user_or_username]]: constant[Return Plone User :param user_or_username: Plone user or user id :type groupname: PloneUser/MemberData/str :returns: Plone MemberData :rtype: object ] if compare[name[user_or_username] is constant[None]] begin[:] return[constant[None]] if call[name[hasattr], parameter[name[user_or_username], constant[getUserId]]] begin[:] return[call[name[ploneapi].user.get, parameter[call[name[user_or_username].getUserId, parameter[]]]]] return[call[name[ploneapi].user.get, parameter[]]]
keyword[def] identifier[get_user] ( identifier[user_or_username] = keyword[None] ): literal[string] keyword[if] identifier[user_or_username] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[if] identifier[hasattr] ( identifier[user_or_username] , literal[string] ): keyword[return] identifier[ploneapi] . identifier[user] . identifier[get] ( identifier[user_or_username] . identifier[getUserId] ()) keyword[return] identifier[ploneapi] . identifier[user] . identifier[get] ( identifier[userid] = identifier[u] . identifier[to_string] ( identifier[user_or_username] ))
def get_user(user_or_username=None): """Return Plone User :param user_or_username: Plone user or user id :type groupname: PloneUser/MemberData/str :returns: Plone MemberData :rtype: object """ if user_or_username is None: return None # depends on [control=['if'], data=[]] if hasattr(user_or_username, 'getUserId'): return ploneapi.user.get(user_or_username.getUserId()) # depends on [control=['if'], data=[]] return ploneapi.user.get(userid=u.to_string(user_or_username))
def update(self, data, timeout=-1, force=False):
    """
    Updates one or more attributes for a server hardware type resource.

    Args:
        data (dict): Object to update.
        timeout:
            Timeout in seconds. Wait for task completion by default.
            The timeout does not abort the operation in OneView;
            it just stops waiting for its completion.
        force: Flag to force the operation.

    Returns:
        dict: Updated server hardware type.
    """
    uri = self.data["uri"]
    self.data = self._helper.update(data, uri=uri, timeout=timeout, force=force)
    return self
def function[update, parameter[self, data, timeout, force]]: constant[ Updates one or more attributes for a server hardware type resource. Args: data (dict): Object to update. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView; it just stops waiting for its completion. force: Flag to force the operation. Returns: dict: Updated server hardware type. ] variable[uri] assign[=] call[name[self].data][constant[uri]] name[self].data assign[=] call[name[self]._helper.update, parameter[name[data]]] return[name[self]]
keyword[def] identifier[update] ( identifier[self] , identifier[data] , identifier[timeout] =- literal[int] , identifier[force] = keyword[False] ): literal[string] identifier[uri] = identifier[self] . identifier[data] [ literal[string] ] identifier[self] . identifier[data] = identifier[self] . identifier[_helper] . identifier[update] ( identifier[data] , identifier[uri] = identifier[uri] , identifier[timeout] = identifier[timeout] , identifier[force] = identifier[force] ) keyword[return] identifier[self]
def update(self, data, timeout=-1, force=False): """ Updates one or more attributes for a server hardware type resource. Args: data (dict): Object to update. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView; it just stops waiting for its completion. force: Flag to force the operation. Returns: dict: Updated server hardware type. """ uri = self.data['uri'] self.data = self._helper.update(data, uri=uri, timeout=timeout, force=force) return self
def method_selector_fn(self):
    """Gets the method selector from the config.
    """
    if callable(self.json_rpc_method):
        return self.json_rpc_method
    elif isinstance(self.json_rpc_method, (str,)):
        return lambda *_: self.json_rpc_method
    raise ValueError("``json_rpc_method`` config invalid. May be a string or function")
def function[method_selector_fn, parameter[self]]: constant[Gets the method selector from the config. ] if call[name[callable], parameter[name[self].json_rpc_method]] begin[:] return[name[self].json_rpc_method] <ast.Raise object at 0x7da18c4ce170>
keyword[def] identifier[method_selector_fn] ( identifier[self] ): literal[string] keyword[if] identifier[callable] ( identifier[self] . identifier[json_rpc_method] ): keyword[return] identifier[self] . identifier[json_rpc_method] keyword[elif] identifier[isinstance] ( identifier[self] . identifier[json_rpc_method] ,( identifier[str] ,)): keyword[return] keyword[lambda] * identifier[_] : identifier[self] . identifier[json_rpc_method] keyword[raise] identifier[ValueError] ( literal[string] )
def method_selector_fn(self): """Gets the method selector from the config. """ if callable(self.json_rpc_method): return self.json_rpc_method # depends on [control=['if'], data=[]] elif isinstance(self.json_rpc_method, (str,)): return lambda *_: self.json_rpc_method # depends on [control=['if'], data=[]] raise ValueError('``json_rpc_method`` config invalid. May be a string or function')
def _secondary_loop(self, reconnect=False):
    """The worker loop for the secondary nodes."""
    if self.num_workers > 1:
        pool = multiprocessing.Pool(self.num_workers)
    else:
        pool = None
    should_reconnect = True
    while should_reconnect:
        i = 0
        running = True
        try:
            self._reset_em()
        except (socket.error, EOFError, IOError, OSError, socket.gaierror, TypeError):
            continue
        while running:
            i += 1
            if i % 5 == 0:
                # for better performance, only check every 5 cycles
                try:
                    state = self.em.secondary_state
                except (socket.error, EOFError, IOError, OSError, socket.gaierror, TypeError):
                    if not reconnect:
                        raise
                    else:
                        break
                if state == _STATE_FORCED_SHUTDOWN:
                    running = False
                    should_reconnect = False
                elif state == _STATE_SHUTDOWN:
                    running = False
                if not running:
                    continue
            try:
                tasks = self.inqueue.get(block=True, timeout=0.2)
            except queue.Empty:
                continue
            except (socket.error, EOFError, IOError, OSError, socket.gaierror, TypeError):
                break
            except (managers.RemoteError, multiprocessing.ProcessError) as e:
                if ('Empty' in repr(e)) or ('TimeoutError' in repr(e)):
                    continue
                if (('EOFError' in repr(e)) or ('PipeError' in repr(e))
                        or ('AuthenticationError' in repr(e))):
                    # Second for Python 3.X, Third for 3.6+
                    break
                raise
            if pool is None:
                res = []
                for genome_id, genome, config in tasks:
                    fitness = self.eval_function(genome, config)
                    res.append((genome_id, fitness))
            else:
                genome_ids = []
                jobs = []
                for genome_id, genome, config in tasks:
                    genome_ids.append(genome_id)
                    jobs.append(
                        pool.apply_async(
                            self.eval_function, (genome, config)
                        )
                    )
                results = [
                    job.get(timeout=self.worker_timeout) for job in jobs
                ]
                res = zip(genome_ids, results)
            try:
                self.outqueue.put(res)
            except (socket.error, EOFError, IOError, OSError, socket.gaierror, TypeError):
                break
            except (managers.RemoteError, multiprocessing.ProcessError) as e:
                if ('Empty' in repr(e)) or ('TimeoutError' in repr(e)):
                    continue
                if (('EOFError' in repr(e)) or ('PipeError' in repr(e))
                        or ('AuthenticationError' in repr(e))):
                    # Second for Python 3.X, Third for 3.6+
                    break
                raise
        if not reconnect:
            should_reconnect = False
            break
    if pool is not None:
        pool.terminate()
def function[_secondary_loop, parameter[self, reconnect]]: constant[The worker loop for the secondary nodes.] if compare[name[self].num_workers greater[>] constant[1]] begin[:] variable[pool] assign[=] call[name[multiprocessing].Pool, parameter[name[self].num_workers]] variable[should_reconnect] assign[=] constant[True] while name[should_reconnect] begin[:] variable[i] assign[=] constant[0] variable[running] assign[=] constant[True] <ast.Try object at 0x7da1b18a1660> while name[running] begin[:] <ast.AugAssign object at 0x7da1b18a3730> if compare[binary_operation[name[i] <ast.Mod object at 0x7da2590d6920> constant[5]] equal[==] constant[0]] begin[:] <ast.Try object at 0x7da1b18a3c10> if compare[name[state] equal[==] name[_STATE_FORCED_SHUTDOWN]] begin[:] variable[running] assign[=] constant[False] variable[should_reconnect] assign[=] constant[False] if <ast.UnaryOp object at 0x7da1b18a09d0> begin[:] continue <ast.Try object at 0x7da1b18a13c0> if compare[name[pool] is constant[None]] begin[:] variable[res] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b18bf940>, <ast.Name object at 0x7da1b18bc130>, <ast.Name object at 0x7da1b18bce20>]]] in starred[name[tasks]] begin[:] variable[fitness] assign[=] call[name[self].eval_function, parameter[name[genome], name[config]]] call[name[res].append, parameter[tuple[[<ast.Name object at 0x7da1b18efeb0>, <ast.Name object at 0x7da1b18ed780>]]]] <ast.Try object at 0x7da1b18a3760> if <ast.UnaryOp object at 0x7da1b18a3ac0> begin[:] variable[should_reconnect] assign[=] constant[False] break if compare[name[pool] is_not constant[None]] begin[:] call[name[pool].terminate, parameter[]]
keyword[def] identifier[_secondary_loop] ( identifier[self] , identifier[reconnect] = keyword[False] ): literal[string] keyword[if] identifier[self] . identifier[num_workers] > literal[int] : identifier[pool] = identifier[multiprocessing] . identifier[Pool] ( identifier[self] . identifier[num_workers] ) keyword[else] : identifier[pool] = keyword[None] identifier[should_reconnect] = keyword[True] keyword[while] identifier[should_reconnect] : identifier[i] = literal[int] identifier[running] = keyword[True] keyword[try] : identifier[self] . identifier[_reset_em] () keyword[except] ( identifier[socket] . identifier[error] , identifier[EOFError] , identifier[IOError] , identifier[OSError] , identifier[socket] . identifier[gaierror] , identifier[TypeError] ): keyword[continue] keyword[while] identifier[running] : identifier[i] += literal[int] keyword[if] identifier[i] % literal[int] == literal[int] : keyword[try] : identifier[state] = identifier[self] . identifier[em] . identifier[secondary_state] keyword[except] ( identifier[socket] . identifier[error] , identifier[EOFError] , identifier[IOError] , identifier[OSError] , identifier[socket] . identifier[gaierror] , identifier[TypeError] ): keyword[if] keyword[not] identifier[reconnect] : keyword[raise] keyword[else] : keyword[break] keyword[if] identifier[state] == identifier[_STATE_FORCED_SHUTDOWN] : identifier[running] = keyword[False] identifier[should_reconnect] = keyword[False] keyword[elif] identifier[state] == identifier[_STATE_SHUTDOWN] : identifier[running] = keyword[False] keyword[if] keyword[not] identifier[running] : keyword[continue] keyword[try] : identifier[tasks] = identifier[self] . identifier[inqueue] . identifier[get] ( identifier[block] = keyword[True] , identifier[timeout] = literal[int] ) keyword[except] identifier[queue] . identifier[Empty] : keyword[continue] keyword[except] ( identifier[socket] . identifier[error] , identifier[EOFError] , identifier[IOError] , identifier[OSError] , identifier[socket] . identifier[gaierror] , identifier[TypeError] ): keyword[break] keyword[except] ( identifier[managers] . identifier[RemoteError] , identifier[multiprocessing] . identifier[ProcessError] ) keyword[as] identifier[e] : keyword[if] ( literal[string] keyword[in] identifier[repr] ( identifier[e] )) keyword[or] ( literal[string] keyword[in] identifier[repr] ( identifier[e] )): keyword[continue] keyword[if] (( literal[string] keyword[in] identifier[repr] ( identifier[e] )) keyword[or] ( literal[string] keyword[in] identifier[repr] ( identifier[e] )) keyword[or] ( literal[string] keyword[in] identifier[repr] ( identifier[e] ))): keyword[break] keyword[raise] keyword[if] identifier[pool] keyword[is] keyword[None] : identifier[res] =[] keyword[for] identifier[genome_id] , identifier[genome] , identifier[config] keyword[in] identifier[tasks] : identifier[fitness] = identifier[self] . identifier[eval_function] ( identifier[genome] , identifier[config] ) identifier[res] . identifier[append] (( identifier[genome_id] , identifier[fitness] )) keyword[else] : identifier[genome_ids] =[] identifier[jobs] =[] keyword[for] identifier[genome_id] , identifier[genome] , identifier[config] keyword[in] identifier[tasks] : identifier[genome_ids] . identifier[append] ( identifier[genome_id] ) identifier[jobs] . identifier[append] ( identifier[pool] . identifier[apply_async] ( identifier[self] . identifier[eval_function] ,( identifier[genome] , identifier[config] ) ) ) identifier[results] =[ identifier[job] . 
identifier[get] ( identifier[timeout] = identifier[self] . identifier[worker_timeout] ) keyword[for] identifier[job] keyword[in] identifier[jobs] ] identifier[res] = identifier[zip] ( identifier[genome_ids] , identifier[results] ) keyword[try] : identifier[self] . identifier[outqueue] . identifier[put] ( identifier[res] ) keyword[except] ( identifier[socket] . identifier[error] , identifier[EOFError] , identifier[IOError] , identifier[OSError] , identifier[socket] . identifier[gaierror] , identifier[TypeError] ): keyword[break] keyword[except] ( identifier[managers] . identifier[RemoteError] , identifier[multiprocessing] . identifier[ProcessError] ) keyword[as] identifier[e] : keyword[if] ( literal[string] keyword[in] identifier[repr] ( identifier[e] )) keyword[or] ( literal[string] keyword[in] identifier[repr] ( identifier[e] )): keyword[continue] keyword[if] (( literal[string] keyword[in] identifier[repr] ( identifier[e] )) keyword[or] ( literal[string] keyword[in] identifier[repr] ( identifier[e] )) keyword[or] ( literal[string] keyword[in] identifier[repr] ( identifier[e] ))): keyword[break] keyword[raise] keyword[if] keyword[not] identifier[reconnect] : identifier[should_reconnect] = keyword[False] keyword[break] keyword[if] identifier[pool] keyword[is] keyword[not] keyword[None] : identifier[pool] . identifier[terminate] ()
def _secondary_loop(self, reconnect=False): """The worker loop for the secondary nodes.""" if self.num_workers > 1: pool = multiprocessing.Pool(self.num_workers) # depends on [control=['if'], data=[]] else: pool = None should_reconnect = True while should_reconnect: i = 0 running = True try: self._reset_em() # depends on [control=['try'], data=[]] except (socket.error, EOFError, IOError, OSError, socket.gaierror, TypeError): continue # depends on [control=['except'], data=[]] while running: i += 1 if i % 5 == 0: # for better performance, only check every 5 cycles try: state = self.em.secondary_state # depends on [control=['try'], data=[]] except (socket.error, EOFError, IOError, OSError, socket.gaierror, TypeError): if not reconnect: raise # depends on [control=['if'], data=[]] else: break # depends on [control=['except'], data=[]] if state == _STATE_FORCED_SHUTDOWN: running = False should_reconnect = False # depends on [control=['if'], data=[]] elif state == _STATE_SHUTDOWN: running = False # depends on [control=['if'], data=[]] if not running: continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] try: tasks = self.inqueue.get(block=True, timeout=0.2) # depends on [control=['try'], data=[]] except queue.Empty: continue # depends on [control=['except'], data=[]] except (socket.error, EOFError, IOError, OSError, socket.gaierror, TypeError): break # depends on [control=['except'], data=[]] except (managers.RemoteError, multiprocessing.ProcessError) as e: if 'Empty' in repr(e) or 'TimeoutError' in repr(e): continue # depends on [control=['if'], data=[]] if 'EOFError' in repr(e) or 'PipeError' in repr(e) or 'AuthenticationError' in repr(e): # Second for Python 3.X, Third for 3.6+ break # depends on [control=['if'], data=[]] raise # depends on [control=['except'], data=['e']] if pool is None: res = [] for (genome_id, genome, config) in tasks: fitness = self.eval_function(genome, config) res.append((genome_id, fitness)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: genome_ids = [] jobs = [] for (genome_id, genome, config) in tasks: genome_ids.append(genome_id) jobs.append(pool.apply_async(self.eval_function, (genome, config))) # depends on [control=['for'], data=[]] results = [job.get(timeout=self.worker_timeout) for job in jobs] res = zip(genome_ids, results) try: self.outqueue.put(res) # depends on [control=['try'], data=[]] except (socket.error, EOFError, IOError, OSError, socket.gaierror, TypeError): break # depends on [control=['except'], data=[]] except (managers.RemoteError, multiprocessing.ProcessError) as e: if 'Empty' in repr(e) or 'TimeoutError' in repr(e): continue # depends on [control=['if'], data=[]] if 'EOFError' in repr(e) or 'PipeError' in repr(e) or 'AuthenticationError' in repr(e): # Second for Python 3.X, Third for 3.6+ break # depends on [control=['if'], data=[]] raise # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]] if not reconnect: should_reconnect = False break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] if pool is not None: pool.terminate() # depends on [control=['if'], data=['pool']]
def schedule(self, callback, *args, **kwargs):
    """Schedule the callback to be called asynchronously in a thread pool.

    Args:
        callback (Callable): The function to call.
        args: Positional arguments passed to the function.
        kwargs: Key-word arguments passed to the function.

    Returns:
        None
    """
    self._executor.submit(callback, *args, **kwargs)
def function[schedule, parameter[self, callback]]: constant[Schedule the callback to be called asynchronously in a thread pool. Args: callback (Callable): The function to call. args: Positional arguments passed to the function. kwargs: Key-word arguments passed to the function. Returns: None ] call[name[self]._executor.submit, parameter[name[callback], <ast.Starred object at 0x7da204567400>]]
keyword[def] identifier[schedule] ( identifier[self] , identifier[callback] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[_executor] . identifier[submit] ( identifier[callback] ,* identifier[args] ,** identifier[kwargs] )
def schedule(self, callback, *args, **kwargs): """Schedule the callback to be called asynchronously in a thread pool. Args: callback (Callable): The function to call. args: Positional arguments passed to the function. kwargs: Key-word arguments passed to the function. Returns: None """ self._executor.submit(callback, *args, **kwargs)
def CheckPermissions(self, username, subject):
    """Checks if a given user has access to a given subject."""
    if subject in self.authorized_users:
        return ((username in self.authorized_users[subject]) or
                self.group_access_manager.MemberOfAuthorizedGroup(
                    username, subject))

    # In case the subject is not found, the safest thing to do is to raise.
    # It's up to the users of this class to handle this exception and
    # grant/not grant permissions to the user in question.
    raise InvalidSubject("Subject %s was not found." % subject)
def function[CheckPermissions, parameter[self, username, subject]]: constant[Checks if a given user has access to a given subject.] if compare[name[subject] in name[self].authorized_users] begin[:] return[<ast.BoolOp object at 0x7da1b1c1b8b0>] <ast.Raise object at 0x7da1b1c18430>
keyword[def] identifier[CheckPermissions] ( identifier[self] , identifier[username] , identifier[subject] ): literal[string] keyword[if] identifier[subject] keyword[in] identifier[self] . identifier[authorized_users] : keyword[return] (( identifier[username] keyword[in] identifier[self] . identifier[authorized_users] [ identifier[subject] ]) keyword[or] identifier[self] . identifier[group_access_manager] . identifier[MemberOfAuthorizedGroup] ( identifier[username] , identifier[subject] )) keyword[raise] identifier[InvalidSubject] ( literal[string] % identifier[subject] )
def CheckPermissions(self, username, subject): """Checks if a given user has access to a given subject.""" if subject in self.authorized_users: return username in self.authorized_users[subject] or self.group_access_manager.MemberOfAuthorizedGroup(username, subject) # depends on [control=['if'], data=['subject']] # In case the subject is not found, the safest thing to do is to raise. # It's up to the users of this class to handle this exception and # grant/not grant permissions to the user in question. raise InvalidSubject('Subject %s was not found.' % subject)
def find_end(self, text, start_token, end_token, ignore_end_token=None):
    '''find the end of a token. Returns the offset in the string immediately
    after the matching end_token'''
    if not text.startswith(start_token):
        raise MAVParseError("invalid token start")
    offset = len(start_token)
    nesting = 1
    while nesting > 0:
        idx1 = text[offset:].find(start_token)
        idx2 = text[offset:].find(end_token)
        # Check for false positives due to another similar token
        # For example, make sure idx2 points to the second '}' in ${{field: ${name}}}
        if ignore_end_token:
            combined_token = ignore_end_token + end_token
            if text[offset+idx2:offset+idx2+len(combined_token)] == combined_token:
                idx2 += len(ignore_end_token)
        if idx1 == -1 and idx2 == -1:
            raise MAVParseError("token nesting error")
        if idx1 == -1 or idx1 > idx2:
            offset += idx2 + len(end_token)
            nesting -= 1
        else:
            offset += idx1 + len(start_token)
            nesting += 1
    return offset
def function[find_end, parameter[self, text, start_token, end_token, ignore_end_token]]: constant[find the of a token. Returns the offset in the string immediately after the matching end_token] if <ast.UnaryOp object at 0x7da18dc04850> begin[:] <ast.Raise object at 0x7da18dc05ea0> variable[offset] assign[=] call[name[len], parameter[name[start_token]]] variable[nesting] assign[=] constant[1] while compare[name[nesting] greater[>] constant[0]] begin[:] variable[idx1] assign[=] call[call[name[text]][<ast.Slice object at 0x7da18dc04970>].find, parameter[name[start_token]]] variable[idx2] assign[=] call[call[name[text]][<ast.Slice object at 0x7da18dc06140>].find, parameter[name[end_token]]] if name[ignore_end_token] begin[:] variable[combined_token] assign[=] binary_operation[name[ignore_end_token] + name[end_token]] if compare[call[name[text]][<ast.Slice object at 0x7da1b176afb0>] equal[==] name[combined_token]] begin[:] <ast.AugAssign object at 0x7da1b176abf0> if <ast.BoolOp object at 0x7da1b176ae90> begin[:] <ast.Raise object at 0x7da1b176ac80> if <ast.BoolOp object at 0x7da1b176a6e0> begin[:] <ast.AugAssign object at 0x7da1b176b0d0> <ast.AugAssign object at 0x7da1b176a2c0> return[name[offset]]
keyword[def] identifier[find_end] ( identifier[self] , identifier[text] , identifier[start_token] , identifier[end_token] , identifier[ignore_end_token] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[text] . identifier[startswith] ( identifier[start_token] ): keyword[raise] identifier[MAVParseError] ( literal[string] ) identifier[offset] = identifier[len] ( identifier[start_token] ) identifier[nesting] = literal[int] keyword[while] identifier[nesting] > literal[int] : identifier[idx1] = identifier[text] [ identifier[offset] :]. identifier[find] ( identifier[start_token] ) identifier[idx2] = identifier[text] [ identifier[offset] :]. identifier[find] ( identifier[end_token] ) keyword[if] identifier[ignore_end_token] : identifier[combined_token] = identifier[ignore_end_token] + identifier[end_token] keyword[if] identifier[text] [ identifier[offset] + identifier[idx2] : identifier[offset] + identifier[idx2] + identifier[len] ( identifier[combined_token] )]== identifier[combined_token] : identifier[idx2] += identifier[len] ( identifier[ignore_end_token] ) keyword[if] identifier[idx1] ==- literal[int] keyword[and] identifier[idx2] ==- literal[int] : keyword[raise] identifier[MAVParseError] ( literal[string] ) keyword[if] identifier[idx1] ==- literal[int] keyword[or] identifier[idx1] > identifier[idx2] : identifier[offset] += identifier[idx2] + identifier[len] ( identifier[end_token] ) identifier[nesting] -= literal[int] keyword[else] : identifier[offset] += identifier[idx1] + identifier[len] ( identifier[start_token] ) identifier[nesting] += literal[int] keyword[return] identifier[offset]
def find_end(self, text, start_token, end_token, ignore_end_token=None): """find the of a token. Returns the offset in the string immediately after the matching end_token""" if not text.startswith(start_token): raise MAVParseError('invalid token start') # depends on [control=['if'], data=[]] offset = len(start_token) nesting = 1 while nesting > 0: idx1 = text[offset:].find(start_token) idx2 = text[offset:].find(end_token) # Check for false positives due to another similar token # For example, make sure idx2 points to the second '}' in ${{field: ${name}}} if ignore_end_token: combined_token = ignore_end_token + end_token if text[offset + idx2:offset + idx2 + len(combined_token)] == combined_token: idx2 += len(ignore_end_token) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if idx1 == -1 and idx2 == -1: raise MAVParseError('token nesting error') # depends on [control=['if'], data=[]] if idx1 == -1 or idx1 > idx2: offset += idx2 + len(end_token) nesting -= 1 # depends on [control=['if'], data=[]] else: offset += idx1 + len(start_token) nesting += 1 # depends on [control=['while'], data=['nesting']] return offset
def strip_size(self, location='top', num_lines=None):
    """
    Breadth of the strip background in inches

    Parameters
    ----------
    location : str in ``['top', 'right']``
        Location of the strip text
    num_lines : int
        Number of text lines
    """
    dpi = 72
    theme = self.theme
    get_property = theme.themeables.property

    if location == 'right':
        strip_name = 'strip_text_y'
        num_lines = num_lines or self.num_vars_y
    else:
        strip_name = 'strip_text_x'
        num_lines = num_lines or self.num_vars_x

    if not num_lines:
        return 0

    # The facet labels are placed onto the figure using
    # transAxes dimensions. The line height and line
    # width are mapped to the same [0, 1] range
    # i.e. (pts) * (inches / pts) * (1 / inches)
    try:
        fontsize = get_property(strip_name, 'size')
    except KeyError:
        fontsize = float(theme.rcParams.get('font.size', 10))

    try:
        linespacing = get_property(strip_name, 'linespacing')
    except KeyError:
        linespacing = 1

    # margins on either side of the strip text
    m1, m2 = self.inner_strip_margins(location)

    # Using figure.dpi value here does not work out well!
    breadth = (linespacing*fontsize) * num_lines / dpi
    breadth = breadth + (m1 + m2) / dpi
    return breadth
def function[strip_size, parameter[self, location, num_lines]]: constant[ Breadth of the strip background in inches Parameters ---------- location : str in ``['top', 'right']`` Location of the strip text num_lines : int Number of text lines ] variable[dpi] assign[=] constant[72] variable[theme] assign[=] name[self].theme variable[get_property] assign[=] name[theme].themeables.property if compare[name[location] equal[==] constant[right]] begin[:] variable[strip_name] assign[=] constant[strip_text_y] variable[num_lines] assign[=] <ast.BoolOp object at 0x7da204567fd0> if <ast.UnaryOp object at 0x7da204566cb0> begin[:] return[constant[0]] <ast.Try object at 0x7da2045666b0> <ast.Try object at 0x7da2045657e0> <ast.Tuple object at 0x7da2045640d0> assign[=] call[name[self].inner_strip_margins, parameter[name[location]]] variable[breadth] assign[=] binary_operation[binary_operation[binary_operation[name[linespacing] * name[fontsize]] * name[num_lines]] / name[dpi]] variable[breadth] assign[=] binary_operation[name[breadth] + binary_operation[binary_operation[name[m1] + name[m2]] / name[dpi]]] return[name[breadth]]
keyword[def] identifier[strip_size] ( identifier[self] , identifier[location] = literal[string] , identifier[num_lines] = keyword[None] ): literal[string] identifier[dpi] = literal[int] identifier[theme] = identifier[self] . identifier[theme] identifier[get_property] = identifier[theme] . identifier[themeables] . identifier[property] keyword[if] identifier[location] == literal[string] : identifier[strip_name] = literal[string] identifier[num_lines] = identifier[num_lines] keyword[or] identifier[self] . identifier[num_vars_y] keyword[else] : identifier[strip_name] = literal[string] identifier[num_lines] = identifier[num_lines] keyword[or] identifier[self] . identifier[num_vars_x] keyword[if] keyword[not] identifier[num_lines] : keyword[return] literal[int] keyword[try] : identifier[fontsize] = identifier[get_property] ( identifier[strip_name] , literal[string] ) keyword[except] identifier[KeyError] : identifier[fontsize] = identifier[float] ( identifier[theme] . identifier[rcParams] . identifier[get] ( literal[string] , literal[int] )) keyword[try] : identifier[linespacing] = identifier[get_property] ( identifier[strip_name] , literal[string] ) keyword[except] identifier[KeyError] : identifier[linespacing] = literal[int] identifier[m1] , identifier[m2] = identifier[self] . identifier[inner_strip_margins] ( identifier[location] ) identifier[breadth] =( identifier[linespacing] * identifier[fontsize] )* identifier[num_lines] / identifier[dpi] identifier[breadth] = identifier[breadth] +( identifier[m1] + identifier[m2] )/ identifier[dpi] keyword[return] identifier[breadth]
def strip_size(self, location='top', num_lines=None):
        """
        Breadth of the strip background in inches

        Parameters
        ----------
        location : str in ``['top', 'right']``
            Location of the strip text
        num_lines : int
            Number of text lines
        """
        dpi = 72
        theme = self.theme
        get_property = theme.themeables.property
        if location == 'right':
            strip_name = 'strip_text_y'
            num_lines = num_lines or self.num_vars_y # depends on [control=['if'], data=[]]
        else:
            strip_name = 'strip_text_x'
            num_lines = num_lines or self.num_vars_x
        if not num_lines:
            return 0 # depends on [control=['if'], data=[]]
        # The facet labels are placed onto the figure using
        # transAxes dimensions. The line height and line
        # width are mapped to the same [0, 1] range
        # i.e (pts) * (inches / pts) * (1 / inches)
        try:
            fontsize = get_property(strip_name, 'size') # depends on [control=['try'], data=[]]
        except KeyError:
            fontsize = float(theme.rcParams.get('font.size', 10)) # depends on [control=['except'], data=[]]
        try:
            linespacing = get_property(strip_name, 'linespacing') # depends on [control=['try'], data=[]]
        except KeyError:
            linespacing = 1 # depends on [control=['except'], data=[]]
        # margins on either side of the strip text
        (m1, m2) = self.inner_strip_margins(location)
        # Using figure.dpi value here does not work out well!
        breadth = linespacing * fontsize * num_lines / dpi
        breadth = breadth + (m1 + m2) / dpi
        return breadth
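Plugging illustrative numbers into the breadth formula above (points times lines over dpi, plus the two margins); every value here is made up for the arithmetic, not taken from a real theme:

dpi = 72
fontsize = 10.0      # points; would come from the strip_text theme property
linespacing = 1.2
num_lines = 2
m1, m2 = 3.0, 3.0    # inner strip margins, in points

breadth = (linespacing * fontsize) * num_lines / dpi  # 24 pts -> 0.3333 in
breadth = breadth + (m1 + m2) / dpi                   # + 6 pts -> 0.4167 in
print(round(breadth, 4))                              # 0.4167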
def trace(self, predicate): """ Starts tracing with the given callable. Args: predicate (callable that accepts a single :obj:`hunter.Event` argument): Return: self """ self._handler = predicate if self.threading_support is None or self.threading_support: self._threading_previous = getattr(threading, '_trace_hook', None) threading.settrace(self) self._previous = sys.gettrace() sys.settrace(self) return self
def function[trace, parameter[self, predicate]]: constant[ Starts tracing with the given callable. Args: predicate (callable that accepts a single :obj:`hunter.Event` argument): Return: self ] name[self]._handler assign[=] name[predicate] if <ast.BoolOp object at 0x7da18bcc9450> begin[:] name[self]._threading_previous assign[=] call[name[getattr], parameter[name[threading], constant[_trace_hook], constant[None]]] call[name[threading].settrace, parameter[name[self]]] name[self]._previous assign[=] call[name[sys].gettrace, parameter[]] call[name[sys].settrace, parameter[name[self]]] return[name[self]]
keyword[def] identifier[trace] ( identifier[self] , identifier[predicate] ): literal[string] identifier[self] . identifier[_handler] = identifier[predicate] keyword[if] identifier[self] . identifier[threading_support] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[threading_support] : identifier[self] . identifier[_threading_previous] = identifier[getattr] ( identifier[threading] , literal[string] , keyword[None] ) identifier[threading] . identifier[settrace] ( identifier[self] ) identifier[self] . identifier[_previous] = identifier[sys] . identifier[gettrace] () identifier[sys] . identifier[settrace] ( identifier[self] ) keyword[return] identifier[self]
def trace(self, predicate): """ Starts tracing with the given callable. Args: predicate (callable that accepts a single :obj:`hunter.Event` argument): Return: self """ self._handler = predicate if self.threading_support is None or self.threading_support: self._threading_previous = getattr(threading, '_trace_hook', None) threading.settrace(self) # depends on [control=['if'], data=[]] self._previous = sys.gettrace() sys.settrace(self) return self
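A minimal sketch of the same sys.settrace pattern, assuming a callable object is installed as the trace function; the real Tracer above also mirrors the hook into threading.settrace and remembers the previous hooks so they can be restored.

import sys

class MiniTracer:
    def __call__(self, frame, event, arg):
        if event == 'call':
            print('call:', frame.f_code.co_name)
        return self   # stay installed as the local trace for nested scopes

    def trace(self):
        self._previous = sys.gettrace()   # remember the old hook
        sys.settrace(self)
        return self

    def stop(self):
        sys.settrace(self._previous)      # restore it

def demo():
    pass

t = MiniTracer().trace()
demo()      # prints 'call: demo' (and 'call: stop' below, since tracing is still on)
t.stop()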
def get_office(self, row, division): """ Gets the Office object for the given row of election results. Depends on knowing the division of the row of election results. """ AT_LARGE_STATES = ["AK", "DE", "MT", "ND", "SD", "VT", "WY"] if division.level.name not in [ geography.DivisionLevel.STATE, geography.DivisionLevel.COUNTRY, ]: state = division.parent else: state = division if row["officename"] == "President": return government.Office.objects.get( label="President", name="President of the United States" ) elif row["officename"] == "Governor": jurisdiction = government.Jurisdiction.objects.get(division=state) return government.Office.objects.get( slug__endswith="governor", jurisdiction=jurisdiction ) elif row["officename"] == "U.S. Senate": body = government.Body.objects.get(label="U.S. Senate") if row["seatnum"] == "2" or "Class II" in row["description"]: senate_class = 2 else: senate_class = self.senate_class return government.Office.objects.get( body=body, division=state, senate_class=senate_class ) elif row["officename"].startswith("U.S. House"): body = government.Body.objects.get( label="U.S. House of Representatives" ) if row["statepostal"] in AT_LARGE_STATES: code = "00" else: if row["seatnum"]: code = ( row["seatnum"].zfill(2) if int(row["seatnum"]) < 10 else row["seatnum"] ) else: seatnum = row["seatname"].split(" ")[1] code = seatnum.zfill(2) if int(seatnum) < 10 else seatnum district = state.children.get( level__name=geography.DivisionLevel.DISTRICT, code=code ) return government.Office.objects.get(body=body, division=district)
def function[get_office, parameter[self, row, division]]: constant[ Gets the Office object for the given row of election results. Depends on knowing the division of the row of election results. ] variable[AT_LARGE_STATES] assign[=] list[[<ast.Constant object at 0x7da2045648e0>, <ast.Constant object at 0x7da204565fc0>, <ast.Constant object at 0x7da2045641f0>, <ast.Constant object at 0x7da2045656f0>, <ast.Constant object at 0x7da204565d50>, <ast.Constant object at 0x7da204567c70>, <ast.Constant object at 0x7da204564850>]] if compare[name[division].level.name <ast.NotIn object at 0x7da2590d7190> list[[<ast.Attribute object at 0x7da204567ac0>, <ast.Attribute object at 0x7da204566c20>]]] begin[:] variable[state] assign[=] name[division].parent if compare[call[name[row]][constant[officename]] equal[==] constant[President]] begin[:] return[call[name[government].Office.objects.get, parameter[]]]
keyword[def] identifier[get_office] ( identifier[self] , identifier[row] , identifier[division] ): literal[string] identifier[AT_LARGE_STATES] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[if] identifier[division] . identifier[level] . identifier[name] keyword[not] keyword[in] [ identifier[geography] . identifier[DivisionLevel] . identifier[STATE] , identifier[geography] . identifier[DivisionLevel] . identifier[COUNTRY] , ]: identifier[state] = identifier[division] . identifier[parent] keyword[else] : identifier[state] = identifier[division] keyword[if] identifier[row] [ literal[string] ]== literal[string] : keyword[return] identifier[government] . identifier[Office] . identifier[objects] . identifier[get] ( identifier[label] = literal[string] , identifier[name] = literal[string] ) keyword[elif] identifier[row] [ literal[string] ]== literal[string] : identifier[jurisdiction] = identifier[government] . identifier[Jurisdiction] . identifier[objects] . identifier[get] ( identifier[division] = identifier[state] ) keyword[return] identifier[government] . identifier[Office] . identifier[objects] . identifier[get] ( identifier[slug__endswith] = literal[string] , identifier[jurisdiction] = identifier[jurisdiction] ) keyword[elif] identifier[row] [ literal[string] ]== literal[string] : identifier[body] = identifier[government] . identifier[Body] . identifier[objects] . identifier[get] ( identifier[label] = literal[string] ) keyword[if] identifier[row] [ literal[string] ]== literal[string] keyword[or] literal[string] keyword[in] identifier[row] [ literal[string] ]: identifier[senate_class] = literal[int] keyword[else] : identifier[senate_class] = identifier[self] . identifier[senate_class] keyword[return] identifier[government] . identifier[Office] . identifier[objects] . identifier[get] ( identifier[body] = identifier[body] , identifier[division] = identifier[state] , identifier[senate_class] = identifier[senate_class] ) keyword[elif] identifier[row] [ literal[string] ]. identifier[startswith] ( literal[string] ): identifier[body] = identifier[government] . identifier[Body] . identifier[objects] . identifier[get] ( identifier[label] = literal[string] ) keyword[if] identifier[row] [ literal[string] ] keyword[in] identifier[AT_LARGE_STATES] : identifier[code] = literal[string] keyword[else] : keyword[if] identifier[row] [ literal[string] ]: identifier[code] =( identifier[row] [ literal[string] ]. identifier[zfill] ( literal[int] ) keyword[if] identifier[int] ( identifier[row] [ literal[string] ])< literal[int] keyword[else] identifier[row] [ literal[string] ] ) keyword[else] : identifier[seatnum] = identifier[row] [ literal[string] ]. identifier[split] ( literal[string] )[ literal[int] ] identifier[code] = identifier[seatnum] . identifier[zfill] ( literal[int] ) keyword[if] identifier[int] ( identifier[seatnum] )< literal[int] keyword[else] identifier[seatnum] identifier[district] = identifier[state] . identifier[children] . identifier[get] ( identifier[level__name] = identifier[geography] . identifier[DivisionLevel] . identifier[DISTRICT] , identifier[code] = identifier[code] ) keyword[return] identifier[government] . identifier[Office] . identifier[objects] . identifier[get] ( identifier[body] = identifier[body] , identifier[division] = identifier[district] )
def get_office(self, row, division): """ Gets the Office object for the given row of election results. Depends on knowing the division of the row of election results. """ AT_LARGE_STATES = ['AK', 'DE', 'MT', 'ND', 'SD', 'VT', 'WY'] if division.level.name not in [geography.DivisionLevel.STATE, geography.DivisionLevel.COUNTRY]: state = division.parent # depends on [control=['if'], data=[]] else: state = division if row['officename'] == 'President': return government.Office.objects.get(label='President', name='President of the United States') # depends on [control=['if'], data=[]] elif row['officename'] == 'Governor': jurisdiction = government.Jurisdiction.objects.get(division=state) return government.Office.objects.get(slug__endswith='governor', jurisdiction=jurisdiction) # depends on [control=['if'], data=[]] elif row['officename'] == 'U.S. Senate': body = government.Body.objects.get(label='U.S. Senate') if row['seatnum'] == '2' or 'Class II' in row['description']: senate_class = 2 # depends on [control=['if'], data=[]] else: senate_class = self.senate_class return government.Office.objects.get(body=body, division=state, senate_class=senate_class) # depends on [control=['if'], data=[]] elif row['officename'].startswith('U.S. House'): body = government.Body.objects.get(label='U.S. House of Representatives') if row['statepostal'] in AT_LARGE_STATES: code = '00' # depends on [control=['if'], data=[]] elif row['seatnum']: code = row['seatnum'].zfill(2) if int(row['seatnum']) < 10 else row['seatnum'] # depends on [control=['if'], data=[]] else: seatnum = row['seatname'].split(' ')[1] code = seatnum.zfill(2) if int(seatnum) < 10 else seatnum district = state.children.get(level__name=geography.DivisionLevel.DISTRICT, code=code) return government.Office.objects.get(body=body, division=district) # depends on [control=['if'], data=[]]
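The district-code branch above boils down to zero-padding a seat number to two digits, with a fallback to the seat name and a fixed '00' for at-large states. A self-contained rendering of just that logic; the function name and call shapes are mine, not the project's:

def district_code(statepostal, seatnum, seatname,
                  at_large=('AK', 'DE', 'MT', 'ND', 'SD', 'VT', 'WY')):
    # At-large states have a single district coded '00'.
    if statepostal in at_large:
        return '00'
    if not seatnum:
        # Fall back to parsing e.g. 'District 8' out of the seat name.
        seatnum = seatname.split(' ')[1]
    return seatnum.zfill(2) if int(seatnum) < 10 else seatnum

print(district_code('MD', '8', ''))            # 08
print(district_code('TX', '', 'District 23'))  # 23
print(district_code('WY', '', ''))             # 00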
def _scan_two_qubit_ops_into_matrix( self, circuit: circuits.Circuit, index: Optional[int], qubits: Tuple[ops.Qid, ...] ) -> Tuple[List[ops.Operation], List[int], np.ndarray]: """Accumulates operations affecting the given pair of qubits. The scan terminates when it hits the end of the circuit, finds an operation without a known matrix, or finds an operation that interacts the given qubits with other qubits. Args: circuit: The circuit to scan for operations. index: The index to start scanning forward from. qubits: The pair of qubits we care about. Returns: A tuple containing: 0. The operations. 1. The moment indices those operations were on. 2. A matrix equivalent to the effect of the scanned operations. """ product = np.eye(4, dtype=np.complex128) all_operations = [] touched_indices = [] while index is not None: operations = list({circuit.operation_at(q, index) for q in qubits}) op_data = [ self._op_to_matrix(op, qubits) for op in operations if op is not None ] # Stop at any non-constant or non-local interaction. if any(e is None for e in op_data): break present_ops = [op for op in operations if op] present_op_data = cast(List[np.ndarray], op_data) for op_mat in present_op_data: product = np.dot(op_mat, product) all_operations.extend(present_ops) touched_indices.append(index) index = circuit.next_moment_operating_on(qubits, index + 1) return all_operations, touched_indices, product
def function[_scan_two_qubit_ops_into_matrix, parameter[self, circuit, index, qubits]]: constant[Accumulates operations affecting the given pair of qubits. The scan terminates when it hits the end of the circuit, finds an operation without a known matrix, or finds an operation that interacts the given qubits with other qubits. Args: circuit: The circuit to scan for operations. index: The index to start scanning forward from. qubits: The pair of qubits we care about. Returns: A tuple containing: 0. The operations. 1. The moment indices those operations were on. 2. A matrix equivalent to the effect of the scanned operations. ] variable[product] assign[=] call[name[np].eye, parameter[constant[4]]] variable[all_operations] assign[=] list[[]] variable[touched_indices] assign[=] list[[]] while compare[name[index] is_not constant[None]] begin[:] variable[operations] assign[=] call[name[list], parameter[<ast.SetComp object at 0x7da1b1c3eda0>]] variable[op_data] assign[=] <ast.ListComp object at 0x7da1b1c184f0> if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b1c18ac0>]] begin[:] break variable[present_ops] assign[=] <ast.ListComp object at 0x7da1b1c18e20> variable[present_op_data] assign[=] call[name[cast], parameter[call[name[List]][name[np].ndarray], name[op_data]]] for taget[name[op_mat]] in starred[name[present_op_data]] begin[:] variable[product] assign[=] call[name[np].dot, parameter[name[op_mat], name[product]]] call[name[all_operations].extend, parameter[name[present_ops]]] call[name[touched_indices].append, parameter[name[index]]] variable[index] assign[=] call[name[circuit].next_moment_operating_on, parameter[name[qubits], binary_operation[name[index] + constant[1]]]] return[tuple[[<ast.Name object at 0x7da1b1c1a470>, <ast.Name object at 0x7da1b1c18d30>, <ast.Name object at 0x7da1b1c18d90>]]]
keyword[def] identifier[_scan_two_qubit_ops_into_matrix] ( identifier[self] , identifier[circuit] : identifier[circuits] . identifier[Circuit] , identifier[index] : identifier[Optional] [ identifier[int] ], identifier[qubits] : identifier[Tuple] [ identifier[ops] . identifier[Qid] ,...] )-> identifier[Tuple] [ identifier[List] [ identifier[ops] . identifier[Operation] ], identifier[List] [ identifier[int] ], identifier[np] . identifier[ndarray] ]: literal[string] identifier[product] = identifier[np] . identifier[eye] ( literal[int] , identifier[dtype] = identifier[np] . identifier[complex128] ) identifier[all_operations] =[] identifier[touched_indices] =[] keyword[while] identifier[index] keyword[is] keyword[not] keyword[None] : identifier[operations] = identifier[list] ({ identifier[circuit] . identifier[operation_at] ( identifier[q] , identifier[index] ) keyword[for] identifier[q] keyword[in] identifier[qubits] }) identifier[op_data] =[ identifier[self] . identifier[_op_to_matrix] ( identifier[op] , identifier[qubits] ) keyword[for] identifier[op] keyword[in] identifier[operations] keyword[if] identifier[op] keyword[is] keyword[not] keyword[None] ] keyword[if] identifier[any] ( identifier[e] keyword[is] keyword[None] keyword[for] identifier[e] keyword[in] identifier[op_data] ): keyword[break] identifier[present_ops] =[ identifier[op] keyword[for] identifier[op] keyword[in] identifier[operations] keyword[if] identifier[op] ] identifier[present_op_data] = identifier[cast] ( identifier[List] [ identifier[np] . identifier[ndarray] ], identifier[op_data] ) keyword[for] identifier[op_mat] keyword[in] identifier[present_op_data] : identifier[product] = identifier[np] . identifier[dot] ( identifier[op_mat] , identifier[product] ) identifier[all_operations] . identifier[extend] ( identifier[present_ops] ) identifier[touched_indices] . identifier[append] ( identifier[index] ) identifier[index] = identifier[circuit] . identifier[next_moment_operating_on] ( identifier[qubits] , identifier[index] + literal[int] ) keyword[return] identifier[all_operations] , identifier[touched_indices] , identifier[product]
def _scan_two_qubit_ops_into_matrix(self, circuit: circuits.Circuit, index: Optional[int], qubits: Tuple[ops.Qid, ...]) -> Tuple[List[ops.Operation], List[int], np.ndarray]: """Accumulates operations affecting the given pair of qubits. The scan terminates when it hits the end of the circuit, finds an operation without a known matrix, or finds an operation that interacts the given qubits with other qubits. Args: circuit: The circuit to scan for operations. index: The index to start scanning forward from. qubits: The pair of qubits we care about. Returns: A tuple containing: 0. The operations. 1. The moment indices those operations were on. 2. A matrix equivalent to the effect of the scanned operations. """ product = np.eye(4, dtype=np.complex128) all_operations = [] touched_indices = [] while index is not None: operations = list({circuit.operation_at(q, index) for q in qubits}) op_data = [self._op_to_matrix(op, qubits) for op in operations if op is not None] # Stop at any non-constant or non-local interaction. if any((e is None for e in op_data)): break # depends on [control=['if'], data=[]] present_ops = [op for op in operations if op] present_op_data = cast(List[np.ndarray], op_data) for op_mat in present_op_data: product = np.dot(op_mat, product) # depends on [control=['for'], data=['op_mat']] all_operations.extend(present_ops) touched_indices.append(index) index = circuit.next_moment_operating_on(qubits, index + 1) # depends on [control=['while'], data=['index']] return (all_operations, touched_indices, product)
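The product update in the loop is plain matrix composition in time order: later operations multiply on the left. A toy check with two fixed 4x4 gates; the matrices are standard CNOT and SWAP, and nothing here depends on cirq:

import numpy as np

CNOT = np.array([[1, 0, 0, 0],
                 [0, 1, 0, 0],
                 [0, 0, 0, 1],
                 [0, 0, 1, 0]], dtype=np.complex128)
SWAP = np.array([[1, 0, 0, 0],
                 [0, 0, 1, 0],
                 [0, 1, 0, 0],
                 [0, 0, 0, 1]], dtype=np.complex128)

product = np.eye(4, dtype=np.complex128)
for op_mat in [CNOT, SWAP]:            # operations in time order
    product = np.dot(op_mat, product)  # later ops multiply on the left

# product now equals SWAP @ CNOT, the net effect of applying CNOT first.
assert np.allclose(product, SWAP @ CNOT)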
def is_valid(self): """ Is the current entity valid. Returns ----------- valid : bool Is the current entity well formed """ valid = np.any((self.points - self.points[0]) != 0) return valid
def function[is_valid, parameter[self]]: constant[ Is the current entity valid. Returns ----------- valid : bool Is the current entity well formed ] variable[valid] assign[=] call[name[np].any, parameter[compare[binary_operation[name[self].points - call[name[self].points][constant[0]]] not_equal[!=] constant[0]]]] return[name[valid]]
keyword[def] identifier[is_valid] ( identifier[self] ): literal[string] identifier[valid] = identifier[np] . identifier[any] (( identifier[self] . identifier[points] - identifier[self] . identifier[points] [ literal[int] ])!= literal[int] ) keyword[return] identifier[valid]
def is_valid(self): """ Is the current entity valid. Returns ----------- valid : bool Is the current entity well formed """ valid = np.any(self.points - self.points[0] != 0) return valid
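The check above asks whether any vertex differs from the first one, so an entity whose points all coincide is flagged as degenerate. Two quick cases with made-up coordinates:

import numpy as np

points = np.array([[0.0, 0.0], [0.0, 0.0], [0.0, 0.0]])
print(np.any((points - points[0]) != 0))   # False: degenerate entity

points = np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0]])
print(np.any((points - points[0]) != 0))   # True: well formed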
def add_template_events_to_network(self, columns, vectors):
        """ Add vectors indexed by column name to the network's template events. """
        # Just call through to the standard function
        self.template_events = self.template_event_dict['network']
        self.add_template_network_events(columns, vectors)
        self.template_event_dict['network'] = self.template_events
        self.template_events = None
def function[add_template_events_to_network, parameter[self, columns, vectors]]: constant[ Add vectors indexed by column name to the network's template events. ] name[self].template_events assign[=] call[name[self].template_event_dict][constant[network]] call[name[self].add_template_network_events, parameter[name[columns], name[vectors]]] call[name[self].template_event_dict][constant[network]] assign[=] name[self].template_events name[self].template_events assign[=] constant[None]
keyword[def] identifier[add_template_events_to_network] ( identifier[self] , identifier[columns] , identifier[vectors] ): literal[string] identifier[self] . identifier[template_events] = identifier[self] . identifier[template_event_dict] [ literal[string] ] identifier[self] . identifier[add_template_network_events] ( identifier[columns] , identifier[vectors] ) identifier[self] . identifier[template_event_dict] [ literal[string] ]= identifier[self] . identifier[template_events] identifier[self] . identifier[template_events] = keyword[None]
def add_template_events_to_network(self, columns, vectors):
        """ Add vectors indexed by column name to the network's template events. """
        # Just call through to the standard function
        self.template_events = self.template_event_dict['network']
        self.add_template_network_events(columns, vectors)
        self.template_event_dict['network'] = self.template_events
        self.template_events = None
def insertComponent(self, comp, row=0, col=0): """Inserts component into model :param comp: Component to insert into the stimulus :type comp: :class:`AbstractStimulusComponent<sparkle.stim.abstract_component.AbstractStimulusComponent>` :param row: Track number to place comp in :type row: int :param col: location in track to insert component to :type col: int """ if row > len(self._segments) -1: self.insertEmptyRow() self._segments[row].insert(col, comp) # in case of samplerate change, just always update self.updateCalibration()
def function[insertComponent, parameter[self, comp, row, col]]: constant[Inserts component into model :param comp: Component to insert into the stimulus :type comp: :class:`AbstractStimulusComponent<sparkle.stim.abstract_component.AbstractStimulusComponent>` :param row: Track number to place comp in :type row: int :param col: location in track to insert component to :type col: int ] if compare[name[row] greater[>] binary_operation[call[name[len], parameter[name[self]._segments]] - constant[1]]] begin[:] call[name[self].insertEmptyRow, parameter[]] call[call[name[self]._segments][name[row]].insert, parameter[name[col], name[comp]]] call[name[self].updateCalibration, parameter[]]
keyword[def] identifier[insertComponent] ( identifier[self] , identifier[comp] , identifier[row] = literal[int] , identifier[col] = literal[int] ): literal[string] keyword[if] identifier[row] > identifier[len] ( identifier[self] . identifier[_segments] )- literal[int] : identifier[self] . identifier[insertEmptyRow] () identifier[self] . identifier[_segments] [ identifier[row] ]. identifier[insert] ( identifier[col] , identifier[comp] ) identifier[self] . identifier[updateCalibration] ()
def insertComponent(self, comp, row=0, col=0): """Inserts component into model :param comp: Component to insert into the stimulus :type comp: :class:`AbstractStimulusComponent<sparkle.stim.abstract_component.AbstractStimulusComponent>` :param row: Track number to place comp in :type row: int :param col: location in track to insert component to :type col: int """ if row > len(self._segments) - 1: self.insertEmptyRow() # depends on [control=['if'], data=[]] self._segments[row].insert(col, comp) # in case of samplerate change, just always update self.updateCalibration()
def likelihood_weighted_sample(self, evidence=None, size=1, return_type="dataframe"): """ Generates weighted sample(s) from joint distribution of the bayesian network, that comply with the given evidence. 'Probabilistic Graphical Model Principles and Techniques', Koller and Friedman, Algorithm 12.2 pp 493. Parameters ---------- evidence: list of `pgmpy.factor.State` namedtuples None if no evidence size: int size of sample to be generated return_type: string (dataframe | recarray) Return type for samples, either of 'dataframe' or 'recarray'. Defaults to 'dataframe' Returns ------- sampled: A pandas.DataFrame or a numpy.recarray object depending upon return_type argument the generated samples with corresponding weights Examples -------- >>> from pgmpy.factors.discrete import State >>> from pgmpy.models.BayesianModel import BayesianModel >>> from pgmpy.factors.discrete import TabularCPD >>> from pgmpy.sampling import BayesianModelSampling >>> student = BayesianModel([('diff', 'grade'), ('intel', 'grade')]) >>> cpd_d = TabularCPD('diff', 2, [[0.6], [0.4]]) >>> cpd_i = TabularCPD('intel', 2, [[0.7], [0.3]]) >>> cpd_g = TabularCPD('grade', 3, [[0.3, 0.05, 0.9, 0.5], [0.4, 0.25, ... 0.08, 0.3], [0.3, 0.7, 0.02, 0.2]], ... ['intel', 'diff'], [2, 2]) >>> student.add_cpds(cpd_d, cpd_i, cpd_g) >>> inference = BayesianModelSampling(student) >>> evidence = [State('diff', 0)] >>> inference.likelihood_weighted_sample(evidence=evidence, size=2, return_type='recarray') rec.array([(0, 0, 1, 0.6), (0, 0, 2, 0.6)], dtype= [('diff', '<i8'), ('intel', '<i8'), ('grade', '<i8'), ('_weight', '<f8')]) """ types = [(var_name, 'int') for var_name in self.topological_order] types.append(('_weight', 'float')) sampled = np.zeros(size, dtype=types).view(np.recarray) sampled['_weight'] = np.ones(size) evidence_dict = {var: st for var, st in evidence} for node in self.topological_order: cpd = self.model.get_cpds(node) states = range(self.cardinality[node]) evidence = cpd.get_evidence() if evidence: evidence_values = np.vstack([sampled[i] for i in evidence]) cached_values = self.pre_compute_reduce(node) weights = list(map(lambda t: cached_values[tuple(t)], evidence_values.T)) if node in evidence_dict: sampled[node] = evidence_dict[node] for i in range(size): sampled['_weight'][i] *= weights[i][evidence_dict[node]] else: sampled[node] = sample_discrete(states, weights) else: if node in evidence_dict: sampled[node] = evidence_dict[node] for i in range(size): sampled['_weight'][i] *= cpd.values[evidence_dict[node]] else: sampled[node] = sample_discrete(states, cpd.values, size) return _return_samples(return_type, sampled)
def function[likelihood_weighted_sample, parameter[self, evidence, size, return_type]]: constant[ Generates weighted sample(s) from joint distribution of the bayesian network, that comply with the given evidence. 'Probabilistic Graphical Model Principles and Techniques', Koller and Friedman, Algorithm 12.2 pp 493. Parameters ---------- evidence: list of `pgmpy.factor.State` namedtuples None if no evidence size: int size of sample to be generated return_type: string (dataframe | recarray) Return type for samples, either of 'dataframe' or 'recarray'. Defaults to 'dataframe' Returns ------- sampled: A pandas.DataFrame or a numpy.recarray object depending upon return_type argument the generated samples with corresponding weights Examples -------- >>> from pgmpy.factors.discrete import State >>> from pgmpy.models.BayesianModel import BayesianModel >>> from pgmpy.factors.discrete import TabularCPD >>> from pgmpy.sampling import BayesianModelSampling >>> student = BayesianModel([('diff', 'grade'), ('intel', 'grade')]) >>> cpd_d = TabularCPD('diff', 2, [[0.6], [0.4]]) >>> cpd_i = TabularCPD('intel', 2, [[0.7], [0.3]]) >>> cpd_g = TabularCPD('grade', 3, [[0.3, 0.05, 0.9, 0.5], [0.4, 0.25, ... 0.08, 0.3], [0.3, 0.7, 0.02, 0.2]], ... ['intel', 'diff'], [2, 2]) >>> student.add_cpds(cpd_d, cpd_i, cpd_g) >>> inference = BayesianModelSampling(student) >>> evidence = [State('diff', 0)] >>> inference.likelihood_weighted_sample(evidence=evidence, size=2, return_type='recarray') rec.array([(0, 0, 1, 0.6), (0, 0, 2, 0.6)], dtype= [('diff', '<i8'), ('intel', '<i8'), ('grade', '<i8'), ('_weight', '<f8')]) ] variable[types] assign[=] <ast.ListComp object at 0x7da18eb55870> call[name[types].append, parameter[tuple[[<ast.Constant object at 0x7da18eb56d10>, <ast.Constant object at 0x7da18eb56410>]]]] variable[sampled] assign[=] call[call[name[np].zeros, parameter[name[size]]].view, parameter[name[np].recarray]] call[name[sampled]][constant[_weight]] assign[=] call[name[np].ones, parameter[name[size]]] variable[evidence_dict] assign[=] <ast.DictComp object at 0x7da18eb56ef0> for taget[name[node]] in starred[name[self].topological_order] begin[:] variable[cpd] assign[=] call[name[self].model.get_cpds, parameter[name[node]]] variable[states] assign[=] call[name[range], parameter[call[name[self].cardinality][name[node]]]] variable[evidence] assign[=] call[name[cpd].get_evidence, parameter[]] if name[evidence] begin[:] variable[evidence_values] assign[=] call[name[np].vstack, parameter[<ast.ListComp object at 0x7da18eb55210>]] variable[cached_values] assign[=] call[name[self].pre_compute_reduce, parameter[name[node]]] variable[weights] assign[=] call[name[list], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da18eb56440>, name[evidence_values].T]]]] if compare[name[node] in name[evidence_dict]] begin[:] call[name[sampled]][name[node]] assign[=] call[name[evidence_dict]][name[node]] for taget[name[i]] in starred[call[name[range], parameter[name[size]]]] begin[:] <ast.AugAssign object at 0x7da18eb57430> return[call[name[_return_samples], parameter[name[return_type], name[sampled]]]]
keyword[def] identifier[likelihood_weighted_sample] ( identifier[self] , identifier[evidence] = keyword[None] , identifier[size] = literal[int] , identifier[return_type] = literal[string] ): literal[string] identifier[types] =[( identifier[var_name] , literal[string] ) keyword[for] identifier[var_name] keyword[in] identifier[self] . identifier[topological_order] ] identifier[types] . identifier[append] (( literal[string] , literal[string] )) identifier[sampled] = identifier[np] . identifier[zeros] ( identifier[size] , identifier[dtype] = identifier[types] ). identifier[view] ( identifier[np] . identifier[recarray] ) identifier[sampled] [ literal[string] ]= identifier[np] . identifier[ones] ( identifier[size] ) identifier[evidence_dict] ={ identifier[var] : identifier[st] keyword[for] identifier[var] , identifier[st] keyword[in] identifier[evidence] } keyword[for] identifier[node] keyword[in] identifier[self] . identifier[topological_order] : identifier[cpd] = identifier[self] . identifier[model] . identifier[get_cpds] ( identifier[node] ) identifier[states] = identifier[range] ( identifier[self] . identifier[cardinality] [ identifier[node] ]) identifier[evidence] = identifier[cpd] . identifier[get_evidence] () keyword[if] identifier[evidence] : identifier[evidence_values] = identifier[np] . identifier[vstack] ([ identifier[sampled] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[evidence] ]) identifier[cached_values] = identifier[self] . identifier[pre_compute_reduce] ( identifier[node] ) identifier[weights] = identifier[list] ( identifier[map] ( keyword[lambda] identifier[t] : identifier[cached_values] [ identifier[tuple] ( identifier[t] )], identifier[evidence_values] . identifier[T] )) keyword[if] identifier[node] keyword[in] identifier[evidence_dict] : identifier[sampled] [ identifier[node] ]= identifier[evidence_dict] [ identifier[node] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[size] ): identifier[sampled] [ literal[string] ][ identifier[i] ]*= identifier[weights] [ identifier[i] ][ identifier[evidence_dict] [ identifier[node] ]] keyword[else] : identifier[sampled] [ identifier[node] ]= identifier[sample_discrete] ( identifier[states] , identifier[weights] ) keyword[else] : keyword[if] identifier[node] keyword[in] identifier[evidence_dict] : identifier[sampled] [ identifier[node] ]= identifier[evidence_dict] [ identifier[node] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[size] ): identifier[sampled] [ literal[string] ][ identifier[i] ]*= identifier[cpd] . identifier[values] [ identifier[evidence_dict] [ identifier[node] ]] keyword[else] : identifier[sampled] [ identifier[node] ]= identifier[sample_discrete] ( identifier[states] , identifier[cpd] . identifier[values] , identifier[size] ) keyword[return] identifier[_return_samples] ( identifier[return_type] , identifier[sampled] )
def likelihood_weighted_sample(self, evidence=None, size=1, return_type='dataframe'): """ Generates weighted sample(s) from joint distribution of the bayesian network, that comply with the given evidence. 'Probabilistic Graphical Model Principles and Techniques', Koller and Friedman, Algorithm 12.2 pp 493. Parameters ---------- evidence: list of `pgmpy.factor.State` namedtuples None if no evidence size: int size of sample to be generated return_type: string (dataframe | recarray) Return type for samples, either of 'dataframe' or 'recarray'. Defaults to 'dataframe' Returns ------- sampled: A pandas.DataFrame or a numpy.recarray object depending upon return_type argument the generated samples with corresponding weights Examples -------- >>> from pgmpy.factors.discrete import State >>> from pgmpy.models.BayesianModel import BayesianModel >>> from pgmpy.factors.discrete import TabularCPD >>> from pgmpy.sampling import BayesianModelSampling >>> student = BayesianModel([('diff', 'grade'), ('intel', 'grade')]) >>> cpd_d = TabularCPD('diff', 2, [[0.6], [0.4]]) >>> cpd_i = TabularCPD('intel', 2, [[0.7], [0.3]]) >>> cpd_g = TabularCPD('grade', 3, [[0.3, 0.05, 0.9, 0.5], [0.4, 0.25, ... 0.08, 0.3], [0.3, 0.7, 0.02, 0.2]], ... ['intel', 'diff'], [2, 2]) >>> student.add_cpds(cpd_d, cpd_i, cpd_g) >>> inference = BayesianModelSampling(student) >>> evidence = [State('diff', 0)] >>> inference.likelihood_weighted_sample(evidence=evidence, size=2, return_type='recarray') rec.array([(0, 0, 1, 0.6), (0, 0, 2, 0.6)], dtype= [('diff', '<i8'), ('intel', '<i8'), ('grade', '<i8'), ('_weight', '<f8')]) """ types = [(var_name, 'int') for var_name in self.topological_order] types.append(('_weight', 'float')) sampled = np.zeros(size, dtype=types).view(np.recarray) sampled['_weight'] = np.ones(size) evidence_dict = {var: st for (var, st) in evidence} for node in self.topological_order: cpd = self.model.get_cpds(node) states = range(self.cardinality[node]) evidence = cpd.get_evidence() if evidence: evidence_values = np.vstack([sampled[i] for i in evidence]) cached_values = self.pre_compute_reduce(node) weights = list(map(lambda t: cached_values[tuple(t)], evidence_values.T)) if node in evidence_dict: sampled[node] = evidence_dict[node] for i in range(size): sampled['_weight'][i] *= weights[i][evidence_dict[node]] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=['node', 'evidence_dict']] else: sampled[node] = sample_discrete(states, weights) # depends on [control=['if'], data=[]] elif node in evidence_dict: sampled[node] = evidence_dict[node] for i in range(size): sampled['_weight'][i] *= cpd.values[evidence_dict[node]] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=['node', 'evidence_dict']] else: sampled[node] = sample_discrete(states, cpd.values, size) # depends on [control=['for'], data=['node']] return _return_samples(return_type, sampled)
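The weight update inside the loop is the core of likelihood weighting: evidence nodes are clamped rather than sampled, and each one multiplies the sample weight by the probability of its observed state given the sampled parents. A stripped-down two-node illustration (diff -> grade, evidence grade=0), hand-rolled with numpy rather than pgmpy's API:

import numpy as np

rng = np.random.default_rng(0)
p_diff = np.array([0.6, 0.4])        # P(diff)
p_grade = np.array([[0.3, 0.9],      # P(grade | diff), rows grade, cols diff
                    [0.7, 0.1]])
evidence_grade = 0

samples, weights = [], []
for _ in range(5):
    diff = rng.choice(2, p=p_diff)           # sample the non-evidence node
    weight = p_grade[evidence_grade, diff]   # weight by P(grade=0 | diff)
    samples.append((diff, evidence_grade))
    weights.append(weight)

print(samples)
print(weights)   # each weight is 0.3 or 0.9, depending on the sampled diff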
def _notify_deleted_file(self):
        """
        Notify the user of an external file deletion.
        """
        self.file_deleted.emit(self.editor)
        # file deleted, disable file watcher
        self.enabled = False
def function[_notify_deleted_file, parameter[self]]: constant[ Notify the user of an external file deletion. ] call[name[self].file_deleted.emit, parameter[name[self].editor]] name[self].enabled assign[=] constant[False]
keyword[def] identifier[_notify_deleted_file] ( identifier[self] ): literal[string] identifier[self] . identifier[file_deleted] . identifier[emit] ( identifier[self] . identifier[editor] ) identifier[self] . identifier[enabled] = keyword[False]
def _notify_deleted_file(self):
        """
        Notify the user of an external file deletion.
        """
        self.file_deleted.emit(self.editor)
        # file deleted, disable file watcher
        self.enabled = False
def sync_skills_data(self): """ Update internal skill_data_structure from disk. """ self.skills_data = self.load_skills_data() if 'upgraded' in self.skills_data: self.skills_data.pop('upgraded') else: self.skills_data_hash = skills_data_hash(self.skills_data)
def function[sync_skills_data, parameter[self]]: constant[ Update internal skill_data_structure from disk. ] name[self].skills_data assign[=] call[name[self].load_skills_data, parameter[]] if compare[constant[upgraded] in name[self].skills_data] begin[:] call[name[self].skills_data.pop, parameter[constant[upgraded]]]
keyword[def] identifier[sync_skills_data] ( identifier[self] ): literal[string] identifier[self] . identifier[skills_data] = identifier[self] . identifier[load_skills_data] () keyword[if] literal[string] keyword[in] identifier[self] . identifier[skills_data] : identifier[self] . identifier[skills_data] . identifier[pop] ( literal[string] ) keyword[else] : identifier[self] . identifier[skills_data_hash] = identifier[skills_data_hash] ( identifier[self] . identifier[skills_data] )
def sync_skills_data(self): """ Update internal skill_data_structure from disk. """ self.skills_data = self.load_skills_data() if 'upgraded' in self.skills_data: self.skills_data.pop('upgraded') # depends on [control=['if'], data=[]] else: self.skills_data_hash = skills_data_hash(self.skills_data)
def makeDependencyMap(aMap): """ create a dependency data structure as follows: - Each key in aMap represents an item that depends on each item in the iterable which is that key's value - Each Node represents an item which is a precursor to its parents and depends on its children Returns a map whose keys are the items described in aMap and whose values are the dependency (sub)tree for that item Thus, for aMap = {a:(b,c), b:(d,), c:[]}, returns {a:Node(a),b:Node(b),c:Node(c),d:Node(d)} where - Node(a) has no parent and children: Node(b) and Node(c) - Node(b) has parent: Node(a) and child: Node(d) - Node(c) has parent: Node(a) and no child - Node(d) which was not a key in aMap was created. It has parent: Node(b) and no child This map is used to find the precursors for a given item by using BottomUpVisitor on the Node associated with that item """ index = {} for i in aMap.keys(): iNode = index.get(i,None) if not iNode: iNode = Node(i) index[i] = iNode for c in aMap[i]: cNode = index.get(c,None) if not cNode: cNode = Node(c) index[c] = cNode iNode.addChild(cNode) return index
def function[makeDependencyMap, parameter[aMap]]: constant[ create a dependency data structure as follows: - Each key in aMap represents an item that depends on each item in the iterable which is that key's value - Each Node represents an item which is a precursor to its parents and depends on its children Returns a map whose keys are the items described in aMap and whose values are the dependency (sub)tree for that item Thus, for aMap = {a:(b,c), b:(d,), c:[]}, returns {a:Node(a),b:Node(b),c:Node(c),d:Node(d)} where - Node(a) has no parent and children: Node(b) and Node(c) - Node(b) has parent: Node(a) and child: Node(d) - Node(c) has parent: Node(a) and no child - Node(d) which was not a key in aMap was created. It has parent: Node(b) and no child This map is used to find the precursors for a given item by using BottomUpVisitor on the Node associated with that item ] variable[index] assign[=] dictionary[[], []] for taget[name[i]] in starred[call[name[aMap].keys, parameter[]]] begin[:] variable[iNode] assign[=] call[name[index].get, parameter[name[i], constant[None]]] if <ast.UnaryOp object at 0x7da207f9a680> begin[:] variable[iNode] assign[=] call[name[Node], parameter[name[i]]] call[name[index]][name[i]] assign[=] name[iNode] for taget[name[c]] in starred[call[name[aMap]][name[i]]] begin[:] variable[cNode] assign[=] call[name[index].get, parameter[name[c], constant[None]]] if <ast.UnaryOp object at 0x7da207f9aa10> begin[:] variable[cNode] assign[=] call[name[Node], parameter[name[c]]] call[name[index]][name[c]] assign[=] name[cNode] call[name[iNode].addChild, parameter[name[cNode]]] return[name[index]]
keyword[def] identifier[makeDependencyMap] ( identifier[aMap] ): literal[string] identifier[index] ={} keyword[for] identifier[i] keyword[in] identifier[aMap] . identifier[keys] (): identifier[iNode] = identifier[index] . identifier[get] ( identifier[i] , keyword[None] ) keyword[if] keyword[not] identifier[iNode] : identifier[iNode] = identifier[Node] ( identifier[i] ) identifier[index] [ identifier[i] ]= identifier[iNode] keyword[for] identifier[c] keyword[in] identifier[aMap] [ identifier[i] ]: identifier[cNode] = identifier[index] . identifier[get] ( identifier[c] , keyword[None] ) keyword[if] keyword[not] identifier[cNode] : identifier[cNode] = identifier[Node] ( identifier[c] ) identifier[index] [ identifier[c] ]= identifier[cNode] identifier[iNode] . identifier[addChild] ( identifier[cNode] ) keyword[return] identifier[index]
def makeDependencyMap(aMap): """ create a dependency data structure as follows: - Each key in aMap represents an item that depends on each item in the iterable which is that key's value - Each Node represents an item which is a precursor to its parents and depends on its children Returns a map whose keys are the items described in aMap and whose values are the dependency (sub)tree for that item Thus, for aMap = {a:(b,c), b:(d,), c:[]}, returns {a:Node(a),b:Node(b),c:Node(c),d:Node(d)} where - Node(a) has no parent and children: Node(b) and Node(c) - Node(b) has parent: Node(a) and child: Node(d) - Node(c) has parent: Node(a) and no child - Node(d) which was not a key in aMap was created. It has parent: Node(b) and no child This map is used to find the precursors for a given item by using BottomUpVisitor on the Node associated with that item """ index = {} for i in aMap.keys(): iNode = index.get(i, None) if not iNode: iNode = Node(i) index[i] = iNode # depends on [control=['if'], data=[]] for c in aMap[i]: cNode = index.get(c, None) if not cNode: cNode = Node(c) index[c] = cNode # depends on [control=['if'], data=[]] iNode.addChild(cNode) # depends on [control=['for'], data=['c']] # depends on [control=['for'], data=['i']] return index
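A minimal Node that satisfies the contract assumed above (an item, an addChild method, and parents/children lists), exercised on the docstring's own example via the makeDependencyMap defined above; the real Node class may carry more state than this sketch.

class Node:
    def __init__(self, item):
        self.item = item
        self.parents = []
        self.children = []

    def addChild(self, child):
        # Wire both directions so the dependency (sub)tree can be walked either way.
        self.children.append(child)
        child.parents.append(self)

index = makeDependencyMap({'a': ('b', 'c'), 'b': ('d',), 'c': []})
print(sorted(index))                           # ['a', 'b', 'c', 'd']
print([c.item for c in index['a'].children])   # ['b', 'c']
print([p.item for p in index['d'].parents])    # ['b']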
def get_house_detail(self, html): """Get bedroom, bathroom, sqft and more information. Example: http://www.zillow.com/homedetails/8510-Whittier-Blvd-Bethesda-MD-20817/37183103_zpid/ """ if "I'm not a robot" in html: raise exc.CaptchaError(url) data = {"errors": dict()} soup = self.get_soup(html) # header part, bedroom, bathroom, sqft header = soup.find("header", class_="zsg-content-header addr") if header is None: raise exc.ParseError(url) try: h3 = header.find("h3") if h3 is None: raise exc.ParseError span_list = h3.find_all("span", class_="addr_bbs") if len(span_list) != 3: raise exc.ParseError text = span_list[0].text try: bedroom = float_filter(text) data["bedroom"] = bedroom except: data["errors"][ "bedroom"] = "can't parse bedroom from %r" % text text = span_list[1].text try: bathroom = float_filter(text) data["bathroom"] = bathroom except: data["errors"][ "bathroom"] = "can't parse bathroom from %r" % text text = span_list[2].text try: sqft = int_filter(text) data["sqft"] = sqft except: data["errors"]["sqft"] = "can't parse sqft from %r" % text except: pass # Facts, Features, Construction, Other (FFCO) div_list = soup.find_all( "div", class_=re.compile("fact-group-container zsg-content-component")) for div in div_list: h3 = div.find("h3") if h3.text == "Facts": try: facts = list() for li in div.find_all("li"): facts.append(li.text.strip()) data["facts"] = facts except Exception as e: data["errors"]["facts"] = str(e) if h3.text == "Features": features = list() try: for li in div.find_all("li"): if '"targetDiv"' not in li.text: features.append(li.text.strip()) data["features"] = features except Exception as e: data["errors"]["features"] = repr(e) if h3.text == "Appliances Included": appliances = list() try: for li in div.find_all("li"): appliances.append(li.text.strip()) data["appliances"] = appliances except Exception as e: data["errors"]["appliances"] = repr(e) if h3.text == "Additional Features": additional_features = list() try: for li in div.find_all("li"): additional_features.append(li.text.strip()) data["additional_features"] = additional_features except Exception as e: data["errors"]["additional_features"] = repr(e) if h3.text == "Construction": construction = list() try: for li in div.find_all("li"): construction.append(li.text.strip()) data["construction"] = construction except Exception as e: data["errors"]["construction"] = repr(e) if h3.text == "Other": other = list() try: for li in div.find_all("li"): other.append(li.text.strip()) data["other"] = other except Exception as e: data["errors"]["other"] = repr(e) if len(data["errors"]) == 0: del data["errors"] if data: return data else: return None
def function[get_house_detail, parameter[self, html]]: constant[Get bedroom, bathroom, sqft and more information. Example: http://www.zillow.com/homedetails/8510-Whittier-Blvd-Bethesda-MD-20817/37183103_zpid/ ] if compare[constant[I'm not a robot] in name[html]] begin[:] <ast.Raise object at 0x7da1b28df820> variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b28df6a0>], [<ast.Call object at 0x7da1b28df670>]] variable[soup] assign[=] call[name[self].get_soup, parameter[name[html]]] variable[header] assign[=] call[name[soup].find, parameter[constant[header]]] if compare[name[header] is constant[None]] begin[:] <ast.Raise object at 0x7da1b28df2b0> <ast.Try object at 0x7da1b28df1c0> variable[div_list] assign[=] call[name[soup].find_all, parameter[constant[div]]] for taget[name[div]] in starred[name[div_list]] begin[:] variable[h3] assign[=] call[name[div].find, parameter[constant[h3]]] if compare[name[h3].text equal[==] constant[Facts]] begin[:] <ast.Try object at 0x7da1b28dd780> if compare[name[h3].text equal[==] constant[Features]] begin[:] variable[features] assign[=] call[name[list], parameter[]] <ast.Try object at 0x7da1b28dcf10> if compare[name[h3].text equal[==] constant[Appliances Included]] begin[:] variable[appliances] assign[=] call[name[list], parameter[]] <ast.Try object at 0x7da1b28dc670> if compare[name[h3].text equal[==] constant[Additional Features]] begin[:] variable[additional_features] assign[=] call[name[list], parameter[]] <ast.Try object at 0x7da1b28fbe80> if compare[name[h3].text equal[==] constant[Construction]] begin[:] variable[construction] assign[=] call[name[list], parameter[]] <ast.Try object at 0x7da1b28fb6d0> if compare[name[h3].text equal[==] constant[Other]] begin[:] variable[other] assign[=] call[name[list], parameter[]] <ast.Try object at 0x7da1b28faf20> if compare[call[name[len], parameter[call[name[data]][constant[errors]]]] equal[==] constant[0]] begin[:] <ast.Delete object at 0x7da1b28fa7a0> if name[data] begin[:] return[name[data]]
keyword[def] identifier[get_house_detail] ( identifier[self] , identifier[html] ): literal[string] keyword[if] literal[string] keyword[in] identifier[html] : keyword[raise] identifier[exc] . identifier[CaptchaError] ( identifier[url] ) identifier[data] ={ literal[string] : identifier[dict] ()} identifier[soup] = identifier[self] . identifier[get_soup] ( identifier[html] ) identifier[header] = identifier[soup] . identifier[find] ( literal[string] , identifier[class_] = literal[string] ) keyword[if] identifier[header] keyword[is] keyword[None] : keyword[raise] identifier[exc] . identifier[ParseError] ( identifier[url] ) keyword[try] : identifier[h3] = identifier[header] . identifier[find] ( literal[string] ) keyword[if] identifier[h3] keyword[is] keyword[None] : keyword[raise] identifier[exc] . identifier[ParseError] identifier[span_list] = identifier[h3] . identifier[find_all] ( literal[string] , identifier[class_] = literal[string] ) keyword[if] identifier[len] ( identifier[span_list] )!= literal[int] : keyword[raise] identifier[exc] . identifier[ParseError] identifier[text] = identifier[span_list] [ literal[int] ]. identifier[text] keyword[try] : identifier[bedroom] = identifier[float_filter] ( identifier[text] ) identifier[data] [ literal[string] ]= identifier[bedroom] keyword[except] : identifier[data] [ literal[string] ][ literal[string] ]= literal[string] % identifier[text] identifier[text] = identifier[span_list] [ literal[int] ]. identifier[text] keyword[try] : identifier[bathroom] = identifier[float_filter] ( identifier[text] ) identifier[data] [ literal[string] ]= identifier[bathroom] keyword[except] : identifier[data] [ literal[string] ][ literal[string] ]= literal[string] % identifier[text] identifier[text] = identifier[span_list] [ literal[int] ]. identifier[text] keyword[try] : identifier[sqft] = identifier[int_filter] ( identifier[text] ) identifier[data] [ literal[string] ]= identifier[sqft] keyword[except] : identifier[data] [ literal[string] ][ literal[string] ]= literal[string] % identifier[text] keyword[except] : keyword[pass] identifier[div_list] = identifier[soup] . identifier[find_all] ( literal[string] , identifier[class_] = identifier[re] . identifier[compile] ( literal[string] )) keyword[for] identifier[div] keyword[in] identifier[div_list] : identifier[h3] = identifier[div] . identifier[find] ( literal[string] ) keyword[if] identifier[h3] . identifier[text] == literal[string] : keyword[try] : identifier[facts] = identifier[list] () keyword[for] identifier[li] keyword[in] identifier[div] . identifier[find_all] ( literal[string] ): identifier[facts] . identifier[append] ( identifier[li] . identifier[text] . identifier[strip] ()) identifier[data] [ literal[string] ]= identifier[facts] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[data] [ literal[string] ][ literal[string] ]= identifier[str] ( identifier[e] ) keyword[if] identifier[h3] . identifier[text] == literal[string] : identifier[features] = identifier[list] () keyword[try] : keyword[for] identifier[li] keyword[in] identifier[div] . identifier[find_all] ( literal[string] ): keyword[if] literal[string] keyword[not] keyword[in] identifier[li] . identifier[text] : identifier[features] . identifier[append] ( identifier[li] . identifier[text] . identifier[strip] ()) identifier[data] [ literal[string] ]= identifier[features] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[data] [ literal[string] ][ literal[string] ]= identifier[repr] ( identifier[e] ) keyword[if] identifier[h3] . identifier[text] == literal[string] : identifier[appliances] = identifier[list] () keyword[try] : keyword[for] identifier[li] keyword[in] identifier[div] . identifier[find_all] ( literal[string] ): identifier[appliances] . identifier[append] ( identifier[li] . identifier[text] . identifier[strip] ()) identifier[data] [ literal[string] ]= identifier[appliances] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[data] [ literal[string] ][ literal[string] ]= identifier[repr] ( identifier[e] ) keyword[if] identifier[h3] . identifier[text] == literal[string] : identifier[additional_features] = identifier[list] () keyword[try] : keyword[for] identifier[li] keyword[in] identifier[div] . identifier[find_all] ( literal[string] ): identifier[additional_features] . identifier[append] ( identifier[li] . identifier[text] . identifier[strip] ()) identifier[data] [ literal[string] ]= identifier[additional_features] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[data] [ literal[string] ][ literal[string] ]= identifier[repr] ( identifier[e] ) keyword[if] identifier[h3] . identifier[text] == literal[string] : identifier[construction] = identifier[list] () keyword[try] : keyword[for] identifier[li] keyword[in] identifier[div] . identifier[find_all] ( literal[string] ): identifier[construction] . identifier[append] ( identifier[li] . identifier[text] . identifier[strip] ()) identifier[data] [ literal[string] ]= identifier[construction] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[data] [ literal[string] ][ literal[string] ]= identifier[repr] ( identifier[e] ) keyword[if] identifier[h3] . identifier[text] == literal[string] : identifier[other] = identifier[list] () keyword[try] : keyword[for] identifier[li] keyword[in] identifier[div] . identifier[find_all] ( literal[string] ): identifier[other] . identifier[append] ( identifier[li] . identifier[text] . identifier[strip] ()) identifier[data] [ literal[string] ]= identifier[other] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[data] [ literal[string] ][ literal[string] ]= identifier[repr] ( identifier[e] ) keyword[if] identifier[len] ( identifier[data] [ literal[string] ])== literal[int] : keyword[del] identifier[data] [ literal[string] ] keyword[if] identifier[data] : keyword[return] identifier[data] keyword[else] : keyword[return] keyword[None]
def get_house_detail(self, html):
    """Get bedroom, bathroom, sqft and more information.

    Example: http://www.zillow.com/homedetails/8510-Whittier-Blvd-Bethesda-MD-20817/37183103_zpid/
    """
    if "I'm not a robot" in html:
        raise exc.CaptchaError(url) # depends on [control=['if'], data=[]]
    data = {'errors': dict()}
    soup = self.get_soup(html)
    # header part, bedroom, bathroom, sqft
    header = soup.find('header', class_='zsg-content-header addr')
    if header is None:
        raise exc.ParseError(url) # depends on [control=['if'], data=[]]
    try:
        h3 = header.find('h3')
        if h3 is None:
            raise exc.ParseError # depends on [control=['if'], data=[]]
        span_list = h3.find_all('span', class_='addr_bbs')
        if len(span_list) != 3:
            raise exc.ParseError # depends on [control=['if'], data=[]]
        text = span_list[0].text
        try:
            bedroom = float_filter(text)
            data['bedroom'] = bedroom # depends on [control=['try'], data=[]]
        except:
            data['errors']['bedroom'] = "can't parse bedroom from %r" % text # depends on [control=['except'], data=[]]
        text = span_list[1].text
        try:
            bathroom = float_filter(text)
            data['bathroom'] = bathroom # depends on [control=['try'], data=[]]
        except:
            data['errors']['bathroom'] = "can't parse bathroom from %r" % text # depends on [control=['except'], data=[]]
        text = span_list[2].text
        try:
            sqft = int_filter(text)
            data['sqft'] = sqft # depends on [control=['try'], data=[]]
        except:
            data['errors']['sqft'] = "can't parse sqft from %r" % text # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]]
    except:
        pass # depends on [control=['except'], data=[]]
    # Facts, Features, Construction, Other (FFCO)
    div_list = soup.find_all('div', class_=re.compile('fact-group-container zsg-content-component'))
    for div in div_list:
        h3 = div.find('h3')
        if h3.text == 'Facts':
            try:
                facts = list()
                for li in div.find_all('li'):
                    facts.append(li.text.strip()) # depends on [control=['for'], data=['li']]
                data['facts'] = facts # depends on [control=['try'], data=[]]
            except Exception as e:
                data['errors']['facts'] = str(e) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
        if h3.text == 'Features':
            features = list()
            try:
                for li in div.find_all('li'):
                    if '"targetDiv"' not in li.text:
                        features.append(li.text.strip()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['li']]
                data['features'] = features # depends on [control=['try'], data=[]]
            except Exception as e:
                data['errors']['features'] = repr(e) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
        if h3.text == 'Appliances Included':
            appliances = list()
            try:
                for li in div.find_all('li'):
                    appliances.append(li.text.strip()) # depends on [control=['for'], data=['li']]
                data['appliances'] = appliances # depends on [control=['try'], data=[]]
            except Exception as e:
                data['errors']['appliances'] = repr(e) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
        if h3.text == 'Additional Features':
            additional_features = list()
            try:
                for li in div.find_all('li'):
                    additional_features.append(li.text.strip()) # depends on [control=['for'], data=['li']]
                data['additional_features'] = additional_features # depends on [control=['try'], data=[]]
            except Exception as e:
                data['errors']['additional_features'] = repr(e) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
        if h3.text == 'Construction':
            construction = list()
            try:
                for li in div.find_all('li'):
                    construction.append(li.text.strip()) # depends on [control=['for'], data=['li']]
                data['construction'] = construction # depends on [control=['try'], data=[]]
            except Exception as e:
                data['errors']['construction'] = repr(e) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
        if h3.text == 'Other':
            other = list()
            try:
                for li in div.find_all('li'):
                    other.append(li.text.strip()) # depends on [control=['for'], data=['li']]
                data['other'] = other # depends on [control=['try'], data=[]]
            except Exception as e:
                data['errors']['other'] = repr(e) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
    # depends on [control=['for'], data=['div']]
    if len(data['errors']) == 0:
        del data['errors'] # depends on [control=['if'], data=[]]
    if data:
        return data # depends on [control=['if'], data=[]]
    else:
        return None
def rnoncentral_t(mu, lam, nu, size=None): """ Non-central Student's t random variates. """ tau = rgamma(nu / 2., nu / (2. * lam), size) return rnormal(mu, tau)
def function[rnoncentral_t, parameter[mu, lam, nu, size]]: constant[ Non-central Student's t random variates. ] variable[tau] assign[=] call[name[rgamma], parameter[binary_operation[name[nu] / constant[2.0]], binary_operation[name[nu] / binary_operation[constant[2.0] * name[lam]]], name[size]]] return[call[name[rnormal], parameter[name[mu], name[tau]]]]
keyword[def] identifier[rnoncentral_t] ( identifier[mu] , identifier[lam] , identifier[nu] , identifier[size] = keyword[None] ): literal[string] identifier[tau] = identifier[rgamma] ( identifier[nu] / literal[int] , identifier[nu] /( literal[int] * identifier[lam] ), identifier[size] ) keyword[return] identifier[rnormal] ( identifier[mu] , identifier[tau] )
def rnoncentral_t(mu, lam, nu, size=None): """ Non-central Student's t random variates. """ tau = rgamma(nu / 2.0, nu / (2.0 * lam), size) return rnormal(mu, tau)
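A minimal NumPy sketch of the same two-step draw, under the assumption (not stated above) that rgamma(alpha, beta, size) samples Gamma(shape=alpha, rate=beta) and that rnormal(mu, tau) treats tau as a precision, i.e. variance 1/tau; the function name below is illustrative, not the library's:

import numpy as np

def rnoncentral_t_np(mu, lam, nu, size=None):
    # Gamma with rate nu/(2*lam) corresponds to scale 2*lam/nu.
    tau = np.random.gamma(shape=nu / 2.0, scale=2.0 * lam / nu, size=size)
    # Precision tau maps to standard deviation 1/sqrt(tau).
    return np.random.normal(mu, 1.0 / np.sqrt(tau))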
def density(self, r, rho0, Ra, Rs): """ computes the density at radius r :param r: :param rho0: :param Ra: :param Rs: :return: """ Ra, Rs = self._sort_ra_rs(Ra, Rs) rho = rho0 / ((1 + (r / Ra) ** 2) * (1 + (r / Rs) ** 2)) return rho
def function[density, parameter[self, r, rho0, Ra, Rs]]: constant[ computes the density :param x: :param y: :param rho0: :param Ra: :param Rs: :return: ] <ast.Tuple object at 0x7da18dc9b3a0> assign[=] call[name[self]._sort_ra_rs, parameter[name[Ra], name[Rs]]] variable[rho] assign[=] binary_operation[name[rho0] / binary_operation[binary_operation[constant[1] + binary_operation[binary_operation[name[r] / name[Ra]] ** constant[2]]] * binary_operation[constant[1] + binary_operation[binary_operation[name[r] / name[Rs]] ** constant[2]]]]] return[name[rho]]
keyword[def] identifier[density] ( identifier[self] , identifier[r] , identifier[rho0] , identifier[Ra] , identifier[Rs] ): literal[string] identifier[Ra] , identifier[Rs] = identifier[self] . identifier[_sort_ra_rs] ( identifier[Ra] , identifier[Rs] ) identifier[rho] = identifier[rho0] /(( literal[int] +( identifier[r] / identifier[Ra] )** literal[int] )*( literal[int] +( identifier[r] / identifier[Rs] )** literal[int] )) keyword[return] identifier[rho]
def density(self, r, rho0, Ra, Rs): """ computes the density at radius r :param r: :param rho0: :param Ra: :param Rs: :return: """ (Ra, Rs) = self._sort_ra_rs(Ra, Rs) rho = rho0 / ((1 + (r / Ra) ** 2) * (1 + (r / Rs) ** 2)) return rho
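A standalone sketch of the same profile, assuming _sort_ra_rs only enforces Ra <= Rs; evaluating it on a log-spaced radius grid shows the density falling off once r exceeds each scale radius:

import numpy as np

def density_profile(r, rho0, Ra, Rs):
    Ra, Rs = min(Ra, Rs), max(Ra, Rs)  # assumed effect of _sort_ra_rs
    return rho0 / ((1 + (r / Ra) ** 2) * (1 + (r / Rs) ** 2))

r = np.logspace(-2, 2, 5)
print(density_profile(r, rho0=1.0, Ra=0.5, Rs=10.0))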
def schedule_hangup(self, call_params): """REST Schedule Hangup Helper """ path = '/' + self.api_version + '/ScheduleHangup/' method = 'POST' return self.request(path, method, call_params)
def function[schedule_hangup, parameter[self, call_params]]: constant[REST Schedule Hangup Helper ] variable[path] assign[=] binary_operation[binary_operation[constant[/] + name[self].api_version] + constant[/ScheduleHangup/]] variable[method] assign[=] constant[POST] return[call[name[self].request, parameter[name[path], name[method], name[call_params]]]]
keyword[def] identifier[schedule_hangup] ( identifier[self] , identifier[call_params] ): literal[string] identifier[path] = literal[string] + identifier[self] . identifier[api_version] + literal[string] identifier[method] = literal[string] keyword[return] identifier[self] . identifier[request] ( identifier[path] , identifier[method] , identifier[call_params] )
def schedule_hangup(self, call_params): """REST Schedule Hangup Helper """ path = '/' + self.api_version + '/ScheduleHangup/' method = 'POST' return self.request(path, method, call_params)
def type_assert_iter( iterable, cls, cast_from=None, cast_to=None, dynamic=None, objcls=None, choices=None, ctor=None, allow_none=False, ): """ Checks that every object in @iterable is an instance of @cls Will also unmarshal JSON objects to Python objects if items in @iterable are an instance of dict Args: iterable: Any iterable to check. Note that it would not make sense to pass a generator to this function cls: type, The class type to assert each member of @iterable is cast_from: type-or-tuple-of-types, If @obj is an instance of this type(s), cast it to @cast_to cast_to: type, The type to cast @obj to if it's an instance of @cast_from, or None to cast to @cls. If you need more than type(x), use a lambda or factory function. dynamic: @cls, A dynamic default value if @iterable is None, and @dynamic is not None. objcls: None-or-type, a type to assert @iterable is, ie: list, set, etc... choices: iterable-or-None, If not None, each object in @iterable must be in @choices ctor: None-or-static-method: Use this method as the constructor instead of __init__ allow_none: bool, True to allow @iterable to be None, otherwise False Returns: @iterable, note that @iterable will be recreated, which may be a performance concern if @iterable has many items Raises: TypeError: if @obj is not an instance of @cls """ if ( allow_none and iterable is None ): return iterable _check_dstruct(iterable, objcls) if choices is not None: for obj in iterable: _check_choices(obj, choices) if ( iterable is None and dynamic is not None ): iterable = dynamic t = type(iterable) return t( _check( obj, cls, False, cast_from, cast_to, ctor=ctor, ) for obj in iterable )
def function[type_assert_iter, parameter[iterable, cls, cast_from, cast_to, dynamic, objcls, choices, ctor, allow_none]]: constant[ Checks that every object in @iterable is an instance of @cls Will also unmarshal JSON objects to Python objects if items in @iterable are an instance of dict Args: iterable: Any iterable to check. Note that it would not make sense to pass a generator to this function cls: type, The class type to assert each member of @iterable is cast_from: type-or-tuple-of-types, If @obj is an instance of this type(s), cast it to @cast_to cast_to: type, The type to cast @obj to if it's an instance of @cast_from, or None to cast to @cls. If you need more than type(x), use a lambda or factory function. dynamic: @cls, A dynamic default value if @iterable is None, and @dynamic is not None. objcls: None-or-type, a type to assert @iterable is, ie: list, set, etc... choices: iterable-or-None, If not None, each object in @iterable must be in @choices ctor: None-or-static-method: Use this method as the constructor instead of __init__ allow_none: bool, True to allow @iterable to be None, otherwise False Returns: @iterable, note that @iterable will be recreated, which may be a performance concern if @iterable has many items Raises: TypeError: if @obj is not an instance of @cls ] if <ast.BoolOp object at 0x7da204621540> begin[:] return[name[iterable]] call[name[_check_dstruct], parameter[name[iterable], name[objcls]]] if compare[name[choices] is_not constant[None]] begin[:] for taget[name[obj]] in starred[name[iterable]] begin[:] call[name[_check_choices], parameter[name[obj], name[choices]]] if <ast.BoolOp object at 0x7da2046233d0> begin[:] variable[iterable] assign[=] name[dynamic] variable[t] assign[=] call[name[type], parameter[name[iterable]]] return[call[name[t], parameter[<ast.GeneratorExp object at 0x7da204620dc0>]]]
keyword[def] identifier[type_assert_iter] ( identifier[iterable] , identifier[cls] , identifier[cast_from] = keyword[None] , identifier[cast_to] = keyword[None] , identifier[dynamic] = keyword[None] , identifier[objcls] = keyword[None] , identifier[choices] = keyword[None] , identifier[ctor] = keyword[None] , identifier[allow_none] = keyword[False] , ): literal[string] keyword[if] ( identifier[allow_none] keyword[and] identifier[iterable] keyword[is] keyword[None] ): keyword[return] identifier[iterable] identifier[_check_dstruct] ( identifier[iterable] , identifier[objcls] ) keyword[if] identifier[choices] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[obj] keyword[in] identifier[iterable] : identifier[_check_choices] ( identifier[obj] , identifier[choices] ) keyword[if] ( identifier[iterable] keyword[is] keyword[None] keyword[and] identifier[dynamic] keyword[is] keyword[not] keyword[None] ): identifier[iterable] = identifier[dynamic] identifier[t] = identifier[type] ( identifier[iterable] ) keyword[return] identifier[t] ( identifier[_check] ( identifier[obj] , identifier[cls] , keyword[False] , identifier[cast_from] , identifier[cast_to] , identifier[ctor] = identifier[ctor] , ) keyword[for] identifier[obj] keyword[in] identifier[iterable] )
def type_assert_iter(iterable, cls, cast_from=None, cast_to=None, dynamic=None, objcls=None, choices=None, ctor=None, allow_none=False): """ Checks that every object in @iterable is an instance of @cls Will also unmarshal JSON objects to Python objects if items in @iterable are an instance of dict Args: iterable: Any iterable to check. Note that it would not make sense to pass a generator to this function cls: type, The class type to assert each member of @iterable is cast_from: type-or-tuple-of-types, If @obj is an instance of this type(s), cast it to @cast_to cast_to: type, The type to cast @obj to if it's an instance of @cast_from, or None to cast to @cls. If you need more than type(x), use a lambda or factory function. dynamic: @cls, A dynamic default value if @iterable is None, and @dynamic is not None. objcls: None-or-type, a type to assert @iterable is, ie: list, set, etc... choices: iterable-or-None, If not None, each object in @iterable must be in @choices ctor: None-or-static-method: Use this method as the constructor instead of __init__ allow_none: bool, True to allow @iterable to be None, otherwise False Returns: @iterable, note that @iterable will be recreated, which may be a performance concern if @iterable has many items Raises: TypeError: if @obj is not an instance of @cls """ if allow_none and iterable is None: return iterable # depends on [control=['if'], data=[]] _check_dstruct(iterable, objcls) if choices is not None: for obj in iterable: _check_choices(obj, choices) # depends on [control=['for'], data=['obj']] # depends on [control=['if'], data=['choices']] if iterable is None and dynamic is not None: iterable = dynamic # depends on [control=['if'], data=[]] t = type(iterable) return t((_check(obj, cls, False, cast_from, cast_to, ctor=ctor) for obj in iterable))
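Hypothetical call shapes for type_assert_iter; the helpers _check, _check_dstruct and _check_choices are not shown above, so these lines only illustrate the usage the docstring describes:

ints = type_assert_iter([1, 2, 3], int)                    # passes through unchanged
floats = type_assert_iter([1, 2], float, cast_from=int)    # casts each int to float
maybe = type_assert_iter(None, int, allow_none=True)       # returns None as-is
tags = type_assert_iter({'a', 'b'}, str, objcls=set, choices={'a', 'b', 'c'})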
def solr_advanced_search(self, query, token=None, limit=20): """ Search item metadata using Apache Solr. :param query: The Apache Lucene search query. :type query: string :param token: (optional) A valid token for the user in question. :type token: None | string :param limit: (optional) The limit of the search. :type limit: int | long :returns: The list of items that match the search query. :rtype: list[dict] """ parameters = dict() parameters['query'] = query parameters['limit'] = limit if token: parameters['token'] = token response = self.request('midas.solr.search.advanced', parameters) return response
def function[solr_advanced_search, parameter[self, query, token, limit]]: constant[ Search item metadata using Apache Solr. :param query: The Apache Lucene search query. :type query: string :param token: (optional) A valid token for the user in question. :type token: None | string :param limit: (optional) The limit of the search. :type limit: int | long :returns: The list of items that match the search query. :rtype: list[dict] ] variable[parameters] assign[=] call[name[dict], parameter[]] call[name[parameters]][constant[query]] assign[=] name[query] call[name[parameters]][constant[limit]] assign[=] name[limit] if name[token] begin[:] call[name[parameters]][constant[token]] assign[=] name[token] variable[response] assign[=] call[name[self].request, parameter[constant[midas.solr.search.advanced], name[parameters]]] return[name[response]]
keyword[def] identifier[solr_advanced_search] ( identifier[self] , identifier[query] , identifier[token] = keyword[None] , identifier[limit] = literal[int] ): literal[string] identifier[parameters] = identifier[dict] () identifier[parameters] [ literal[string] ]= identifier[query] identifier[parameters] [ literal[string] ]= identifier[limit] keyword[if] identifier[token] : identifier[parameters] [ literal[string] ]= identifier[token] identifier[response] = identifier[self] . identifier[request] ( literal[string] , identifier[parameters] ) keyword[return] identifier[response]
def solr_advanced_search(self, query, token=None, limit=20): """ Search item metadata using Apache Solr. :param query: The Apache Lucene search query. :type query: string :param token: (optional) A valid token for the user in question. :type token: None | string :param limit: (optional) The limit of the search. :type limit: int | long :returns: The list of items that match the search query. :rtype: list[dict] """ parameters = dict() parameters['query'] = query parameters['limit'] = limit if token: parameters['token'] = token # depends on [control=['if'], data=[]] response = self.request('midas.solr.search.advanced', parameters) return response
def tonos_oxia_converter(text, reverse=False): """For the Ancient Greek language. Converts characters accented with the tonos (meant for Modern Greek) into the oxia equivalent. Without this normalization, string comparisons will fail.""" for char_tonos, char_oxia in TONOS_OXIA.items(): if not reverse: text = text.replace(char_tonos, char_oxia) else: text = text.replace(char_oxia, char_tonos) return text
def function[tonos_oxia_converter, parameter[text, reverse]]: constant[For the Ancient Greek language. Converts characters accented with the tonos (meant for Modern Greek) into the oxia equivalent. Without this normalization, string comparisons will fail.] for taget[tuple[[<ast.Name object at 0x7da2046216c0>, <ast.Name object at 0x7da204623400>]]] in starred[call[name[TONOS_OXIA].items, parameter[]]] begin[:] if <ast.UnaryOp object at 0x7da2046222c0> begin[:] variable[text] assign[=] call[name[text].replace, parameter[name[char_tonos], name[char_oxia]]] return[name[text]]
keyword[def] identifier[tonos_oxia_converter] ( identifier[text] , identifier[reverse] = keyword[False] ): literal[string] keyword[for] identifier[char_tonos] , identifier[char_oxia] keyword[in] identifier[TONOS_OXIA] . identifier[items] (): keyword[if] keyword[not] identifier[reverse] : identifier[text] = identifier[text] . identifier[replace] ( identifier[char_tonos] , identifier[char_oxia] ) keyword[else] : identifier[text] = identifier[text] . identifier[replace] ( identifier[char_oxia] , identifier[char_tonos] ) keyword[return] identifier[text]
def tonos_oxia_converter(text, reverse=False): """For the Ancient Greek language. Converts characters accented with the tonos (meant for Modern Greek) into the oxia equivalent. Without this normalization, string comparisons will fail.""" for (char_tonos, char_oxia) in TONOS_OXIA.items(): if not reverse: text = text.replace(char_tonos, char_oxia) # depends on [control=['if'], data=[]] else: text = text.replace(char_oxia, char_tonos) # depends on [control=['for'], data=[]] return text
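A self-contained check with a one-entry stand-in for the TONOS_OXIA table (the real table is defined elsewhere in the module); U+03AC and U+1F71 are one known tonos/oxia pair for alpha:

TONOS_OXIA = {'\u03ac': '\u1f71'}  # stand-in: alpha with tonos maps to alpha with oxia

assert tonos_oxia_converter('\u03ac\u03bd') == '\u1f71\u03bd'
assert tonos_oxia_converter('\u1f71\u03bd', reverse=True) == '\u03ac\u03bd'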
def _ior(self, other): """Set multiplicity of each element to the maximum of the two collections. if isinstance(other, _basebag): This runs in O(other.num_unique_elements()) else: This runs in O(len(other)) """ # TODO do we have to create a bag from the other first? if not isinstance(other, _basebag): other = self._from_iterable(other) for elem, other_count in other.counts(): old_count = self.count(elem) new_count = max(other_count, old_count) self._set_count(elem, new_count) return self
def function[_ior, parameter[self, other]]: constant[Set multiplicity of each element to the maximum of the two collections. if isinstance(other, _basebag): This runs in O(other.num_unique_elements()) else: This runs in O(len(other)) ] if <ast.UnaryOp object at 0x7da1b260f910> begin[:] variable[other] assign[=] call[name[self]._from_iterable, parameter[name[other]]] for taget[tuple[[<ast.Name object at 0x7da1b265c910>, <ast.Name object at 0x7da1b265da80>]]] in starred[call[name[other].counts, parameter[]]] begin[:] variable[old_count] assign[=] call[name[self].count, parameter[name[elem]]] variable[new_count] assign[=] call[name[max], parameter[name[other_count], name[old_count]]] call[name[self]._set_count, parameter[name[elem], name[new_count]]] return[name[self]]
keyword[def] identifier[_ior] ( identifier[self] , identifier[other] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[other] , identifier[_basebag] ): identifier[other] = identifier[self] . identifier[_from_iterable] ( identifier[other] ) keyword[for] identifier[elem] , identifier[other_count] keyword[in] identifier[other] . identifier[counts] (): identifier[old_count] = identifier[self] . identifier[count] ( identifier[elem] ) identifier[new_count] = identifier[max] ( identifier[other_count] , identifier[old_count] ) identifier[self] . identifier[_set_count] ( identifier[elem] , identifier[new_count] ) keyword[return] identifier[self]
def _ior(self, other): """Set multiplicity of each element to the maximum of the two collections. if isinstance(other, _basebag): This runs in O(other.num_unique_elements()) else: This runs in O(len(other)) """ # TODO do we have to create a bag from the other first? if not isinstance(other, _basebag): other = self._from_iterable(other) # depends on [control=['if'], data=[]] for (elem, other_count) in other.counts(): old_count = self.count(elem) new_count = max(other_count, old_count) self._set_count(elem, new_count) # depends on [control=['for'], data=[]] return self
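The same max-of-multiplicities union semantics can be seen on the standard library's Counter, a convenient reference point for what _ior computes in place:

from collections import Counter

a = Counter('aab')   # {'a': 2, 'b': 1}
b = Counter('abbb')  # {'a': 1, 'b': 3}
print(a | b)         # Counter({'b': 3, 'a': 2}): max of each multiplicity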
def make_constants(builtin_only=False, stoplist=[], verbose=False): """ Return a decorator for optimizing global references. Verify that the first argument is a function. """ if type(builtin_only) == type(make_constants): raise ValueError("The make_constants decorator must have arguments.") return lambda f: _make_constants(f, builtin_only, stoplist, verbose)
def function[make_constants, parameter[builtin_only, stoplist, verbose]]: constant[ Return a decorator for optimizing global references. Verify that the first argument is a function. ] if compare[call[name[type], parameter[name[builtin_only]]] equal[==] call[name[type], parameter[name[make_constants]]]] begin[:] <ast.Raise object at 0x7da2041db1c0> return[<ast.Lambda object at 0x7da2041d9930>]
keyword[def] identifier[make_constants] ( identifier[builtin_only] = keyword[False] , identifier[stoplist] =[], identifier[verbose] = keyword[False] ): literal[string] keyword[if] identifier[type] ( identifier[builtin_only] )== identifier[type] ( identifier[make_constants] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] keyword[lambda] identifier[f] : identifier[_make_constants] ( identifier[f] , identifier[builtin_only] , identifier[stoplist] , identifier[verbose] )
def make_constants(builtin_only=False, stoplist=[], verbose=False): """ Return a decorator for optimizing global references. Verify that the first argument is a function. """ if type(builtin_only) == type(make_constants): raise ValueError('The make_constants decorator must have arguments.') # depends on [control=['if'], data=[]] return lambda f: _make_constants(f, builtin_only, stoplist, verbose)
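Intended usage, with a hypothetical target function (_make_constants itself is not shown above). The type() guard exists because @make_constants written without parentheses would pass the decorated function in as builtin_only:

@make_constants(verbose=True, stoplist=['sum'])
def hot_loop(xs):
    return [len(x) for x in xs]  # the global lookup of len can be bound as a constant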
def get_variant_id(variant): """Get a variant id on the format chrom_pos_ref_alt""" variant_id = '_'.join([ str(variant.CHROM), str(variant.POS), str(variant.REF), str(variant.ALT[0]) ] ) return variant_id
def function[get_variant_id, parameter[variant]]: constant[Get a variant id on the format chrom_pos_ref_alt] variable[variant_id] assign[=] call[constant[_].join, parameter[list[[<ast.Call object at 0x7da20c7c8e50>, <ast.Call object at 0x7da18fe937c0>, <ast.Call object at 0x7da18fe91d80>, <ast.Call object at 0x7da18fe91c30>]]]] return[name[variant_id]]
keyword[def] identifier[get_variant_id] ( identifier[variant] ): literal[string] identifier[variant_id] = literal[string] . identifier[join] ([ identifier[str] ( identifier[variant] . identifier[CHROM] ), identifier[str] ( identifier[variant] . identifier[POS] ), identifier[str] ( identifier[variant] . identifier[REF] ), identifier[str] ( identifier[variant] . identifier[ALT] [ literal[int] ]) ] ) keyword[return] identifier[variant_id]
def get_variant_id(variant): """Get a variant id on the format chrom_pos_ref_alt""" variant_id = '_'.join([str(variant.CHROM), str(variant.POS), str(variant.REF), str(variant.ALT[0])]) return variant_id
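A quick illustration with a stub record; the real variant object (for example a cyvcf2 or PyVCF record, an assumption not stated above) exposes the same CHROM/POS/REF/ALT attributes:

from types import SimpleNamespace

variant = SimpleNamespace(CHROM='1', POS=880086, REF='T', ALT=['C'])
print(get_variant_id(variant))  # 1_880086_T_C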
def set_ip(self, ip): """Change the current IP.""" self.set(ip=ip, netmask=self._nm)
def function[set_ip, parameter[self, ip]]: constant[Change the current IP.] call[name[self].set, parameter[]]
keyword[def] identifier[set_ip] ( identifier[self] , identifier[ip] ): literal[string] identifier[self] . identifier[set] ( identifier[ip] = identifier[ip] , identifier[netmask] = identifier[self] . identifier[_nm] )
def set_ip(self, ip): """Change the current IP.""" self.set(ip=ip, netmask=self._nm)
def render(self, is_unicode=False, pretty_print=False): """Last thing to do before rendering""" for f in self.graph.xml_filters: self.root = f(self.root) args = {'encoding': 'utf-8'} svg = b'' if etree.lxml: args['pretty_print'] = pretty_print if not self.graph.disable_xml_declaration: svg = b"<?xml version='1.0' encoding='utf-8'?>\n" if not self.graph.disable_xml_declaration: svg += b'\n'.join([ etree.tostring(pi, **args) for pi in self.processing_instructions ]) svg += etree.tostring(self.root, **args) if self.graph.disable_xml_declaration or is_unicode: svg = svg.decode('utf-8') return svg
def function[render, parameter[self, is_unicode, pretty_print]]: constant[Last thing to do before rendering] for taget[name[f]] in starred[name[self].graph.xml_filters] begin[:] name[self].root assign[=] call[name[f], parameter[name[self].root]] variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da20c7cabf0>], [<ast.Constant object at 0x7da20c7c9690>]] variable[svg] assign[=] constant[b''] if name[etree].lxml begin[:] call[name[args]][constant[pretty_print]] assign[=] name[pretty_print] if <ast.UnaryOp object at 0x7da18f810700> begin[:] variable[svg] assign[=] constant[b"<?xml version='1.0' encoding='utf-8'?>\n"] if <ast.UnaryOp object at 0x7da18f8111e0> begin[:] <ast.AugAssign object at 0x7da18f812980> <ast.AugAssign object at 0x7da18f811060> if <ast.BoolOp object at 0x7da18f8126b0> begin[:] variable[svg] assign[=] call[name[svg].decode, parameter[constant[utf-8]]] return[name[svg]]
keyword[def] identifier[render] ( identifier[self] , identifier[is_unicode] = keyword[False] , identifier[pretty_print] = keyword[False] ): literal[string] keyword[for] identifier[f] keyword[in] identifier[self] . identifier[graph] . identifier[xml_filters] : identifier[self] . identifier[root] = identifier[f] ( identifier[self] . identifier[root] ) identifier[args] ={ literal[string] : literal[string] } identifier[svg] = literal[string] keyword[if] identifier[etree] . identifier[lxml] : identifier[args] [ literal[string] ]= identifier[pretty_print] keyword[if] keyword[not] identifier[self] . identifier[graph] . identifier[disable_xml_declaration] : identifier[svg] = literal[string] keyword[if] keyword[not] identifier[self] . identifier[graph] . identifier[disable_xml_declaration] : identifier[svg] += literal[string] . identifier[join] ([ identifier[etree] . identifier[tostring] ( identifier[pi] ,** identifier[args] ) keyword[for] identifier[pi] keyword[in] identifier[self] . identifier[processing_instructions] ]) identifier[svg] += identifier[etree] . identifier[tostring] ( identifier[self] . identifier[root] ,** identifier[args] ) keyword[if] identifier[self] . identifier[graph] . identifier[disable_xml_declaration] keyword[or] identifier[is_unicode] : identifier[svg] = identifier[svg] . identifier[decode] ( literal[string] ) keyword[return] identifier[svg]
def render(self, is_unicode=False, pretty_print=False): """Last thing to do before rendering""" for f in self.graph.xml_filters: self.root = f(self.root) # depends on [control=['for'], data=['f']] args = {'encoding': 'utf-8'} svg = b'' if etree.lxml: args['pretty_print'] = pretty_print # depends on [control=['if'], data=[]] if not self.graph.disable_xml_declaration: svg = b"<?xml version='1.0' encoding='utf-8'?>\n" # depends on [control=['if'], data=[]] if not self.graph.disable_xml_declaration: svg += b'\n'.join([etree.tostring(pi, **args) for pi in self.processing_instructions]) # depends on [control=['if'], data=[]] svg += etree.tostring(self.root, **args) if self.graph.disable_xml_declaration or is_unicode: svg = svg.decode('utf-8') # depends on [control=['if'], data=[]] return svg
def webex_teams_webhook_events(): """Processes incoming requests to the '/events' URI.""" if request.method == 'GET': return ("""<!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <title>Webex Teams Bot served via Flask</title> </head> <body> <p> <strong>Your Flask web server is up and running!</strong> </p> <p> Here is a nice Cat Fact for you: </p> <blockquote>{}</blockquote> </body> </html> """.format(get_catfact())) elif request.method == 'POST': """Respond to inbound webhook JSON HTTP POST from Webex Teams.""" # Get the POST data sent from Webex Teams json_data = request.json print("\n") print("WEBHOOK POST RECEIVED:") print(json_data) print("\n") # Create a Webhook object from the JSON data webhook_obj = Webhook(json_data) # Get the room details room = api.rooms.get(webhook_obj.data.roomId) # Get the message details message = api.messages.get(webhook_obj.data.id) # Get the sender's details person = api.people.get(message.personId) print("NEW MESSAGE IN ROOM '{}'".format(room.title)) print("FROM '{}'".format(person.displayName)) print("MESSAGE '{}'\n".format(message.text)) # This is a VERY IMPORTANT loop prevention control step. # If you respond to all messages... You will respond to the messages # that the bot posts and thereby create a loop condition. me = api.people.me() if message.personId == me.id: # Message was sent by me (bot); do not respond. return 'OK' else: # Message was sent by someone else; parse message and respond. if "/CAT" in message.text: print("FOUND '/CAT'") # Get a cat fact cat_fact = get_catfact() print("SENDING CAT FACT '{}'".format(cat_fact)) # Post the fact to the room where the request was received api.messages.create(room.id, text=cat_fact) return 'OK'
def function[webex_teams_webhook_events, parameter[]]: constant[Processes incoming requests to the '/events' URI.] if compare[name[request].method equal[==] constant[GET]] begin[:] return[call[constant[<!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <title>Webex Teams Bot served via Flask</title> </head> <body> <p> <strong>Your Flask web server is up and running!</strong> </p> <p> Here is a nice Cat Fact for you: </p> <blockquote>{}</blockquote> </body> </html> ].format, parameter[call[name[get_catfact], parameter[]]]]]
keyword[def] identifier[webex_teams_webhook_events] (): literal[string] keyword[if] identifier[request] . identifier[method] == literal[string] : keyword[return] ( literal[string] . identifier[format] ( identifier[get_catfact] ())) keyword[elif] identifier[request] . identifier[method] == literal[string] : literal[string] identifier[json_data] = identifier[request] . identifier[json] identifier[print] ( literal[string] ) identifier[print] ( literal[string] ) identifier[print] ( identifier[json_data] ) identifier[print] ( literal[string] ) identifier[webhook_obj] = identifier[Webhook] ( identifier[json_data] ) identifier[room] = identifier[api] . identifier[rooms] . identifier[get] ( identifier[webhook_obj] . identifier[data] . identifier[roomId] ) identifier[message] = identifier[api] . identifier[messages] . identifier[get] ( identifier[webhook_obj] . identifier[data] . identifier[id] ) identifier[person] = identifier[api] . identifier[people] . identifier[get] ( identifier[message] . identifier[personId] ) identifier[print] ( literal[string] . identifier[format] ( identifier[room] . identifier[title] )) identifier[print] ( literal[string] . identifier[format] ( identifier[person] . identifier[displayName] )) identifier[print] ( literal[string] . identifier[format] ( identifier[message] . identifier[text] )) identifier[me] = identifier[api] . identifier[people] . identifier[me] () keyword[if] identifier[message] . identifier[personId] == identifier[me] . identifier[id] : keyword[return] literal[string] keyword[else] : keyword[if] literal[string] keyword[in] identifier[message] . identifier[text] : identifier[print] ( literal[string] ) identifier[cat_fact] = identifier[get_catfact] () identifier[print] ( literal[string] . identifier[format] ( identifier[cat_fact] )) identifier[api] . identifier[messages] . identifier[create] ( identifier[room] . identifier[id] , identifier[text] = identifier[cat_fact] ) keyword[return] literal[string]
def webex_teams_webhook_events(): """Processes incoming requests to the '/events' URI.""" if request.method == 'GET': return '<!DOCTYPE html>\n <html lang="en">\n <head>\n <meta charset="UTF-8">\n <title>Webex Teams Bot served via Flask</title>\n </head>\n <body>\n <p>\n <strong>Your Flask web server is up and running!</strong>\n </p>\n <p>\n Here is a nice Cat Fact for you:\n </p>\n <blockquote>{}</blockquote>\n </body>\n </html>\n '.format(get_catfact()) # depends on [control=['if'], data=[]] elif request.method == 'POST': 'Respond to inbound webhook JSON HTTP POST from Webex Teams.' # Get the POST data sent from Webex Teams json_data = request.json print('\n') print('WEBHOOK POST RECEIVED:') print(json_data) print('\n') # Create a Webhook object from the JSON data webhook_obj = Webhook(json_data) # Get the room details room = api.rooms.get(webhook_obj.data.roomId) # Get the message details message = api.messages.get(webhook_obj.data.id) # Get the sender's details person = api.people.get(message.personId) print("NEW MESSAGE IN ROOM '{}'".format(room.title)) print("FROM '{}'".format(person.displayName)) print("MESSAGE '{}'\n".format(message.text)) # This is a VERY IMPORTANT loop prevention control step. # If you respond to all messages... You will respond to the messages # that the bot posts and thereby create a loop condition. me = api.people.me() if message.personId == me.id: # Message was sent by me (bot); do not respond. return 'OK' # depends on [control=['if'], data=[]] else: # Message was sent by someone else; parse message and respond. if '/CAT' in message.text: print("FOUND '/CAT'") # Get a cat fact cat_fact = get_catfact() print("SENDING CAT FACT '{}'".format(cat_fact)) # Post the fact to the room where the request was received api.messages.create(room.id, text=cat_fact) # depends on [control=['if'], data=[]] return 'OK' # depends on [control=['if'], data=[]]
def get_mipmap_pixel( self, left: float, top: float, right: float, bottom: float ) -> Tuple[int, int, int]: """Get the average color of a rectangle in this Image. Parameters should stay within the following limits: * 0 <= left < right < Image.width * 0 <= top < bottom < Image.height Args: left (float): Left corner of the region. top (float): Top corner of the region. right (float): Right corner of the region. bottom (float): Bottom corner of the region. Returns: Tuple[int, int, int]: An (r, g, b) tuple containing the averaged color value. Values are in a 0 to 255 range. """ color = lib.TCOD_image_get_mipmap_pixel( self.image_c, left, top, right, bottom ) return (color.r, color.g, color.b)
def function[get_mipmap_pixel, parameter[self, left, top, right, bottom]]: constant[Get the average color of a rectangle in this Image. Parameters should stay within the following limits: * 0 <= left < right < Image.width * 0 <= top < bottom < Image.height Args: left (float): Left corner of the region. top (float): Top corner of the region. right (float): Right corner of the region. bottom (float): Bottom corner of the region. Returns: Tuple[int, int, int]: An (r, g, b) tuple containing the averaged color value. Values are in a 0 to 255 range. ] variable[color] assign[=] call[name[lib].TCOD_image_get_mipmap_pixel, parameter[name[self].image_c, name[left], name[top], name[right], name[bottom]]] return[tuple[[<ast.Attribute object at 0x7da1b2344d90>, <ast.Attribute object at 0x7da18bccac50>, <ast.Attribute object at 0x7da18bcc9120>]]]
keyword[def] identifier[get_mipmap_pixel] ( identifier[self] , identifier[left] : identifier[float] , identifier[top] : identifier[float] , identifier[right] : identifier[float] , identifier[bottom] : identifier[float] )-> identifier[Tuple] [ identifier[int] , identifier[int] , identifier[int] ]: literal[string] identifier[color] = identifier[lib] . identifier[TCOD_image_get_mipmap_pixel] ( identifier[self] . identifier[image_c] , identifier[left] , identifier[top] , identifier[right] , identifier[bottom] ) keyword[return] ( identifier[color] . identifier[r] , identifier[color] . identifier[g] , identifier[color] . identifier[b] )
def get_mipmap_pixel(self, left: float, top: float, right: float, bottom: float) -> Tuple[int, int, int]: """Get the average color of a rectangle in this Image. Parameters should stay within the following limits: * 0 <= left < right < Image.width * 0 <= top < bottom < Image.height Args: left (float): Left corner of the region. top (float): Top corner of the region. right (float): Right corner of the region. bottom (float): Bottom corner of the region. Returns: Tuple[int, int, int]: An (r, g, b) tuple containing the averaged color value. Values are in a 0 to 255 range. """ color = lib.TCOD_image_get_mipmap_pixel(self.image_c, left, top, right, bottom) return (color.r, color.g, color.b)
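The averaging itself happens inside the C call; a conceptual NumPy equivalent (a sketch, not the libtcod implementation) makes the semantics concrete:

import numpy as np

def average_color(pixels, left, top, right, bottom):
    # Mean RGB over the half-open pixel rectangle [top, bottom) x [left, right).
    region = pixels[int(top):int(bottom), int(left):int(right)]
    r, g, b = region.reshape(-1, 3).mean(axis=0)
    return int(r), int(g), int(b)

img = np.zeros((16, 16, 3), dtype=np.uint8)
img[:, :8] = (255, 0, 0)                 # left half red
print(average_color(img, 0, 0, 16, 16))  # (127, 0, 0)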
def get_seq_cluster(pdb_id_chain): """Get the sequence cluster of a PDB ID plus a chain. Parameters ---------- pdb_id_chain : string A string denoting a 4 character PDB ID plus a one character chain offset with a dot: XXXX.X, as in 2F5N.A Returns ------- out : dict A dictionary containing the sequence cluster associated with the PDB entry and chain Examples -------- >>> sclust = get_seq_cluster('2F5N.A') >>> print(sclust['pdbChain'][:10]) [{'@name': '4PD2.A', '@rank': '1'}, {'@name': '3U6P.A', '@rank': '2'}, {'@name': '4PCZ.A', '@rank': '3'}, {'@name': '3GPU.A', '@rank': '4'}, {'@name': '3JR5.A', '@rank': '5'}, {'@name': '3SAU.A', '@rank': '6'}, {'@name': '3GQ4.A', '@rank': '7'}, {'@name': '1R2Z.A', '@rank': '8'}, {'@name': '3U6E.A', '@rank': '9'}, {'@name': '2XZF.A', '@rank': '10'}] """ url_root = 'http://www.rcsb.org/pdb/rest/sequenceCluster?structureId=' out = get_info(pdb_id_chain, url_root = url_root) out = to_dict(out) return remove_at_sign(out['sequenceCluster'])
def function[get_seq_cluster, parameter[pdb_id_chain]]: constant[Get the sequence cluster of a PDB ID plus a pdb_id plus a chain, Parameters ---------- pdb_id_chain : string A string denoting a 4 character PDB ID plus a one character chain offset with a dot: XXXX.X, as in 2F5N.A Returns ------- out : dict A dictionary containing the sequence cluster associated with the PDB entry and chain Examples -------- >>> sclust = get_seq_cluster('2F5N.A') >>> print(sclust['pdbChain'][:10]) [{'@name': '4PD2.A', '@rank': '1'}, {'@name': '3U6P.A', '@rank': '2'}, {'@name': '4PCZ.A', '@rank': '3'}, {'@name': '3GPU.A', '@rank': '4'}, {'@name': '3JR5.A', '@rank': '5'}, {'@name': '3SAU.A', '@rank': '6'}, {'@name': '3GQ4.A', '@rank': '7'}, {'@name': '1R2Z.A', '@rank': '8'}, {'@name': '3U6E.A', '@rank': '9'}, {'@name': '2XZF.A', '@rank': '10'}] ] variable[url_root] assign[=] constant[http://www.rcsb.org/pdb/rest/sequenceCluster?structureId=] variable[out] assign[=] call[name[get_info], parameter[name[pdb_id_chain]]] variable[out] assign[=] call[name[to_dict], parameter[name[out]]] return[call[name[remove_at_sign], parameter[call[name[out]][constant[sequenceCluster]]]]]
keyword[def] identifier[get_seq_cluster] ( identifier[pdb_id_chain] ): literal[string] identifier[url_root] = literal[string] identifier[out] = identifier[get_info] ( identifier[pdb_id_chain] , identifier[url_root] = identifier[url_root] ) identifier[out] = identifier[to_dict] ( identifier[out] ) keyword[return] identifier[remove_at_sign] ( identifier[out] [ literal[string] ])
def get_seq_cluster(pdb_id_chain): """Get the sequence cluster of a PDB ID plus a chain. Parameters ---------- pdb_id_chain : string A string denoting a 4 character PDB ID plus a one character chain offset with a dot: XXXX.X, as in 2F5N.A Returns ------- out : dict A dictionary containing the sequence cluster associated with the PDB entry and chain Examples -------- >>> sclust = get_seq_cluster('2F5N.A') >>> print(sclust['pdbChain'][:10]) [{'@name': '4PD2.A', '@rank': '1'}, {'@name': '3U6P.A', '@rank': '2'}, {'@name': '4PCZ.A', '@rank': '3'}, {'@name': '3GPU.A', '@rank': '4'}, {'@name': '3JR5.A', '@rank': '5'}, {'@name': '3SAU.A', '@rank': '6'}, {'@name': '3GQ4.A', '@rank': '7'}, {'@name': '1R2Z.A', '@rank': '8'}, {'@name': '3U6E.A', '@rank': '9'}, {'@name': '2XZF.A', '@rank': '10'}] """ url_root = 'http://www.rcsb.org/pdb/rest/sequenceCluster?structureId=' out = get_info(pdb_id_chain, url_root=url_root) out = to_dict(out) return remove_at_sign(out['sequenceCluster'])
def _objectmethods(self, obj: str, *args) -> list: """ This method parses the SAS log for artifacts (tables and graphics) that were created from the procedure method call :param obj: str -- proc object :param args: list likely none :return: list -- the tables and graphs available for tab complete """ code = "%listdata(" code += obj code += ");" self.logger.debug("Object Method macro call: " + str(code)) res = self.sas.submit(code, "text") meth = res['LOG'].splitlines() for i in range(len(meth)): meth[i] = meth[i].lstrip().rstrip() self.logger.debug('SAS Log: ' + res['LOG']) objlist = meth[meth.index('startparse9878') + 1:meth.index('endparse9878')] self.logger.debug("PROC attr list: " + str(objlist)) return objlist
def function[_objectmethods, parameter[self, obj]]: constant[ This method parses the SAS log for artifacts (tables and graphics) that were created from the procedure method call :param obj: str -- proc object :param args: list likely none :return: list -- the tables and graphs available for tab complete ] variable[code] assign[=] constant[%listdata(] <ast.AugAssign object at 0x7da20c6a9f30> <ast.AugAssign object at 0x7da20c6aace0> call[name[self].logger.debug, parameter[binary_operation[constant[Object Method macro call: ] + call[name[str], parameter[name[code]]]]]] variable[res] assign[=] call[name[self].sas.submit, parameter[name[code], constant[text]]] variable[meth] assign[=] call[call[name[res]][constant[LOG]].splitlines, parameter[]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[meth]]]]]] begin[:] call[name[meth]][name[i]] assign[=] call[call[call[name[meth]][name[i]].lstrip, parameter[]].rstrip, parameter[]] call[name[self].logger.debug, parameter[binary_operation[constant[SAS Log: ] + call[name[res]][constant[LOG]]]]] variable[objlist] assign[=] call[name[meth]][<ast.Slice object at 0x7da20e9558d0>] call[name[self].logger.debug, parameter[binary_operation[constant[PROC attr list: ] + call[name[str], parameter[name[objlist]]]]]] return[name[objlist]]
keyword[def] identifier[_objectmethods] ( identifier[self] , identifier[obj] : identifier[str] ,* identifier[args] )-> identifier[list] : literal[string] identifier[code] = literal[string] identifier[code] += identifier[obj] identifier[code] += literal[string] identifier[self] . identifier[logger] . identifier[debug] ( literal[string] + identifier[str] ( identifier[code] )) identifier[res] = identifier[self] . identifier[sas] . identifier[submit] ( identifier[code] , literal[string] ) identifier[meth] = identifier[res] [ literal[string] ]. identifier[splitlines] () keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[meth] )): identifier[meth] [ identifier[i] ]= identifier[meth] [ identifier[i] ]. identifier[lstrip] (). identifier[rstrip] () identifier[self] . identifier[logger] . identifier[debug] ( literal[string] + identifier[res] [ literal[string] ]) identifier[objlist] = identifier[meth] [ identifier[meth] . identifier[index] ( literal[string] )+ literal[int] : identifier[meth] . identifier[index] ( literal[string] )] identifier[self] . identifier[logger] . identifier[debug] ( literal[string] + identifier[str] ( identifier[objlist] )) keyword[return] identifier[objlist]
def _objectmethods(self, obj: str, *args) -> list: """ This method parses the SAS log for artifacts (tables and graphics) that were created from the procedure method call :param obj: str -- proc object :param args: list likely none :return: list -- the tables and graphs available for tab complete """ code = '%listdata(' code += obj code += ');' self.logger.debug('Object Method macro call: ' + str(code)) res = self.sas.submit(code, 'text') meth = res['LOG'].splitlines() for i in range(len(meth)): meth[i] = meth[i].lstrip().rstrip() # depends on [control=['for'], data=['i']] self.logger.debug('SAS Log: ' + res['LOG']) objlist = meth[meth.index('startparse9878') + 1:meth.index('endparse9878')] self.logger.debug('PROC attr list: ' + str(objlist)) return objlist
def _wrap_layer(name, input_layer, build_func, dropout_rate=0.0, trainable=True): """Wrap layers with residual, normalization and dropout. :param name: Prefix of names for internal layers. :param input_layer: Input layer. :param build_func: A callable that takes the input tensor and generates the output tensor. :param dropout_rate: Dropout rate. :param trainable: Whether the layers are trainable. :return: Output layer. """ build_output = build_func(input_layer) if dropout_rate > 0.0: dropout_layer = keras.layers.Dropout( rate=dropout_rate, name='%s-Dropout' % name, )(build_output) else: dropout_layer = build_output if isinstance(input_layer, list): input_layer = input_layer[0] add_layer = keras.layers.Add(name='%s-Add' % name)([input_layer, dropout_layer]) normal_layer = LayerNormalization( trainable=trainable, name='%s-Norm' % name, )(add_layer) return normal_layer
def function[_wrap_layer, parameter[name, input_layer, build_func, dropout_rate, trainable]]: constant[Wrap layers with residual, normalization and dropout. :param name: Prefix of names for internal layers. :param input_layer: Input layer. :param build_func: A callable that takes the input tensor and generates the output tensor. :param dropout_rate: Dropout rate. :param trainable: Whether the layers are trainable. :return: Output layer. ] variable[build_output] assign[=] call[name[build_func], parameter[name[input_layer]]] if compare[name[dropout_rate] greater[>] constant[0.0]] begin[:] variable[dropout_layer] assign[=] call[call[name[keras].layers.Dropout, parameter[]], parameter[name[build_output]]] if call[name[isinstance], parameter[name[input_layer], name[list]]] begin[:] variable[input_layer] assign[=] call[name[input_layer]][constant[0]] variable[add_layer] assign[=] call[call[name[keras].layers.Add, parameter[]], parameter[list[[<ast.Name object at 0x7da1b1ec5870>, <ast.Name object at 0x7da1b1ec4a30>]]]] variable[normal_layer] assign[=] call[call[name[LayerNormalization], parameter[]], parameter[name[add_layer]]] return[name[normal_layer]]
keyword[def] identifier[_wrap_layer] ( identifier[name] , identifier[input_layer] , identifier[build_func] , identifier[dropout_rate] = literal[int] , identifier[trainable] = keyword[True] ): literal[string] identifier[build_output] = identifier[build_func] ( identifier[input_layer] ) keyword[if] identifier[dropout_rate] > literal[int] : identifier[dropout_layer] = identifier[keras] . identifier[layers] . identifier[Dropout] ( identifier[rate] = identifier[dropout_rate] , identifier[name] = literal[string] % identifier[name] , )( identifier[build_output] ) keyword[else] : identifier[dropout_layer] = identifier[build_output] keyword[if] identifier[isinstance] ( identifier[input_layer] , identifier[list] ): identifier[input_layer] = identifier[input_layer] [ literal[int] ] identifier[add_layer] = identifier[keras] . identifier[layers] . identifier[Add] ( identifier[name] = literal[string] % identifier[name] )([ identifier[input_layer] , identifier[dropout_layer] ]) identifier[normal_layer] = identifier[LayerNormalization] ( identifier[trainable] = identifier[trainable] , identifier[name] = literal[string] % identifier[name] , )( identifier[add_layer] ) keyword[return] identifier[normal_layer]
def _wrap_layer(name, input_layer, build_func, dropout_rate=0.0, trainable=True): """Wrap layers with residual, normalization and dropout. :param name: Prefix of names for internal layers. :param input_layer: Input layer. :param build_func: A callable that takes the input tensor and generates the output tensor. :param dropout_rate: Dropout rate. :param trainable: Whether the layers are trainable. :return: Output layer. """ build_output = build_func(input_layer) if dropout_rate > 0.0: dropout_layer = keras.layers.Dropout(rate=dropout_rate, name='%s-Dropout' % name)(build_output) # depends on [control=['if'], data=['dropout_rate']] else: dropout_layer = build_output if isinstance(input_layer, list): input_layer = input_layer[0] # depends on [control=['if'], data=[]] add_layer = keras.layers.Add(name='%s-Add' % name)([input_layer, dropout_layer]) normal_layer = LayerNormalization(trainable=trainable, name='%s-Norm' % name)(add_layer) return normal_layer
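Hypothetical usage, wrapping a feed-forward sub-layer (hidden is a placeholder Keras tensor; the names and sizes are illustrative). build_func only needs to be a callable from tensor to tensor, which a Dense instance satisfies:

hidden = keras.layers.Input(shape=(128, 768))
ffn_out = _wrap_layer(
    name='Encoder-1-FeedForward',
    input_layer=hidden,
    build_func=keras.layers.Dense(units=768, activation='relu'),
    dropout_rate=0.1,
)  # Dense -> Dropout -> Add(residual) -> LayerNormalization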
def streaming_step(self): """ Sync and handle data streaming for each integration step Returns ------- None """ system = self.system if system.config.dime_enable: system.streaming.sync_and_handle() system.streaming.vars_to_modules() system.streaming.vars_to_pmu()
def function[streaming_step, parameter[self]]: constant[ Sync, handle and streaming for each integration step Returns ------- None ] variable[system] assign[=] name[self].system if name[system].config.dime_enable begin[:] call[name[system].streaming.sync_and_handle, parameter[]] call[name[system].streaming.vars_to_modules, parameter[]] call[name[system].streaming.vars_to_pmu, parameter[]]
keyword[def] identifier[streaming_step] ( identifier[self] ): literal[string] identifier[system] = identifier[self] . identifier[system] keyword[if] identifier[system] . identifier[config] . identifier[dime_enable] : identifier[system] . identifier[streaming] . identifier[sync_and_handle] () identifier[system] . identifier[streaming] . identifier[vars_to_modules] () identifier[system] . identifier[streaming] . identifier[vars_to_pmu] ()
def streaming_step(self): """ Sync and handle data streaming for each integration step Returns ------- None """ system = self.system if system.config.dime_enable: system.streaming.sync_and_handle() system.streaming.vars_to_modules() system.streaming.vars_to_pmu() # depends on [control=['if'], data=[]]
def visit_Compare(self, node): """Replace chained comparisons with calls to :func:`.napi_compare`.""" if len(node.ops) > 1: func = Name(id=self._prefix + 'napi_compare', ctx=Load()) args = [node.left, List(elts=[Str(op.__class__.__name__) for op in node.ops], ctx=Load()), List(elts=node.comparators, ctx=Load())] node = Call(func=func, args=args, keywords=self._kwargs) fml(node) self.generic_visit(node) return node
def function[visit_Compare, parameter[self, node]]: constant[Replace chained comparisons with calls to :func:`.napi_compare`.] if compare[call[name[len], parameter[name[node].ops]] greater[>] constant[1]] begin[:] variable[func] assign[=] call[name[Name], parameter[]] variable[args] assign[=] list[[<ast.Attribute object at 0x7da1b271fa90>, <ast.Call object at 0x7da1b271c790>, <ast.Call object at 0x7da1b277d1e0>]] variable[node] assign[=] call[name[Call], parameter[]] call[name[fml], parameter[name[node]]] call[name[self].generic_visit, parameter[name[node]]] return[name[node]]
keyword[def] identifier[visit_Compare] ( identifier[self] , identifier[node] ): literal[string] keyword[if] identifier[len] ( identifier[node] . identifier[ops] )> literal[int] : identifier[func] = identifier[Name] ( identifier[id] = identifier[self] . identifier[_prefix] + literal[string] , identifier[ctx] = identifier[Load] ()) identifier[args] =[ identifier[node] . identifier[left] , identifier[List] ( identifier[elts] =[ identifier[Str] ( identifier[op] . identifier[__class__] . identifier[__name__] ) keyword[for] identifier[op] keyword[in] identifier[node] . identifier[ops] ], identifier[ctx] = identifier[Load] ()), identifier[List] ( identifier[elts] = identifier[node] . identifier[comparators] , identifier[ctx] = identifier[Load] ())] identifier[node] = identifier[Call] ( identifier[func] = identifier[func] , identifier[args] = identifier[args] , identifier[keywords] = identifier[self] . identifier[_kwargs] ) identifier[fml] ( identifier[node] ) identifier[self] . identifier[generic_visit] ( identifier[node] ) keyword[return] identifier[node]
def visit_Compare(self, node): """Replace chained comparisons with calls to :func:`.napi_compare`.""" if len(node.ops) > 1: func = Name(id=self._prefix + 'napi_compare', ctx=Load()) args = [node.left, List(elts=[Str(op.__class__.__name__) for op in node.ops], ctx=Load()), List(elts=node.comparators, ctx=Load())] node = Call(func=func, args=args, keywords=self._kwargs) fml(node) # depends on [control=['if'], data=[]] self.generic_visit(node) return node
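Concretely, a chain such as a < b <= c parses into one Compare node with two ops, and the rewrite (assuming an empty prefix) replaces it with the single call napi_compare(a, ['Lt', 'LtE'], [b, c]), so the helper sees the left operand once, the operator names, and every comparator:

import ast

tree = ast.parse('a < b <= c', mode='eval')
print(ast.dump(tree.body))  # Compare(left=Name('a'), ops=[Lt(), LtE()], comparators=[...])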
def binned_bitsets_proximity( f, chrom_col=0, start_col=1, end_col=2, strand_col=5, upstream=0, downstream=0 ): """Read a file into a dictionary of bitsets""" last_chrom = None last_bitset = None bitsets = dict() for line in f: if line.startswith("#"): continue # print "input=%s" % ( line ), fields = line.split() strand = "+" if len(fields) >= strand_col + 1: if fields[strand_col] == "-": strand = "-" chrom = fields[chrom_col] if chrom != last_chrom: if chrom not in bitsets: bitsets[chrom] = BinnedBitSet( MAX ) last_chrom = chrom last_bitset = bitsets[chrom] start, end = int( fields[start_col] ), int( fields[end_col] ) if strand == "+": if upstream: start = max( 0, start - upstream ) if downstream: end = min( MAX, end + downstream ) if strand == "-": if upstream: end = min( MAX, end + upstream ) if downstream: start = max( 0, start - downstream ) # print "set: start=%d\tend=%d" % ( start, end ) if end-start > 0: last_bitset.set_range( start, end-start ) return bitsets
def function[binned_bitsets_proximity, parameter[f, chrom_col, start_col, end_col, strand_col, upstream, downstream]]: constant[Read a file into a dictionary of bitsets] variable[last_chrom] assign[=] constant[None] variable[last_bitset] assign[=] constant[None] variable[bitsets] assign[=] call[name[dict], parameter[]] for taget[name[line]] in starred[name[f]] begin[:] if call[name[line].startswith, parameter[constant[#]]] begin[:] continue variable[fields] assign[=] call[name[line].split, parameter[]] variable[strand] assign[=] constant[+] if compare[call[name[len], parameter[name[fields]]] greater_or_equal[>=] binary_operation[name[strand_col] + constant[1]]] begin[:] if compare[call[name[fields]][name[strand_col]] equal[==] constant[-]] begin[:] variable[strand] assign[=] constant[-] variable[chrom] assign[=] call[name[fields]][name[chrom_col]] if compare[name[chrom] not_equal[!=] name[last_chrom]] begin[:] if compare[name[chrom] <ast.NotIn object at 0x7da2590d7190> name[bitsets]] begin[:] call[name[bitsets]][name[chrom]] assign[=] call[name[BinnedBitSet], parameter[name[MAX]]] variable[last_chrom] assign[=] name[chrom] variable[last_bitset] assign[=] call[name[bitsets]][name[chrom]] <ast.Tuple object at 0x7da1b0efc1f0> assign[=] tuple[[<ast.Call object at 0x7da1b0efffa0>, <ast.Call object at 0x7da1b0effc70>]] if compare[name[strand] equal[==] constant[+]] begin[:] if name[upstream] begin[:] variable[start] assign[=] call[name[max], parameter[constant[0], binary_operation[name[start] - name[upstream]]]] if name[downstream] begin[:] variable[end] assign[=] call[name[min], parameter[name[MAX], binary_operation[name[end] + name[downstream]]]] if compare[name[strand] equal[==] constant[-]] begin[:] if name[upstream] begin[:] variable[end] assign[=] call[name[min], parameter[name[MAX], binary_operation[name[end] + name[upstream]]]] if name[downstream] begin[:] variable[start] assign[=] call[name[max], parameter[constant[0], binary_operation[name[start] - name[downstream]]]] if compare[binary_operation[name[end] - name[start]] greater[>] constant[0]] begin[:] call[name[last_bitset].set_range, parameter[name[start], binary_operation[name[end] - name[start]]]] return[name[bitsets]]
keyword[def] identifier[binned_bitsets_proximity] ( identifier[f] , identifier[chrom_col] = literal[int] , identifier[start_col] = literal[int] , identifier[end_col] = literal[int] , identifier[strand_col] = literal[int] , identifier[upstream] = literal[int] , identifier[downstream] = literal[int] ): literal[string] identifier[last_chrom] = keyword[None] identifier[last_bitset] = keyword[None] identifier[bitsets] = identifier[dict] () keyword[for] identifier[line] keyword[in] identifier[f] : keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): keyword[continue] identifier[fields] = identifier[line] . identifier[split] () identifier[strand] = literal[string] keyword[if] identifier[len] ( identifier[fields] )>= identifier[strand_col] + literal[int] : keyword[if] identifier[fields] [ identifier[strand_col] ]== literal[string] : identifier[strand] = literal[string] identifier[chrom] = identifier[fields] [ identifier[chrom_col] ] keyword[if] identifier[chrom] != identifier[last_chrom] : keyword[if] identifier[chrom] keyword[not] keyword[in] identifier[bitsets] : identifier[bitsets] [ identifier[chrom] ]= identifier[BinnedBitSet] ( identifier[MAX] ) identifier[last_chrom] = identifier[chrom] identifier[last_bitset] = identifier[bitsets] [ identifier[chrom] ] identifier[start] , identifier[end] = identifier[int] ( identifier[fields] [ identifier[start_col] ]), identifier[int] ( identifier[fields] [ identifier[end_col] ]) keyword[if] identifier[strand] == literal[string] : keyword[if] identifier[upstream] : identifier[start] = identifier[max] ( literal[int] , identifier[start] - identifier[upstream] ) keyword[if] identifier[downstream] : identifier[end] = identifier[min] ( identifier[MAX] , identifier[end] + identifier[downstream] ) keyword[if] identifier[strand] == literal[string] : keyword[if] identifier[upstream] : identifier[end] = identifier[min] ( identifier[MAX] , identifier[end] + identifier[upstream] ) keyword[if] identifier[downstream] : identifier[start] = identifier[max] ( literal[int] , identifier[start] - identifier[downstream] ) keyword[if] identifier[end] - identifier[start] > literal[int] : identifier[last_bitset] . identifier[set_range] ( identifier[start] , identifier[end] - identifier[start] ) keyword[return] identifier[bitsets]
def binned_bitsets_proximity(f, chrom_col=0, start_col=1, end_col=2, strand_col=5, upstream=0, downstream=0): """Read a file into a dictionary of bitsets""" last_chrom = None last_bitset = None bitsets = dict() for line in f: if line.startswith('#'): continue # depends on [control=['if'], data=[]] # print "input=%s" % ( line ), fields = line.split() strand = '+' if len(fields) >= strand_col + 1: if fields[strand_col] == '-': strand = '-' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] chrom = fields[chrom_col] if chrom != last_chrom: if chrom not in bitsets: bitsets[chrom] = BinnedBitSet(MAX) # depends on [control=['if'], data=['chrom', 'bitsets']] last_chrom = chrom last_bitset = bitsets[chrom] # depends on [control=['if'], data=['chrom', 'last_chrom']] (start, end) = (int(fields[start_col]), int(fields[end_col])) if strand == '+': if upstream: start = max(0, start - upstream) # depends on [control=['if'], data=[]] if downstream: end = min(MAX, end + downstream) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if strand == '-': if upstream: end = min(MAX, end + upstream) # depends on [control=['if'], data=[]] if downstream: start = max(0, start - downstream) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # print "set: start=%d\tend=%d" % ( start, end ) if end - start > 0: last_bitset.set_range(start, end - start) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] return bitsets
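A small in-memory, BED-style example; BinnedBitSet and MAX are assumed to come from bx-python's bx.bitset module:

import io

bed = io.StringIO('chr1\t100\t200\tfeat\t0\t+\n')
bitsets = binned_bitsets_proximity(bed, upstream=50, downstream=10)
# bitsets['chr1'] now has bits [50, 210) set: the '+' strand interval
# [100, 200) widened 50 bases upstream and 10 downstream.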
def copy_attr(self, other):
    """ Copies all other attributes (not methods)
    from the other object to this instance.
    """
    if not isinstance(other, Symbol):
        return  # Nothing done if not a Symbol object

    tmp = re.compile('__.*__')
    for attr in (x for x in dir(other) if not tmp.match(x)):
        # Skip properties and methods defined on the class itself;
        # only instance data should be copied.
        if (hasattr(self.__class__, attr) and
                type(getattr(self.__class__, attr)).__name__
                in ('property', 'function', 'instancemethod')):
            continue
        val = getattr(other, attr)
        if isinstance(val, str) or str(val)[0] != '<':  # Not a value
            setattr(self, attr, val)
def function[copy_attr, parameter[self, other]]: constant[ Copies all other attributes (not methods) from the other object to this instance. ] if <ast.UnaryOp object at 0x7da18f8127a0> begin[:] return[None] variable[tmp] assign[=] call[name[re].compile, parameter[constant[__.*__]]] for taget[name[attr]] in starred[<ast.GeneratorExp object at 0x7da18f8105e0>] begin[:] if <ast.BoolOp object at 0x7da18f810d90> begin[:] continue variable[val] assign[=] call[name[getattr], parameter[name[other], name[attr]]] if <ast.BoolOp object at 0x7da18f812440> begin[:] call[name[setattr], parameter[name[self], name[attr], name[val]]]
keyword[def] identifier[copy_attr] ( identifier[self] , identifier[other] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[other] , identifier[Symbol] ): keyword[return] identifier[tmp] = identifier[re] . identifier[compile] ( literal[string] ) keyword[for] identifier[attr] keyword[in] ( identifier[x] keyword[for] identifier[x] keyword[in] identifier[dir] ( identifier[other] ) keyword[if] keyword[not] identifier[tmp] . identifier[match] ( identifier[x] )): keyword[if] ( identifier[hasattr] ( identifier[self] . identifier[__class__] , identifier[attr] ) keyword[and] identifier[str] ( identifier[type] ( identifier[getattr] ( identifier[self] . identifier[__class__] , identifier[attr] )) keyword[in] ( literal[string] , literal[string] , literal[string] )) ): keyword[continue] identifier[val] = identifier[getattr] ( identifier[other] , identifier[attr] ) keyword[if] identifier[isinstance] ( identifier[val] , identifier[str] ) keyword[or] identifier[str] ( identifier[val] )[ literal[int] ]!= literal[string] : identifier[setattr] ( identifier[self] , identifier[attr] , identifier[val] )
def copy_attr(self, other):
    """ Copies all other attributes (not methods)
    from the other object to this instance.
    """
    if not isinstance(other, Symbol):
        return  # Nothing done if not a Symbol object # depends on [control=['if'], data=[]]
    tmp = re.compile('__.*__')
    for attr in (x for x in dir(other) if not tmp.match(x)):
        # Skip properties and methods defined on the class itself.
        if hasattr(self.__class__, attr) and type(getattr(self.__class__, attr)).__name__ in ('property', 'function', 'instancemethod'):
            continue # depends on [control=['if'], data=[]]
        val = getattr(other, attr)
        if isinstance(val, str) or str(val)[0] != '<': # Not a value
            setattr(self, attr, val) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['attr']]
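A small self-contained check of the copying rule, using a hypothetical bare Symbol stand-in (the real Symbol is the compiler's symbol-table base class): plain instance attributes are copied, while dunder names and class-level methods/properties are skipped:

import re

class Symbol:  # hypothetical stand-in for the real base class
    pass

Symbol.copy_attr = copy_attr  # attach the function above for the demo

src, dst = Symbol(), Symbol()
src.lineno = 42
src.name = 'foo'
dst.copy_attr(src)
print(dst.lineno, dst.name)  # expected: 42 foo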
def pretty_memory_info(): ''' Pretty format memory info. Returns ------- str Memory info. Examples -------- >>> pretty_memory_info() '5MB memory usage' ''' process = psutil.Process(os.getpid()) return '{}MB memory usage'.format(int(process.memory_info().rss / 2**20))
def function[pretty_memory_info, parameter[]]: constant[ Pretty format memory info. Returns ------- str Memory info. Examples -------- >>> pretty_memory_info() '5MB memory usage' ] variable[process] assign[=] call[name[psutil].Process, parameter[call[name[os].getpid, parameter[]]]] return[call[constant[{}MB memory usage].format, parameter[call[name[int], parameter[binary_operation[call[name[process].memory_info, parameter[]].rss / binary_operation[constant[2] ** constant[20]]]]]]]]
keyword[def] identifier[pretty_memory_info] (): literal[string] identifier[process] = identifier[psutil] . identifier[Process] ( identifier[os] . identifier[getpid] ()) keyword[return] literal[string] . identifier[format] ( identifier[int] ( identifier[process] . identifier[memory_info] (). identifier[rss] / literal[int] ** literal[int] ))
def pretty_memory_info(): """ Pretty format memory info. Returns ------- str Memory info. Examples -------- >>> pretty_memory_info() '5MB memory usage' """ process = psutil.Process(os.getpid()) return '{}MB memory usage'.format(int(process.memory_info().rss / 2 ** 20))
def open(self): '''Opens the stream for reading.''' options = copy(self.__options) # Get scheme and format if not already given compression = None if self.__scheme is None or self.__format is None: detected_scheme, detected_format = helpers.detect_scheme_and_format(self.__source) scheme = self.__scheme or detected_scheme format = self.__format or detected_format # Get compression for type in config.SUPPORTED_COMPRESSION: if self.__compression == type or detected_format == type: compression = type else: scheme = self.__scheme format = self.__format # Initiate loader self.__loader = None if scheme is not None: loader_class = self.__custom_loaders.get(scheme) if loader_class is None: if scheme not in config.LOADERS: message = 'Scheme "%s" is not supported' % scheme raise exceptions.SchemeError(message) loader_path = config.LOADERS[scheme] if loader_path: loader_class = helpers.import_attribute(loader_path) if loader_class is not None: loader_options = helpers.extract_options(options, loader_class.options) if compression and 'http_stream' in loader_class.options: loader_options['http_stream'] = False self.__loader = loader_class( bytes_sample_size=self.__bytes_sample_size, **loader_options) # Zip compression if compression == 'zip' and six.PY3: source = self.__loader.load(self.__source, mode='b') with zipfile.ZipFile(source) as archive: name = archive.namelist()[0] if 'filename' in options.keys(): name = options['filename'] del options['filename'] with archive.open(name) as file: source = tempfile.NamedTemporaryFile(suffix='.' + name) for line in file: source.write(line) source.seek(0) self.__source = source self.__loader = StreamLoader(bytes_sample_size=self.__bytes_sample_size) format = self.__format or helpers.detect_scheme_and_format(source.name)[1] scheme = 'stream' # Gzip compression elif compression == 'gz' and six.PY3: name = self.__source.replace('.gz', '') self.__source = gzip.open(self.__loader.load(self.__source, mode='b')) self.__loader = StreamLoader(bytes_sample_size=self.__bytes_sample_size) format = self.__format or helpers.detect_scheme_and_format(name)[1] scheme = 'stream' # Not supported compression elif compression: message = 'Compression "%s" is not supported for your Python version' raise exceptions.TabulatorException(message % compression) # Initiate parser parser_class = self.__custom_parsers.get(format) if parser_class is None: if format not in config.PARSERS: message = 'Format "%s" is not supported' % format raise exceptions.FormatError(message) parser_class = helpers.import_attribute(config.PARSERS[format]) parser_options = helpers.extract_options(options, parser_class.options) self.__parser = parser_class(self.__loader, force_parse=self.__force_parse, **parser_options) # Bad options if options: message = 'Not supported option(s) "%s" for scheme "%s" and format "%s"' message = message % (', '.join(options), scheme, format) warnings.warn(message, UserWarning) # Open and setup self.__parser.open(self.__source, encoding=self.__encoding) self.__extract_sample() self.__extract_headers() if not self.__allow_html: self.__detect_html() # Set scheme/format/encoding self.__actual_scheme = scheme self.__actual_format = format self.__actual_encoding = self.__parser.encoding return self
def function[open, parameter[self]]: constant[Opens the stream for reading.] variable[options] assign[=] call[name[copy], parameter[name[self].__options]] variable[compression] assign[=] constant[None] if <ast.BoolOp object at 0x7da20c992890> begin[:] <ast.Tuple object at 0x7da20c9905b0> assign[=] call[name[helpers].detect_scheme_and_format, parameter[name[self].__source]] variable[scheme] assign[=] <ast.BoolOp object at 0x7da20c991060> variable[format] assign[=] <ast.BoolOp object at 0x7da20c9910f0> for taget[name[type]] in starred[name[config].SUPPORTED_COMPRESSION] begin[:] if <ast.BoolOp object at 0x7da20c9915d0> begin[:] variable[compression] assign[=] name[type] name[self].__loader assign[=] constant[None] if compare[name[scheme] is_not constant[None]] begin[:] variable[loader_class] assign[=] call[name[self].__custom_loaders.get, parameter[name[scheme]]] if compare[name[loader_class] is constant[None]] begin[:] if compare[name[scheme] <ast.NotIn object at 0x7da2590d7190> name[config].LOADERS] begin[:] variable[message] assign[=] binary_operation[constant[Scheme "%s" is not supported] <ast.Mod object at 0x7da2590d6920> name[scheme]] <ast.Raise object at 0x7da20c992b60> variable[loader_path] assign[=] call[name[config].LOADERS][name[scheme]] if name[loader_path] begin[:] variable[loader_class] assign[=] call[name[helpers].import_attribute, parameter[name[loader_path]]] if compare[name[loader_class] is_not constant[None]] begin[:] variable[loader_options] assign[=] call[name[helpers].extract_options, parameter[name[options], name[loader_class].options]] if <ast.BoolOp object at 0x7da20c9939d0> begin[:] call[name[loader_options]][constant[http_stream]] assign[=] constant[False] name[self].__loader assign[=] call[name[loader_class], parameter[]] if <ast.BoolOp object at 0x7da20c990670> begin[:] variable[source] assign[=] call[name[self].__loader.load, parameter[name[self].__source]] with call[name[zipfile].ZipFile, parameter[name[source]]] begin[:] variable[name] assign[=] call[call[name[archive].namelist, parameter[]]][constant[0]] if compare[constant[filename] in call[name[options].keys, parameter[]]] begin[:] variable[name] assign[=] call[name[options]][constant[filename]] <ast.Delete object at 0x7da20c991690> with call[name[archive].open, parameter[name[name]]] begin[:] variable[source] assign[=] call[name[tempfile].NamedTemporaryFile, parameter[]] for taget[name[line]] in starred[name[file]] begin[:] call[name[source].write, parameter[name[line]]] call[name[source].seek, parameter[constant[0]]] name[self].__source assign[=] name[source] name[self].__loader assign[=] call[name[StreamLoader], parameter[]] variable[format] assign[=] <ast.BoolOp object at 0x7da18f00feb0> variable[scheme] assign[=] constant[stream] variable[parser_class] assign[=] call[name[self].__custom_parsers.get, parameter[name[format]]] if compare[name[parser_class] is constant[None]] begin[:] if compare[name[format] <ast.NotIn object at 0x7da2590d7190> name[config].PARSERS] begin[:] variable[message] assign[=] binary_operation[constant[Format "%s" is not supported] <ast.Mod object at 0x7da2590d6920> name[format]] <ast.Raise object at 0x7da18f00ea40> variable[parser_class] assign[=] call[name[helpers].import_attribute, parameter[call[name[config].PARSERS][name[format]]]] variable[parser_options] assign[=] call[name[helpers].extract_options, parameter[name[options], name[parser_class].options]] name[self].__parser assign[=] call[name[parser_class], parameter[name[self].__loader]] if name[options] begin[:] 
variable[message] assign[=] constant[Not supported option(s) "%s" for scheme "%s" and format "%s"] variable[message] assign[=] binary_operation[name[message] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f00dc30>, <ast.Name object at 0x7da18f00f010>, <ast.Name object at 0x7da18f00e500>]]] call[name[warnings].warn, parameter[name[message], name[UserWarning]]] call[name[self].__parser.open, parameter[name[self].__source]] call[name[self].__extract_sample, parameter[]] call[name[self].__extract_headers, parameter[]] if <ast.UnaryOp object at 0x7da18f00c8e0> begin[:] call[name[self].__detect_html, parameter[]] name[self].__actual_scheme assign[=] name[scheme] name[self].__actual_format assign[=] name[format] name[self].__actual_encoding assign[=] name[self].__parser.encoding return[name[self]]
keyword[def] identifier[open] ( identifier[self] ): literal[string] identifier[options] = identifier[copy] ( identifier[self] . identifier[__options] ) identifier[compression] = keyword[None] keyword[if] identifier[self] . identifier[__scheme] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[__format] keyword[is] keyword[None] : identifier[detected_scheme] , identifier[detected_format] = identifier[helpers] . identifier[detect_scheme_and_format] ( identifier[self] . identifier[__source] ) identifier[scheme] = identifier[self] . identifier[__scheme] keyword[or] identifier[detected_scheme] identifier[format] = identifier[self] . identifier[__format] keyword[or] identifier[detected_format] keyword[for] identifier[type] keyword[in] identifier[config] . identifier[SUPPORTED_COMPRESSION] : keyword[if] identifier[self] . identifier[__compression] == identifier[type] keyword[or] identifier[detected_format] == identifier[type] : identifier[compression] = identifier[type] keyword[else] : identifier[scheme] = identifier[self] . identifier[__scheme] identifier[format] = identifier[self] . identifier[__format] identifier[self] . identifier[__loader] = keyword[None] keyword[if] identifier[scheme] keyword[is] keyword[not] keyword[None] : identifier[loader_class] = identifier[self] . identifier[__custom_loaders] . identifier[get] ( identifier[scheme] ) keyword[if] identifier[loader_class] keyword[is] keyword[None] : keyword[if] identifier[scheme] keyword[not] keyword[in] identifier[config] . identifier[LOADERS] : identifier[message] = literal[string] % identifier[scheme] keyword[raise] identifier[exceptions] . identifier[SchemeError] ( identifier[message] ) identifier[loader_path] = identifier[config] . identifier[LOADERS] [ identifier[scheme] ] keyword[if] identifier[loader_path] : identifier[loader_class] = identifier[helpers] . identifier[import_attribute] ( identifier[loader_path] ) keyword[if] identifier[loader_class] keyword[is] keyword[not] keyword[None] : identifier[loader_options] = identifier[helpers] . identifier[extract_options] ( identifier[options] , identifier[loader_class] . identifier[options] ) keyword[if] identifier[compression] keyword[and] literal[string] keyword[in] identifier[loader_class] . identifier[options] : identifier[loader_options] [ literal[string] ]= keyword[False] identifier[self] . identifier[__loader] = identifier[loader_class] ( identifier[bytes_sample_size] = identifier[self] . identifier[__bytes_sample_size] , ** identifier[loader_options] ) keyword[if] identifier[compression] == literal[string] keyword[and] identifier[six] . identifier[PY3] : identifier[source] = identifier[self] . identifier[__loader] . identifier[load] ( identifier[self] . identifier[__source] , identifier[mode] = literal[string] ) keyword[with] identifier[zipfile] . identifier[ZipFile] ( identifier[source] ) keyword[as] identifier[archive] : identifier[name] = identifier[archive] . identifier[namelist] ()[ literal[int] ] keyword[if] literal[string] keyword[in] identifier[options] . identifier[keys] (): identifier[name] = identifier[options] [ literal[string] ] keyword[del] identifier[options] [ literal[string] ] keyword[with] identifier[archive] . identifier[open] ( identifier[name] ) keyword[as] identifier[file] : identifier[source] = identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[suffix] = literal[string] + identifier[name] ) keyword[for] identifier[line] keyword[in] identifier[file] : identifier[source] . 
identifier[write] ( identifier[line] ) identifier[source] . identifier[seek] ( literal[int] ) identifier[self] . identifier[__source] = identifier[source] identifier[self] . identifier[__loader] = identifier[StreamLoader] ( identifier[bytes_sample_size] = identifier[self] . identifier[__bytes_sample_size] ) identifier[format] = identifier[self] . identifier[__format] keyword[or] identifier[helpers] . identifier[detect_scheme_and_format] ( identifier[source] . identifier[name] )[ literal[int] ] identifier[scheme] = literal[string] keyword[elif] identifier[compression] == literal[string] keyword[and] identifier[six] . identifier[PY3] : identifier[name] = identifier[self] . identifier[__source] . identifier[replace] ( literal[string] , literal[string] ) identifier[self] . identifier[__source] = identifier[gzip] . identifier[open] ( identifier[self] . identifier[__loader] . identifier[load] ( identifier[self] . identifier[__source] , identifier[mode] = literal[string] )) identifier[self] . identifier[__loader] = identifier[StreamLoader] ( identifier[bytes_sample_size] = identifier[self] . identifier[__bytes_sample_size] ) identifier[format] = identifier[self] . identifier[__format] keyword[or] identifier[helpers] . identifier[detect_scheme_and_format] ( identifier[name] )[ literal[int] ] identifier[scheme] = literal[string] keyword[elif] identifier[compression] : identifier[message] = literal[string] keyword[raise] identifier[exceptions] . identifier[TabulatorException] ( identifier[message] % identifier[compression] ) identifier[parser_class] = identifier[self] . identifier[__custom_parsers] . identifier[get] ( identifier[format] ) keyword[if] identifier[parser_class] keyword[is] keyword[None] : keyword[if] identifier[format] keyword[not] keyword[in] identifier[config] . identifier[PARSERS] : identifier[message] = literal[string] % identifier[format] keyword[raise] identifier[exceptions] . identifier[FormatError] ( identifier[message] ) identifier[parser_class] = identifier[helpers] . identifier[import_attribute] ( identifier[config] . identifier[PARSERS] [ identifier[format] ]) identifier[parser_options] = identifier[helpers] . identifier[extract_options] ( identifier[options] , identifier[parser_class] . identifier[options] ) identifier[self] . identifier[__parser] = identifier[parser_class] ( identifier[self] . identifier[__loader] , identifier[force_parse] = identifier[self] . identifier[__force_parse] , ** identifier[parser_options] ) keyword[if] identifier[options] : identifier[message] = literal[string] identifier[message] = identifier[message] %( literal[string] . identifier[join] ( identifier[options] ), identifier[scheme] , identifier[format] ) identifier[warnings] . identifier[warn] ( identifier[message] , identifier[UserWarning] ) identifier[self] . identifier[__parser] . identifier[open] ( identifier[self] . identifier[__source] , identifier[encoding] = identifier[self] . identifier[__encoding] ) identifier[self] . identifier[__extract_sample] () identifier[self] . identifier[__extract_headers] () keyword[if] keyword[not] identifier[self] . identifier[__allow_html] : identifier[self] . identifier[__detect_html] () identifier[self] . identifier[__actual_scheme] = identifier[scheme] identifier[self] . identifier[__actual_format] = identifier[format] identifier[self] . identifier[__actual_encoding] = identifier[self] . identifier[__parser] . identifier[encoding] keyword[return] identifier[self]
def open(self): """Opens the stream for reading.""" options = copy(self.__options) # Get scheme and format if not already given compression = None if self.__scheme is None or self.__format is None: (detected_scheme, detected_format) = helpers.detect_scheme_and_format(self.__source) scheme = self.__scheme or detected_scheme format = self.__format or detected_format # Get compression for type in config.SUPPORTED_COMPRESSION: if self.__compression == type or detected_format == type: compression = type # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['type']] # depends on [control=['if'], data=[]] else: scheme = self.__scheme format = self.__format # Initiate loader self.__loader = None if scheme is not None: loader_class = self.__custom_loaders.get(scheme) if loader_class is None: if scheme not in config.LOADERS: message = 'Scheme "%s" is not supported' % scheme raise exceptions.SchemeError(message) # depends on [control=['if'], data=['scheme']] loader_path = config.LOADERS[scheme] if loader_path: loader_class = helpers.import_attribute(loader_path) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['loader_class']] if loader_class is not None: loader_options = helpers.extract_options(options, loader_class.options) if compression and 'http_stream' in loader_class.options: loader_options['http_stream'] = False # depends on [control=['if'], data=[]] self.__loader = loader_class(bytes_sample_size=self.__bytes_sample_size, **loader_options) # depends on [control=['if'], data=['loader_class']] # depends on [control=['if'], data=['scheme']] # Zip compression if compression == 'zip' and six.PY3: source = self.__loader.load(self.__source, mode='b') with zipfile.ZipFile(source) as archive: name = archive.namelist()[0] if 'filename' in options.keys(): name = options['filename'] del options['filename'] # depends on [control=['if'], data=[]] with archive.open(name) as file: source = tempfile.NamedTemporaryFile(suffix='.' 
+ name) for line in file: source.write(line) # depends on [control=['for'], data=['line']] source.seek(0) # depends on [control=['with'], data=['file']] # depends on [control=['with'], data=['archive']] self.__source = source self.__loader = StreamLoader(bytes_sample_size=self.__bytes_sample_size) format = self.__format or helpers.detect_scheme_and_format(source.name)[1] scheme = 'stream' # depends on [control=['if'], data=[]] # Gzip compression elif compression == 'gz' and six.PY3: name = self.__source.replace('.gz', '') self.__source = gzip.open(self.__loader.load(self.__source, mode='b')) self.__loader = StreamLoader(bytes_sample_size=self.__bytes_sample_size) format = self.__format or helpers.detect_scheme_and_format(name)[1] scheme = 'stream' # depends on [control=['if'], data=[]] # Not supported compression elif compression: message = 'Compression "%s" is not supported for your Python version' raise exceptions.TabulatorException(message % compression) # depends on [control=['if'], data=[]] # Initiate parser parser_class = self.__custom_parsers.get(format) if parser_class is None: if format not in config.PARSERS: message = 'Format "%s" is not supported' % format raise exceptions.FormatError(message) # depends on [control=['if'], data=['format']] parser_class = helpers.import_attribute(config.PARSERS[format]) # depends on [control=['if'], data=['parser_class']] parser_options = helpers.extract_options(options, parser_class.options) self.__parser = parser_class(self.__loader, force_parse=self.__force_parse, **parser_options) # Bad options if options: message = 'Not supported option(s) "%s" for scheme "%s" and format "%s"' message = message % (', '.join(options), scheme, format) warnings.warn(message, UserWarning) # depends on [control=['if'], data=[]] # Open and setup self.__parser.open(self.__source, encoding=self.__encoding) self.__extract_sample() self.__extract_headers() if not self.__allow_html: self.__detect_html() # depends on [control=['if'], data=[]] # Set scheme/format/encoding self.__actual_scheme = scheme self.__actual_format = format self.__actual_encoding = self.__parser.encoding return self
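The method above appears to be tabulator's Stream.open; in normal use it runs implicitly when the stream is entered as a context manager. A hedged sketch (the CSV path is a placeholder):

from tabulator import Stream

with Stream('data.csv', headers=1) as stream:  # open() is called on __enter__
    print(stream.scheme, stream.format, stream.encoding)
    for row in stream.iter():
        print(row)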
def get_instance(self, payload): """ Build an instance of DayInstance :param dict payload: Payload response from the API :returns: twilio.rest.preview.bulk_exports.export.day.DayInstance :rtype: twilio.rest.preview.bulk_exports.export.day.DayInstance """ return DayInstance(self._version, payload, resource_type=self._solution['resource_type'], )
def function[get_instance, parameter[self, payload]]: constant[ Build an instance of DayInstance :param dict payload: Payload response from the API :returns: twilio.rest.preview.bulk_exports.export.day.DayInstance :rtype: twilio.rest.preview.bulk_exports.export.day.DayInstance ] return[call[name[DayInstance], parameter[name[self]._version, name[payload]]]]
keyword[def] identifier[get_instance] ( identifier[self] , identifier[payload] ): literal[string] keyword[return] identifier[DayInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[resource_type] = identifier[self] . identifier[_solution] [ literal[string] ],)
def get_instance(self, payload): """ Build an instance of DayInstance :param dict payload: Payload response from the API :returns: twilio.rest.preview.bulk_exports.export.day.DayInstance :rtype: twilio.rest.preview.bulk_exports.export.day.DayInstance """ return DayInstance(self._version, payload, resource_type=self._solution['resource_type'])
def search(query, parent, options): """ Perform Trac search """ # Extend the default max number of tickets to be fetched query = "{0}&max={1}".format(query, MAX_TICKETS) log.debug("Search query: {0}".format(query)) try: result = parent.proxy.ticket.query(query) except xmlrpclib.Fault as error: log.error("An error encountered, while searching for tickets.") raise ReportError(error) except xmlrpclib.ProtocolError as error: log.debug(error) log.error("Trac url: {0}".format(parent.url)) raise ReportError( "Unable to contact Trac server. Is the url above correct?") log.debug("Search result: {0}".format(result)) # Fetch tickets and their history using multicall multicall = xmlrpclib.MultiCall(parent.proxy) for ticket_id in sorted(result): multicall.ticket.get(ticket_id) multicall.ticket.changeLog(ticket_id) log.debug(u"Fetching trac tickets and their history") result = list(multicall()) tickets = result[::2] changelogs = result[1::2] # Print debugging info for ticket, changelog in zip(tickets, changelogs): log.debug("Fetched ticket #{0}".format(ticket[0])) log.debug(pretty(ticket)) log.debug("Changelog:") log.debug(pretty(changelog)) # Return the list of ticket objects return [ Trac(ticket, changelg, parent=parent, options=options) for ticket, changelg in zip(tickets, changelogs)]
def function[search, parameter[query, parent, options]]: constant[ Perform Trac search ] variable[query] assign[=] call[constant[{0}&max={1}].format, parameter[name[query], name[MAX_TICKETS]]] call[name[log].debug, parameter[call[constant[Search query: {0}].format, parameter[name[query]]]]] <ast.Try object at 0x7da1b1ebaf20> call[name[log].debug, parameter[call[constant[Search result: {0}].format, parameter[name[result]]]]] variable[multicall] assign[=] call[name[xmlrpclib].MultiCall, parameter[name[parent].proxy]] for taget[name[ticket_id]] in starred[call[name[sorted], parameter[name[result]]]] begin[:] call[name[multicall].ticket.get, parameter[name[ticket_id]]] call[name[multicall].ticket.changeLog, parameter[name[ticket_id]]] call[name[log].debug, parameter[constant[Fetching trac tickets and their history]]] variable[result] assign[=] call[name[list], parameter[call[name[multicall], parameter[]]]] variable[tickets] assign[=] call[name[result]][<ast.Slice object at 0x7da1b1e98040>] variable[changelogs] assign[=] call[name[result]][<ast.Slice object at 0x7da1b1e99510>] for taget[tuple[[<ast.Name object at 0x7da1b1e98bb0>, <ast.Name object at 0x7da1b1e984c0>]]] in starred[call[name[zip], parameter[name[tickets], name[changelogs]]]] begin[:] call[name[log].debug, parameter[call[constant[Fetched ticket #{0}].format, parameter[call[name[ticket]][constant[0]]]]]] call[name[log].debug, parameter[call[name[pretty], parameter[name[ticket]]]]] call[name[log].debug, parameter[constant[Changelog:]]] call[name[log].debug, parameter[call[name[pretty], parameter[name[changelog]]]]] return[<ast.ListComp object at 0x7da1b1e99750>]
keyword[def] identifier[search] ( identifier[query] , identifier[parent] , identifier[options] ): literal[string] identifier[query] = literal[string] . identifier[format] ( identifier[query] , identifier[MAX_TICKETS] ) identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[query] )) keyword[try] : identifier[result] = identifier[parent] . identifier[proxy] . identifier[ticket] . identifier[query] ( identifier[query] ) keyword[except] identifier[xmlrpclib] . identifier[Fault] keyword[as] identifier[error] : identifier[log] . identifier[error] ( literal[string] ) keyword[raise] identifier[ReportError] ( identifier[error] ) keyword[except] identifier[xmlrpclib] . identifier[ProtocolError] keyword[as] identifier[error] : identifier[log] . identifier[debug] ( identifier[error] ) identifier[log] . identifier[error] ( literal[string] . identifier[format] ( identifier[parent] . identifier[url] )) keyword[raise] identifier[ReportError] ( literal[string] ) identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[result] )) identifier[multicall] = identifier[xmlrpclib] . identifier[MultiCall] ( identifier[parent] . identifier[proxy] ) keyword[for] identifier[ticket_id] keyword[in] identifier[sorted] ( identifier[result] ): identifier[multicall] . identifier[ticket] . identifier[get] ( identifier[ticket_id] ) identifier[multicall] . identifier[ticket] . identifier[changeLog] ( identifier[ticket_id] ) identifier[log] . identifier[debug] ( literal[string] ) identifier[result] = identifier[list] ( identifier[multicall] ()) identifier[tickets] = identifier[result] [:: literal[int] ] identifier[changelogs] = identifier[result] [ literal[int] :: literal[int] ] keyword[for] identifier[ticket] , identifier[changelog] keyword[in] identifier[zip] ( identifier[tickets] , identifier[changelogs] ): identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[ticket] [ literal[int] ])) identifier[log] . identifier[debug] ( identifier[pretty] ( identifier[ticket] )) identifier[log] . identifier[debug] ( literal[string] ) identifier[log] . identifier[debug] ( identifier[pretty] ( identifier[changelog] )) keyword[return] [ identifier[Trac] ( identifier[ticket] , identifier[changelg] , identifier[parent] = identifier[parent] , identifier[options] = identifier[options] ) keyword[for] identifier[ticket] , identifier[changelg] keyword[in] identifier[zip] ( identifier[tickets] , identifier[changelogs] )]
def search(query, parent, options): """ Perform Trac search """ # Extend the default max number of tickets to be fetched query = '{0}&max={1}'.format(query, MAX_TICKETS) log.debug('Search query: {0}'.format(query)) try: result = parent.proxy.ticket.query(query) # depends on [control=['try'], data=[]] except xmlrpclib.Fault as error: log.error('An error encountered, while searching for tickets.') raise ReportError(error) # depends on [control=['except'], data=['error']] except xmlrpclib.ProtocolError as error: log.debug(error) log.error('Trac url: {0}'.format(parent.url)) raise ReportError('Unable to contact Trac server. Is the url above correct?') # depends on [control=['except'], data=['error']] log.debug('Search result: {0}'.format(result)) # Fetch tickets and their history using multicall multicall = xmlrpclib.MultiCall(parent.proxy) for ticket_id in sorted(result): multicall.ticket.get(ticket_id) multicall.ticket.changeLog(ticket_id) # depends on [control=['for'], data=['ticket_id']] log.debug(u'Fetching trac tickets and their history') result = list(multicall()) tickets = result[::2] changelogs = result[1::2] # Print debugging info for (ticket, changelog) in zip(tickets, changelogs): log.debug('Fetched ticket #{0}'.format(ticket[0])) log.debug(pretty(ticket)) log.debug('Changelog:') log.debug(pretty(changelog)) # depends on [control=['for'], data=[]] # Return the list of ticket objects return [Trac(ticket, changelg, parent=parent, options=options) for (ticket, changelg) in zip(tickets, changelogs)]
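The result[::2] / result[1::2] slicing above de-interleaves the multicall answers into (ticket, changelog) pairs; the same pattern on plain data:

result = ['ticket-1', 'log-1', 'ticket-2', 'log-2']
tickets, changelogs = result[::2], result[1::2]
print(list(zip(tickets, changelogs)))
# [('ticket-1', 'log-1'), ('ticket-2', 'log-2')]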
def _split_keys_v2(joined):
    """ Split two keys out of a string created by _join_keys_v2. """
    left, _, right = joined.rpartition('::')
    return _decode_v2(left), _decode_v2(right)
def function[_split_keys_v2, parameter[joined]]: constant[ Split two keys out a string created by _join_keys_v2. ] <ast.Tuple object at 0x7da20c76eb60> assign[=] call[name[joined].rpartition, parameter[constant[::]]] return[tuple[[<ast.Call object at 0x7da20c76d2a0>, <ast.Call object at 0x7da20c76fcd0>]]]
keyword[def] identifier[_split_keys_v2] ( identifier[joined] ): literal[string] identifier[left] , identifier[_] , identifier[right] = identifier[joined] . identifier[rpartition] ( literal[string] ) keyword[return] identifier[_decode_v2] ( identifier[left] ), identifier[_decode_v2] ( identifier[right] )
def _split_keys_v2(joined):
    """ Split two keys out of a string created by _join_keys_v2. """
    (left, _, right) = joined.rpartition('::')
    return (_decode_v2(left), _decode_v2(right))
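rpartition splits at the last '::', so a '::' embedded in the left key survives; with an identity stand-in for _decode_v2 (the real decoder reverses whatever escaping _join_keys_v2 applies):

def _decode_v2(s):  # identity stand-in for illustration only
    return s

print(_split_keys_v2('left::part::right'))  # ('left::part', 'right')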
def recursive_apply(inval, func): '''Recursively apply a function to all levels of nested iterables :param inval: the object to run the function on :param func: the function that will be run on the inval ''' if isinstance(inval, dict): return {k: recursive_apply(v, func) for k, v in inval.items()} elif isinstance(inval, list): return [recursive_apply(v, func) for v in inval] else: return func(inval)
def function[recursive_apply, parameter[inval, func]]: constant[Recursively apply a function to all levels of nested iterables :param inval: the object to run the function on :param func: the function that will be run on the inval ] if call[name[isinstance], parameter[name[inval], name[dict]]] begin[:] return[<ast.DictComp object at 0x7da1b2554a00>]
keyword[def] identifier[recursive_apply] ( identifier[inval] , identifier[func] ): literal[string] keyword[if] identifier[isinstance] ( identifier[inval] , identifier[dict] ): keyword[return] { identifier[k] : identifier[recursive_apply] ( identifier[v] , identifier[func] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[inval] . identifier[items] ()} keyword[elif] identifier[isinstance] ( identifier[inval] , identifier[list] ): keyword[return] [ identifier[recursive_apply] ( identifier[v] , identifier[func] ) keyword[for] identifier[v] keyword[in] identifier[inval] ] keyword[else] : keyword[return] identifier[func] ( identifier[inval] )
def recursive_apply(inval, func): """Recursively apply a function to all levels of nested iterables :param inval: the object to run the function on :param func: the function that will be run on the inval """ if isinstance(inval, dict): return {k: recursive_apply(v, func) for (k, v) in inval.items()} # depends on [control=['if'], data=[]] elif isinstance(inval, list): return [recursive_apply(v, func) for v in inval] # depends on [control=['if'], data=[]] else: return func(inval)
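A quick round trip showing that dicts and lists are walked recursively while every other value is handed to func:

data = {'a': [1, 2], 'b': {'c': 3}, 'd': 'x'}
print(recursive_apply(data, lambda v: v * 3))
# {'a': [3, 6], 'b': {'c': 9}, 'd': 'xxx'}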
def export_tour(tour_steps, name=None, filename="my_tour.js", url=None): """ Exports a tour as a JS file. It will include necessary resources as well, such as jQuery. You'll be able to copy the tour directly into the Console of any web browser to play the tour outside of SeleniumBase runs. """ if not name: name = "default" if name not in tour_steps: raise Exception("Tour {%s} does not exist!" % name) if not filename.endswith('.js'): raise Exception('Tour file must end in ".js"!') if not url: url = "data:," tour_type = None if "Bootstrap" in tour_steps[name][0]: tour_type = "bootstrap" elif "Hopscotch" in tour_steps[name][0]: tour_type = "hopscotch" elif "IntroJS" in tour_steps[name][0]: tour_type = "introjs" elif "Shepherd" in tour_steps[name][0]: tour_type = "shepherd" else: raise Exception('Unknown tour type!') instructions = ( '''//////// Load Tour Start Page (if not there now) ////////\n\n''' '''if (window.location.href != "%s") {\n''' ''' window.location.href="%s";\n''' '''}\n\n''' '''//////// Resources ////////\n\n''' '''function injectCSS(css_link) {''' '''var head = document.getElementsByTagName("head")[0];''' '''var link = document.createElement("link");''' '''link.rel = "stylesheet";''' '''link.type = "text/css";''' '''link.href = css_link;''' '''link.crossorigin = "anonymous";''' '''head.appendChild(link);''' '''};\n''' '''function injectJS(js_link) {''' '''var head = document.getElementsByTagName("head")[0];''' '''var script = document.createElement("script");''' '''script.src = js_link;''' '''script.defer;''' '''script.type="text/javascript";''' '''script.crossorigin = "anonymous";''' '''script.onload = function() { null };''' '''head.appendChild(script);''' '''};\n''' '''function injectStyle(css) {''' '''var head = document.getElementsByTagName("head")[0];''' '''var style = document.createElement("style");''' '''style.type = "text/css";''' '''style.appendChild(document.createTextNode(css));''' '''head.appendChild(style);''' '''};\n''' % (url, url)) if tour_type == "bootstrap": jquery_js = constants.JQuery.MIN_JS bootstrap_tour_css = constants.BootstrapTour.MIN_CSS bootstrap_tour_js = constants.BootstrapTour.MIN_JS backdrop_style = style_sheet.bt_backdrop_style backdrop_style = backdrop_style.replace('\n', '') backdrop_style = js_utils.escape_quotes_if_needed(backdrop_style) instructions += 'injectJS("%s");' % jquery_js instructions += '\n\n//////// Resources - Load 2 ////////\n\n' instructions += 'injectCSS("%s");\n' % bootstrap_tour_css instructions += 'injectStyle("%s");\n' % backdrop_style instructions += 'injectJS("%s");' % bootstrap_tour_js elif tour_type == "hopscotch": hopscotch_css = constants.Hopscotch.MIN_CSS hopscotch_js = constants.Hopscotch.MIN_JS backdrop_style = style_sheet.hops_backdrop_style backdrop_style = backdrop_style.replace('\n', '') backdrop_style = js_utils.escape_quotes_if_needed(backdrop_style) instructions += 'injectCSS("%s");\n' % hopscotch_css instructions += 'injectStyle("%s");\n' % backdrop_style instructions += 'injectJS("%s");' % hopscotch_js elif tour_type == "introjs": intro_css = constants.IntroJS.MIN_CSS intro_js = constants.IntroJS.MIN_JS instructions += 'injectCSS("%s");\n' % intro_css instructions += 'injectJS("%s");' % intro_js elif tour_type == "shepherd": jquery_js = constants.JQuery.MIN_JS shepherd_js = constants.Shepherd.MIN_JS sh_theme_arrows_css = constants.Shepherd.THEME_ARROWS_CSS sh_theme_arrows_fix_css = constants.Shepherd.THEME_ARR_FIX_CSS sh_theme_default_css = constants.Shepherd.THEME_DEFAULT_CSS sh_theme_dark_css = 
constants.Shepherd.THEME_DARK_CSS sh_theme_sq_css = constants.Shepherd.THEME_SQ_CSS sh_theme_sq_dark_css = constants.Shepherd.THEME_SQ_DK_CSS tether_js = constants.Tether.MIN_JS spinner_css = constants.Messenger.SPINNER_CSS backdrop_style = style_sheet.sh_backdrop_style backdrop_style = backdrop_style.replace('\n', '') backdrop_style = js_utils.escape_quotes_if_needed(backdrop_style) instructions += 'injectCSS("%s");\n' % spinner_css instructions += 'injectJS("%s");\n' % jquery_js instructions += 'injectJS("%s");' % tether_js instructions += '\n\n//////// Resources - Load 2 ////////\n\n' instructions += 'injectCSS("%s");' % sh_theme_arrows_css instructions += 'injectCSS("%s");' % sh_theme_arrows_fix_css instructions += 'injectCSS("%s");' % sh_theme_default_css instructions += 'injectCSS("%s");' % sh_theme_dark_css instructions += 'injectCSS("%s");' % sh_theme_sq_css instructions += 'injectCSS("%s");\n' % sh_theme_sq_dark_css instructions += 'injectStyle("%s");\n' % backdrop_style instructions += 'injectJS("%s");' % shepherd_js instructions += '\n\n//////// Tour Code ////////\n\n' for tour_step in tour_steps[name]: instructions += tour_step if tour_type == "bootstrap": instructions += ( """]); // Initialize the tour tour.init(); // Start the tour tour.start(); $tour = tour; $tour.restart();\n""") elif tour_type == "hopscotch": instructions += ( """] }; // Start the tour! hopscotch.startTour(tour); $tour = hopscotch;\n""") elif tour_type == "introjs": instructions += ( """] }); intro.setOption("disableInteraction", true); intro.setOption("overlayOpacity", .29); intro.setOption("scrollToElement", true); intro.setOption("keyboardNavigation", true); intro.setOption("exitOnEsc", false); intro.setOption("exitOnOverlayClick", false); intro.setOption("showStepNumbers", false); intro.setOption("showProgress", false); intro.start(); $tour = intro; }; startIntro();\n""") elif tour_type == "shepherd": instructions += ( """ tour.start(); $tour = tour;\n""") else: pass exported_tours_folder = EXPORTED_TOURS_FOLDER if exported_tours_folder.endswith("/"): exported_tours_folder = exported_tours_folder[:-1] if not os.path.exists(exported_tours_folder): try: os.makedirs(exported_tours_folder) except Exception: pass import codecs file_path = exported_tours_folder + "/" + filename out_file = codecs.open(file_path, "w+") out_file.writelines(instructions) out_file.close() print('\n>>> [%s] was saved!\n' % file_path)
def function[export_tour, parameter[tour_steps, name, filename, url]]: constant[ Exports a tour as a JS file. It will include necessary resources as well, such as jQuery. You'll be able to copy the tour directly into the Console of any web browser to play the tour outside of SeleniumBase runs. ] if <ast.UnaryOp object at 0x7da1b1baaa10> begin[:] variable[name] assign[=] constant[default] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[tour_steps]] begin[:] <ast.Raise object at 0x7da1b1baa710> if <ast.UnaryOp object at 0x7da1b1baa860> begin[:] <ast.Raise object at 0x7da1b1baa800> if <ast.UnaryOp object at 0x7da1b1baa4a0> begin[:] variable[url] assign[=] constant[data:,] variable[tour_type] assign[=] constant[None] if compare[constant[Bootstrap] in call[call[name[tour_steps]][name[name]]][constant[0]]] begin[:] variable[tour_type] assign[=] constant[bootstrap] variable[instructions] assign[=] binary_operation[constant[//////// Load Tour Start Page (if not there now) //////// if (window.location.href != "%s") { window.location.href="%s"; } //////// Resources //////// function injectCSS(css_link) {var head = document.getElementsByTagName("head")[0];var link = document.createElement("link");link.rel = "stylesheet";link.type = "text/css";link.href = css_link;link.crossorigin = "anonymous";head.appendChild(link);}; function injectJS(js_link) {var head = document.getElementsByTagName("head")[0];var script = document.createElement("script");script.src = js_link;script.defer;script.type="text/javascript";script.crossorigin = "anonymous";script.onload = function() { null };head.appendChild(script);}; function injectStyle(css) {var head = document.getElementsByTagName("head")[0];var style = document.createElement("style");style.type = "text/css";style.appendChild(document.createTextNode(css));head.appendChild(style);}; ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1ba80d0>, <ast.Name object at 0x7da1b1ba8490>]]] if compare[name[tour_type] equal[==] constant[bootstrap]] begin[:] variable[jquery_js] assign[=] name[constants].JQuery.MIN_JS variable[bootstrap_tour_css] assign[=] name[constants].BootstrapTour.MIN_CSS variable[bootstrap_tour_js] assign[=] name[constants].BootstrapTour.MIN_JS variable[backdrop_style] assign[=] name[style_sheet].bt_backdrop_style variable[backdrop_style] assign[=] call[name[backdrop_style].replace, parameter[constant[ ], constant[]]] variable[backdrop_style] assign[=] call[name[js_utils].escape_quotes_if_needed, parameter[name[backdrop_style]]] <ast.AugAssign object at 0x7da1b1ba8ac0> <ast.AugAssign object at 0x7da1b1ba8c10> <ast.AugAssign object at 0x7da1b1ba8be0> <ast.AugAssign object at 0x7da1b1ba9180> <ast.AugAssign object at 0x7da1b1ba9450> <ast.AugAssign object at 0x7da1b1b63d30> for taget[name[tour_step]] in starred[call[name[tour_steps]][name[name]]] begin[:] <ast.AugAssign object at 0x7da1b1b62ad0> if compare[name[tour_type] equal[==] constant[bootstrap]] begin[:] <ast.AugAssign object at 0x7da1b1b638b0> variable[exported_tours_folder] assign[=] name[EXPORTED_TOURS_FOLDER] if call[name[exported_tours_folder].endswith, parameter[constant[/]]] begin[:] variable[exported_tours_folder] assign[=] call[name[exported_tours_folder]][<ast.Slice object at 0x7da1b1b60130>] if <ast.UnaryOp object at 0x7da1b1b635e0> begin[:] <ast.Try object at 0x7da1b1b63070> import module[codecs] variable[file_path] assign[=] binary_operation[binary_operation[name[exported_tours_folder] + constant[/]] + name[filename]] variable[out_file] assign[=] 
call[name[codecs].open, parameter[name[file_path], constant[w+]]] call[name[out_file].writelines, parameter[name[instructions]]] call[name[out_file].close, parameter[]] call[name[print], parameter[binary_operation[constant[ >>> [%s] was saved! ] <ast.Mod object at 0x7da2590d6920> name[file_path]]]]
keyword[def] identifier[export_tour] ( identifier[tour_steps] , identifier[name] = keyword[None] , identifier[filename] = literal[string] , identifier[url] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[name] : identifier[name] = literal[string] keyword[if] identifier[name] keyword[not] keyword[in] identifier[tour_steps] : keyword[raise] identifier[Exception] ( literal[string] % identifier[name] ) keyword[if] keyword[not] identifier[filename] . identifier[endswith] ( literal[string] ): keyword[raise] identifier[Exception] ( literal[string] ) keyword[if] keyword[not] identifier[url] : identifier[url] = literal[string] identifier[tour_type] = keyword[None] keyword[if] literal[string] keyword[in] identifier[tour_steps] [ identifier[name] ][ literal[int] ]: identifier[tour_type] = literal[string] keyword[elif] literal[string] keyword[in] identifier[tour_steps] [ identifier[name] ][ literal[int] ]: identifier[tour_type] = literal[string] keyword[elif] literal[string] keyword[in] identifier[tour_steps] [ identifier[name] ][ literal[int] ]: identifier[tour_type] = literal[string] keyword[elif] literal[string] keyword[in] identifier[tour_steps] [ identifier[name] ][ literal[int] ]: identifier[tour_type] = literal[string] keyword[else] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[instructions] =( literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] %( identifier[url] , identifier[url] )) keyword[if] identifier[tour_type] == literal[string] : identifier[jquery_js] = identifier[constants] . identifier[JQuery] . identifier[MIN_JS] identifier[bootstrap_tour_css] = identifier[constants] . identifier[BootstrapTour] . identifier[MIN_CSS] identifier[bootstrap_tour_js] = identifier[constants] . identifier[BootstrapTour] . identifier[MIN_JS] identifier[backdrop_style] = identifier[style_sheet] . identifier[bt_backdrop_style] identifier[backdrop_style] = identifier[backdrop_style] . identifier[replace] ( literal[string] , literal[string] ) identifier[backdrop_style] = identifier[js_utils] . identifier[escape_quotes_if_needed] ( identifier[backdrop_style] ) identifier[instructions] += literal[string] % identifier[jquery_js] identifier[instructions] += literal[string] identifier[instructions] += literal[string] % identifier[bootstrap_tour_css] identifier[instructions] += literal[string] % identifier[backdrop_style] identifier[instructions] += literal[string] % identifier[bootstrap_tour_js] keyword[elif] identifier[tour_type] == literal[string] : identifier[hopscotch_css] = identifier[constants] . identifier[Hopscotch] . identifier[MIN_CSS] identifier[hopscotch_js] = identifier[constants] . identifier[Hopscotch] . identifier[MIN_JS] identifier[backdrop_style] = identifier[style_sheet] . identifier[hops_backdrop_style] identifier[backdrop_style] = identifier[backdrop_style] . identifier[replace] ( literal[string] , literal[string] ) identifier[backdrop_style] = identifier[js_utils] . 
identifier[escape_quotes_if_needed] ( identifier[backdrop_style] ) identifier[instructions] += literal[string] % identifier[hopscotch_css] identifier[instructions] += literal[string] % identifier[backdrop_style] identifier[instructions] += literal[string] % identifier[hopscotch_js] keyword[elif] identifier[tour_type] == literal[string] : identifier[intro_css] = identifier[constants] . identifier[IntroJS] . identifier[MIN_CSS] identifier[intro_js] = identifier[constants] . identifier[IntroJS] . identifier[MIN_JS] identifier[instructions] += literal[string] % identifier[intro_css] identifier[instructions] += literal[string] % identifier[intro_js] keyword[elif] identifier[tour_type] == literal[string] : identifier[jquery_js] = identifier[constants] . identifier[JQuery] . identifier[MIN_JS] identifier[shepherd_js] = identifier[constants] . identifier[Shepherd] . identifier[MIN_JS] identifier[sh_theme_arrows_css] = identifier[constants] . identifier[Shepherd] . identifier[THEME_ARROWS_CSS] identifier[sh_theme_arrows_fix_css] = identifier[constants] . identifier[Shepherd] . identifier[THEME_ARR_FIX_CSS] identifier[sh_theme_default_css] = identifier[constants] . identifier[Shepherd] . identifier[THEME_DEFAULT_CSS] identifier[sh_theme_dark_css] = identifier[constants] . identifier[Shepherd] . identifier[THEME_DARK_CSS] identifier[sh_theme_sq_css] = identifier[constants] . identifier[Shepherd] . identifier[THEME_SQ_CSS] identifier[sh_theme_sq_dark_css] = identifier[constants] . identifier[Shepherd] . identifier[THEME_SQ_DK_CSS] identifier[tether_js] = identifier[constants] . identifier[Tether] . identifier[MIN_JS] identifier[spinner_css] = identifier[constants] . identifier[Messenger] . identifier[SPINNER_CSS] identifier[backdrop_style] = identifier[style_sheet] . identifier[sh_backdrop_style] identifier[backdrop_style] = identifier[backdrop_style] . identifier[replace] ( literal[string] , literal[string] ) identifier[backdrop_style] = identifier[js_utils] . 
identifier[escape_quotes_if_needed] ( identifier[backdrop_style] ) identifier[instructions] += literal[string] % identifier[spinner_css] identifier[instructions] += literal[string] % identifier[jquery_js] identifier[instructions] += literal[string] % identifier[tether_js] identifier[instructions] += literal[string] identifier[instructions] += literal[string] % identifier[sh_theme_arrows_css] identifier[instructions] += literal[string] % identifier[sh_theme_arrows_fix_css] identifier[instructions] += literal[string] % identifier[sh_theme_default_css] identifier[instructions] += literal[string] % identifier[sh_theme_dark_css] identifier[instructions] += literal[string] % identifier[sh_theme_sq_css] identifier[instructions] += literal[string] % identifier[sh_theme_sq_dark_css] identifier[instructions] += literal[string] % identifier[backdrop_style] identifier[instructions] += literal[string] % identifier[shepherd_js] identifier[instructions] += literal[string] keyword[for] identifier[tour_step] keyword[in] identifier[tour_steps] [ identifier[name] ]: identifier[instructions] += identifier[tour_step] keyword[if] identifier[tour_type] == literal[string] : identifier[instructions] +=( literal[string] ) keyword[elif] identifier[tour_type] == literal[string] : identifier[instructions] +=( literal[string] ) keyword[elif] identifier[tour_type] == literal[string] : identifier[instructions] +=( literal[string] ) keyword[elif] identifier[tour_type] == literal[string] : identifier[instructions] +=( literal[string] ) keyword[else] : keyword[pass] identifier[exported_tours_folder] = identifier[EXPORTED_TOURS_FOLDER] keyword[if] identifier[exported_tours_folder] . identifier[endswith] ( literal[string] ): identifier[exported_tours_folder] = identifier[exported_tours_folder] [:- literal[int] ] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[exported_tours_folder] ): keyword[try] : identifier[os] . identifier[makedirs] ( identifier[exported_tours_folder] ) keyword[except] identifier[Exception] : keyword[pass] keyword[import] identifier[codecs] identifier[file_path] = identifier[exported_tours_folder] + literal[string] + identifier[filename] identifier[out_file] = identifier[codecs] . identifier[open] ( identifier[file_path] , literal[string] ) identifier[out_file] . identifier[writelines] ( identifier[instructions] ) identifier[out_file] . identifier[close] () identifier[print] ( literal[string] % identifier[file_path] )
def export_tour(tour_steps, name=None, filename='my_tour.js', url=None): """ Exports a tour as a JS file. It will include necessary resources as well, such as jQuery. You'll be able to copy the tour directly into the Console of any web browser to play the tour outside of SeleniumBase runs. """ if not name: name = 'default' # depends on [control=['if'], data=[]] if name not in tour_steps: raise Exception('Tour {%s} does not exist!' % name) # depends on [control=['if'], data=['name']] if not filename.endswith('.js'): raise Exception('Tour file must end in ".js"!') # depends on [control=['if'], data=[]] if not url: url = 'data:,' # depends on [control=['if'], data=[]] tour_type = None if 'Bootstrap' in tour_steps[name][0]: tour_type = 'bootstrap' # depends on [control=['if'], data=[]] elif 'Hopscotch' in tour_steps[name][0]: tour_type = 'hopscotch' # depends on [control=['if'], data=[]] elif 'IntroJS' in tour_steps[name][0]: tour_type = 'introjs' # depends on [control=['if'], data=[]] elif 'Shepherd' in tour_steps[name][0]: tour_type = 'shepherd' # depends on [control=['if'], data=[]] else: raise Exception('Unknown tour type!') instructions = '//////// Load Tour Start Page (if not there now) ////////\n\nif (window.location.href != "%s") {\n window.location.href="%s";\n}\n\n//////// Resources ////////\n\nfunction injectCSS(css_link) {var head = document.getElementsByTagName("head")[0];var link = document.createElement("link");link.rel = "stylesheet";link.type = "text/css";link.href = css_link;link.crossorigin = "anonymous";head.appendChild(link);};\nfunction injectJS(js_link) {var head = document.getElementsByTagName("head")[0];var script = document.createElement("script");script.src = js_link;script.defer;script.type="text/javascript";script.crossorigin = "anonymous";script.onload = function() { null };head.appendChild(script);};\nfunction injectStyle(css) {var head = document.getElementsByTagName("head")[0];var style = document.createElement("style");style.type = "text/css";style.appendChild(document.createTextNode(css));head.appendChild(style);};\n' % (url, url) if tour_type == 'bootstrap': jquery_js = constants.JQuery.MIN_JS bootstrap_tour_css = constants.BootstrapTour.MIN_CSS bootstrap_tour_js = constants.BootstrapTour.MIN_JS backdrop_style = style_sheet.bt_backdrop_style backdrop_style = backdrop_style.replace('\n', '') backdrop_style = js_utils.escape_quotes_if_needed(backdrop_style) instructions += 'injectJS("%s");' % jquery_js instructions += '\n\n//////// Resources - Load 2 ////////\n\n' instructions += 'injectCSS("%s");\n' % bootstrap_tour_css instructions += 'injectStyle("%s");\n' % backdrop_style instructions += 'injectJS("%s");' % bootstrap_tour_js # depends on [control=['if'], data=[]] elif tour_type == 'hopscotch': hopscotch_css = constants.Hopscotch.MIN_CSS hopscotch_js = constants.Hopscotch.MIN_JS backdrop_style = style_sheet.hops_backdrop_style backdrop_style = backdrop_style.replace('\n', '') backdrop_style = js_utils.escape_quotes_if_needed(backdrop_style) instructions += 'injectCSS("%s");\n' % hopscotch_css instructions += 'injectStyle("%s");\n' % backdrop_style instructions += 'injectJS("%s");' % hopscotch_js # depends on [control=['if'], data=[]] elif tour_type == 'introjs': intro_css = constants.IntroJS.MIN_CSS intro_js = constants.IntroJS.MIN_JS instructions += 'injectCSS("%s");\n' % intro_css instructions += 'injectJS("%s");' % intro_js # depends on [control=['if'], data=[]] elif tour_type == 'shepherd': jquery_js = constants.JQuery.MIN_JS shepherd_js = 
constants.Shepherd.MIN_JS sh_theme_arrows_css = constants.Shepherd.THEME_ARROWS_CSS sh_theme_arrows_fix_css = constants.Shepherd.THEME_ARR_FIX_CSS sh_theme_default_css = constants.Shepherd.THEME_DEFAULT_CSS sh_theme_dark_css = constants.Shepherd.THEME_DARK_CSS sh_theme_sq_css = constants.Shepherd.THEME_SQ_CSS sh_theme_sq_dark_css = constants.Shepherd.THEME_SQ_DK_CSS tether_js = constants.Tether.MIN_JS spinner_css = constants.Messenger.SPINNER_CSS backdrop_style = style_sheet.sh_backdrop_style backdrop_style = backdrop_style.replace('\n', '') backdrop_style = js_utils.escape_quotes_if_needed(backdrop_style) instructions += 'injectCSS("%s");\n' % spinner_css instructions += 'injectJS("%s");\n' % jquery_js instructions += 'injectJS("%s");' % tether_js instructions += '\n\n//////// Resources - Load 2 ////////\n\n' instructions += 'injectCSS("%s");' % sh_theme_arrows_css instructions += 'injectCSS("%s");' % sh_theme_arrows_fix_css instructions += 'injectCSS("%s");' % sh_theme_default_css instructions += 'injectCSS("%s");' % sh_theme_dark_css instructions += 'injectCSS("%s");' % sh_theme_sq_css instructions += 'injectCSS("%s");\n' % sh_theme_sq_dark_css instructions += 'injectStyle("%s");\n' % backdrop_style instructions += 'injectJS("%s");' % shepherd_js # depends on [control=['if'], data=[]] instructions += '\n\n//////// Tour Code ////////\n\n' for tour_step in tour_steps[name]: instructions += tour_step # depends on [control=['for'], data=['tour_step']] if tour_type == 'bootstrap': instructions += ']);\n // Initialize the tour\n tour.init();\n // Start the tour\n tour.start();\n $tour = tour;\n $tour.restart();\n' # depends on [control=['if'], data=[]] elif tour_type == 'hopscotch': instructions += ']\n };\n // Start the tour!\n hopscotch.startTour(tour);\n $tour = hopscotch;\n' # depends on [control=['if'], data=[]] elif tour_type == 'introjs': instructions += ']\n });\n intro.setOption("disableInteraction", true);\n intro.setOption("overlayOpacity", .29);\n intro.setOption("scrollToElement", true);\n intro.setOption("keyboardNavigation", true);\n intro.setOption("exitOnEsc", false);\n intro.setOption("exitOnOverlayClick", false);\n intro.setOption("showStepNumbers", false);\n intro.setOption("showProgress", false);\n intro.start();\n $tour = intro;\n };\n startIntro();\n' # depends on [control=['if'], data=[]] elif tour_type == 'shepherd': instructions += '\n tour.start();\n $tour = tour;\n' # depends on [control=['if'], data=[]] else: pass exported_tours_folder = EXPORTED_TOURS_FOLDER if exported_tours_folder.endswith('/'): exported_tours_folder = exported_tours_folder[:-1] # depends on [control=['if'], data=[]] if not os.path.exists(exported_tours_folder): try: os.makedirs(exported_tours_folder) # depends on [control=['try'], data=[]] except Exception: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] import codecs file_path = exported_tours_folder + '/' + filename out_file = codecs.open(file_path, 'w+') out_file.writelines(instructions) out_file.close() print('\n>>> [%s] was saved!\n' % file_path)
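In SeleniumBase this helper is normally reached through BaseCase's tour methods rather than called directly; a sketch under that assumption (URL, step text, and selector are placeholders):

from seleniumbase import BaseCase

class ExportTourTest(BaseCase):
    def test_export_tour(self):
        self.open('https://example.com/')
        self.create_bootstrap_tour('welcome')  # selects the Bootstrap branch above
        self.add_tour_step('Start here!', 'body')
        self.export_tour(name='welcome', filename='welcome_tour.js')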
def _update_cache(self):
    """
    INPUT:
    hyperparams: dictionary

    OUTPUT: dictionary with the fields
    K:     kernel
    Kinv:  inverse of the kernel
    L:     chol(K)
    alpha: solve(K, y)
    W:     t*Kinv - alpha*alpha^T
    """
    cov_params_have_changed = self.covar.params_have_changed

    if cov_params_have_changed or self.Y_has_changed:
        K = self.covar.K()
        L = LA.cholesky(K).T  # lower triangular
        Kinv = LA.cho_solve((L, True), SP.eye(L.shape[0]))
        alpha = LA.cho_solve((L, True), self.Y)
        W = self.t * Kinv - SP.dot(alpha, alpha.T)
        self._covar_cache = {}
        self._covar_cache['K'] = K
        self._covar_cache['Kinv'] = Kinv
        self._covar_cache['L'] = L
        self._covar_cache['alpha'] = alpha
        self._covar_cache['W'] = W

    return self._covar_cache
def function[_update_cache, parameter[self]]: constant[ INPUT: hyperparams: dictionary OUTPUT: dictionary with the fields K: kernel Kinv: inverse of the kernel L: chol(K) alpha: solve(K,y) W: D*Kinv * alpha*alpha^T ] variable[cov_params_have_changed] assign[=] name[self].covar.params_have_changed if <ast.BoolOp object at 0x7da18c4cc610> begin[:] variable[K] assign[=] call[name[self].covar.K, parameter[]] variable[L] assign[=] call[name[LA].cholesky, parameter[name[K]]].T variable[Kinv] assign[=] call[name[LA].cho_solve, parameter[tuple[[<ast.Name object at 0x7da18c4cf430>, <ast.Constant object at 0x7da18c4cdcc0>]], call[name[SP].eye, parameter[call[name[L].shape][constant[0]]]]]] variable[alpha] assign[=] call[name[LA].cho_solve, parameter[tuple[[<ast.Name object at 0x7da18c4cc970>, <ast.Constant object at 0x7da18c4cfee0>]], name[self].Y]] variable[W] assign[=] binary_operation[binary_operation[name[self].t * name[Kinv]] - call[name[SP].dot, parameter[name[alpha], name[alpha].T]]] name[self]._covar_cache assign[=] dictionary[[], []] call[name[self]._covar_cache][constant[K]] assign[=] name[K] call[name[self]._covar_cache][constant[Kinv]] assign[=] name[Kinv] call[name[self]._covar_cache][constant[L]] assign[=] name[L] call[name[self]._covar_cache][constant[alpha]] assign[=] name[alpha] call[name[self]._covar_cache][constant[W]] assign[=] name[W] return[name[self]._covar_cache]
keyword[def] identifier[_update_cache] ( identifier[self] ): literal[string] identifier[cov_params_have_changed] = identifier[self] . identifier[covar] . identifier[params_have_changed] keyword[if] identifier[cov_params_have_changed] keyword[or] identifier[self] . identifier[Y_has_changed] : identifier[K] = identifier[self] . identifier[covar] . identifier[K] () identifier[L] = identifier[LA] . identifier[cholesky] ( identifier[K] ). identifier[T] identifier[Kinv] = identifier[LA] . identifier[cho_solve] (( identifier[L] , keyword[True] ), identifier[SP] . identifier[eye] ( identifier[L] . identifier[shape] [ literal[int] ])) identifier[alpha] = identifier[LA] . identifier[cho_solve] (( identifier[L] , keyword[True] ), identifier[self] . identifier[Y] ) identifier[W] = identifier[self] . identifier[t] * identifier[Kinv] - identifier[SP] . identifier[dot] ( identifier[alpha] , identifier[alpha] . identifier[T] ) identifier[self] . identifier[_covar_cache] ={} identifier[self] . identifier[_covar_cache] [ literal[string] ]= identifier[K] identifier[self] . identifier[_covar_cache] [ literal[string] ]= identifier[Kinv] identifier[self] . identifier[_covar_cache] [ literal[string] ]= identifier[L] identifier[self] . identifier[_covar_cache] [ literal[string] ]= identifier[alpha] identifier[self] . identifier[_covar_cache] [ literal[string] ]= identifier[W] keyword[return] identifier[self] . identifier[_covar_cache]
def _update_cache(self): """ INPUT: hyperparams: dictionary OUTPUT: dictionary with the fields K: kernel Kinv: inverse of the kernel L: chol(K) alpha: solve(K,y) W: D*Kinv * alpha*alpha^T """ cov_params_have_changed = self.covar.params_have_changed if cov_params_have_changed or self.Y_has_changed: K = self.covar.K() L = LA.cholesky(K).T # lower triangular Kinv = LA.cho_solve((L, True), SP.eye(L.shape[0])) alpha = LA.cho_solve((L, True), self.Y) W = self.t * Kinv - SP.dot(alpha, alpha.T) self._covar_cache = {} self._covar_cache['K'] = K self._covar_cache['Kinv'] = Kinv self._covar_cache['L'] = L self._covar_cache['alpha'] = alpha self._covar_cache['W'] = W # depends on [control=['if'], data=[]] return self._covar_cache
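A self-contained numerical check of the same Cholesky bookkeeping, assuming LA is scipy.linalg and SP is numpy-compatible (as the aliases suggest); K, Y and t below are made up.

import numpy as np
import scipy.linalg as LA

def build_covar_cache(K, Y, t):
    # scipy's cholesky() returns the upper factor by default, so .T is lower
    L = LA.cholesky(K).T
    Kinv = LA.cho_solve((L, True), np.eye(L.shape[0]))
    alpha = LA.cho_solve((L, True), Y)           # solve(K, Y)
    W = t * Kinv - np.dot(alpha, alpha.T)
    return {'K': K, 'Kinv': Kinv, 'L': L, 'alpha': alpha, 'W': W}

A = np.random.randn(5, 5)
K = A.dot(A.T) + 5.0 * np.eye(5)                 # positive definite kernel
cache = build_covar_cache(K, np.random.randn(5, 2), t=2)
print(np.allclose(cache['Kinv'].dot(K), np.eye(5)))  # True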
def from_structures(structures, transformations=None, extend_collection=0): """ Alternative constructor from structures rather than TransformedStructures. Args: structures: Sequence of structures transformations: New transformations to be applied to all structures extend_collection: Whether to use more than one output structure from one-to-many transformations. extend_collection can be a number, which determines the maximum branching for each transformation. Returns: StandardTransmuter """ tstruct = [TransformedStructure(s, []) for s in structures] return StandardTransmuter(tstruct, transformations, extend_collection)
def function[from_structures, parameter[structures, transformations, extend_collection]]: constant[ Alternative constructor from structures rather than TransformedStructures. Args: structures: Sequence of structures transformations: New transformations to be applied to all structures extend_collection: Whether to use more than one output structure from one-to-many transformations. extend_collection can be a number, which determines the maximum branching for each transformation. Returns: StandardTransmuter ] variable[tstruct] assign[=] <ast.ListComp object at 0x7da18f810c10> return[call[name[StandardTransmuter], parameter[name[tstruct], name[transformations], name[extend_collection]]]]
keyword[def] identifier[from_structures] ( identifier[structures] , identifier[transformations] = keyword[None] , identifier[extend_collection] = literal[int] ): literal[string] identifier[tstruct] =[ identifier[TransformedStructure] ( identifier[s] ,[]) keyword[for] identifier[s] keyword[in] identifier[structures] ] keyword[return] identifier[StandardTransmuter] ( identifier[tstruct] , identifier[transformations] , identifier[extend_collection] )
def from_structures(structures, transformations=None, extend_collection=0): """ Alternative constructor from structures rather than TransformedStructures. Args: structures: Sequence of structures transformations: New transformations to be applied to all structures extend_collection: Whether to use more than one output structure from one-to-many transformations. extend_collection can be a number, which determines the maximum branching for each transformation. Returns: StandardTransmuter """ tstruct = [TransformedStructure(s, []) for s in structures] return StandardTransmuter(tstruct, transformations, extend_collection)
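Hypothetical call site, assuming the pymatgen-style classes this constructor wraps (StandardTransmuter, TransformedStructure) are importable; the import paths follow pymatgen's public API but are not verified against a specific version.

from pymatgen.alchemy.transmuters import StandardTransmuter
from pymatgen.core import Lattice, Structure
from pymatgen.transformations.standard_transformations import (
    SupercellTransformation,
)

structures = [
    Structure(Lattice.cubic(4.2), ['Cs', 'Cl'],
              [[0, 0, 0], [0.5, 0.5, 0.5]]),
    Structure(Lattice.cubic(5.6), ['Na', 'Cl'],
              [[0, 0, 0], [0.5, 0.5, 0.5]]),
]

# Every structure is wrapped in a TransformedStructure with an empty
# history, then the supercell transformation is applied to all of them.
transmuter = StandardTransmuter.from_structures(
    structures,
    transformations=[SupercellTransformation.from_scaling_factors(2, 2, 2)],
)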
def calibrate_dom(
        dom_id, data, detector, livetime=None, fit_ang_dist=False,
        scale_mc_to_data=True, ad_fit_shape='pexp', fit_background=True,
        ctmin=-1.
):
    """Calibrate intra DOM PMT time offsets, efficiencies and sigmas

    Parameters
    ----------
    dom_id: DOM ID
    data: dict of coincidences or root or hdf5 file
    detector: instance of detector class
    livetime: data-taking duration [s]
    fit_ang_dist: fit the angular distribution instead of using the fixed
        Monte Carlo one (e.g. for data/MC comparison)
    scale_mc_to_data: auto-scale the fixed angular distribution to the data
    ctmin: minimum cos(angle) between PMT pairs to keep

    Returns
    -------
    return_data: dictionary with fit results
    """
    if isinstance(data, str):
        filename = data
        loaders = {
            '.h5': load_k40_coincidences_from_hdf5,
            '.root': load_k40_coincidences_from_rootfile
        }
        try:
            loader = loaders[os.path.splitext(filename)[1]]
        except KeyError:
            log.critical('File format not supported.')
            raise IOError
        else:
            data, livetime = loader(filename, dom_id)

    combs = np.array(list(combinations(range(31), 2)))
    angles = calculate_angles(detector, combs)
    cos_angles = np.cos(angles)
    angles = angles[cos_angles >= ctmin]
    data = data[cos_angles >= ctmin]
    combs = combs[cos_angles >= ctmin]

    try:
        fit_res = fit_delta_ts(data, livetime, fit_background=fit_background)
        rates, means, sigmas, popts, pcovs = fit_res
    except Exception:  # the Gaussian fits did not converge
        return 0

    rate_errors = np.array([np.diag(pc)[2] for pc in pcovs])
    # mean_errors = np.array([np.diag(pc)[0] for pc in pcovs])
    scale_factor = None
    if fit_ang_dist:
        fit_res = fit_angular_distribution(
            angles, rates, rate_errors, shape=ad_fit_shape
        )
        fitted_rates, exp_popts, exp_pcov = fit_res
    else:
        mc_fitted_rates = exponential_polinomial(np.cos(angles), *MC_ANG_DIST)
        if scale_mc_to_data:
            scale_factor = np.mean(rates[angles < 1.5]) / \
                np.mean(mc_fitted_rates[angles < 1.5])
        else:
            scale_factor = 1.
        fitted_rates = mc_fitted_rates * scale_factor
        exp_popts = []
        exp_pcov = []
        print('Using angular distribution from Monte Carlo')

    # t0_weights = np.array([0. if a>1. else 1. for a in angles])

    if not fit_background:
        minimize_weights = calculate_weights(fitted_rates, data)
    else:
        minimize_weights = fitted_rates

    opt_t0s = minimize_t0s(means, minimize_weights, combs)
    opt_sigmas = minimize_sigmas(sigmas, minimize_weights, combs)
    opt_qes = minimize_qes(fitted_rates, rates, minimize_weights, combs)
    corrected_means = correct_means(means, opt_t0s.x, combs)
    corrected_rates = correct_rates(rates, opt_qes.x, combs)
    rms_means, rms_corrected_means = calculate_rms_means(
        means, corrected_means
    )
    rms_rates, rms_corrected_rates = calculate_rms_rates(
        rates, fitted_rates, corrected_rates
    )
    cos_angles = np.cos(angles)
    return_data = {
        'opt_t0s': opt_t0s,
        'opt_qes': opt_qes,
        'data': data,
        'means': means,
        'rates': rates,
        'fitted_rates': fitted_rates,
        'angles': angles,
        'corrected_means': corrected_means,
        'corrected_rates': corrected_rates,
        'rms_means': rms_means,
        'rms_corrected_means': rms_corrected_means,
        'rms_rates': rms_rates,
        'rms_corrected_rates': rms_corrected_rates,
        'gaussian_popts': popts,
        'livetime': livetime,
        'exp_popts': exp_popts,
        'exp_pcov': exp_pcov,
        'scale_factor': scale_factor,
        'opt_sigmas': opt_sigmas,
        'sigmas': sigmas,
        'combs': combs
    }
    return return_data
def function[calibrate_dom, parameter[dom_id, data, detector, livetime, fit_ang_dist, scale_mc_to_data, ad_fit_shape, fit_background, ctmin]]: constant[Calibrate intra DOM PMT time offsets, efficiencies and sigmas Parameters ---------- dom_id: DOM ID data: dict of coincidences or root or hdf5 file detector: instance of detector class livetime: data-taking duration [s] fixed_ang_dist: fixing angular distribution e.g. for data mc comparison auto_scale: auto scales the fixed angular distribution to the data Returns ------- return_data: dictionary with fit results ] if call[name[isinstance], parameter[name[data], name[str]]] begin[:] variable[filename] assign[=] name[data] variable[loaders] assign[=] dictionary[[<ast.Constant object at 0x7da207f98310>, <ast.Constant object at 0x7da207f9a3e0>], [<ast.Name object at 0x7da207f98c70>, <ast.Name object at 0x7da207f98d90>]] <ast.Try object at 0x7da207f9a980> variable[combs] assign[=] call[name[np].array, parameter[call[name[list], parameter[call[name[combinations], parameter[call[name[range], parameter[constant[31]]], constant[2]]]]]]] variable[angles] assign[=] call[name[calculate_angles], parameter[name[detector], name[combs]]] variable[cos_angles] assign[=] call[name[np].cos, parameter[name[angles]]] variable[angles] assign[=] call[name[angles]][compare[name[cos_angles] greater_or_equal[>=] name[ctmin]]] variable[data] assign[=] call[name[data]][compare[name[cos_angles] greater_or_equal[>=] name[ctmin]]] variable[combs] assign[=] call[name[combs]][compare[name[cos_angles] greater_or_equal[>=] name[ctmin]]] <ast.Try object at 0x7da207f994b0> variable[rate_errors] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da207f99bd0>]] variable[scale_factor] assign[=] constant[None] if name[fit_ang_dist] begin[:] variable[fit_res] assign[=] call[name[fit_angular_distribution], parameter[name[angles], name[rates], name[rate_errors]]] <ast.Tuple object at 0x7da207f99270> assign[=] name[fit_res] if <ast.UnaryOp object at 0x7da207f02110> begin[:] variable[minimize_weights] assign[=] call[name[calculate_weights], parameter[name[fitted_rates], name[data]]] variable[opt_t0s] assign[=] call[name[minimize_t0s], parameter[name[means], name[minimize_weights], name[combs]]] variable[opt_sigmas] assign[=] call[name[minimize_sigmas], parameter[name[sigmas], name[minimize_weights], name[combs]]] variable[opt_qes] assign[=] call[name[minimize_qes], parameter[name[fitted_rates], name[rates], name[minimize_weights], name[combs]]] variable[corrected_means] assign[=] call[name[correct_means], parameter[name[means], name[opt_t0s].x, name[combs]]] variable[corrected_rates] assign[=] call[name[correct_rates], parameter[name[rates], name[opt_qes].x, name[combs]]] <ast.Tuple object at 0x7da207f01e70> assign[=] call[name[calculate_rms_means], parameter[name[means], name[corrected_means]]] <ast.Tuple object at 0x7da207f02380> assign[=] call[name[calculate_rms_rates], parameter[name[rates], name[fitted_rates], name[corrected_rates]]] variable[cos_angles] assign[=] call[name[np].cos, parameter[name[angles]]] variable[return_data] assign[=] dictionary[[<ast.Constant object at 0x7da207f00ca0>, <ast.Constant object at 0x7da207f036d0>, <ast.Constant object at 0x7da207f00d60>, <ast.Constant object at 0x7da207f00160>, <ast.Constant object at 0x7da207f019f0>, <ast.Constant object at 0x7da207f027a0>, <ast.Constant object at 0x7da207f03e50>, <ast.Constant object at 0x7da207f00490>, <ast.Constant object at 0x7da207f038b0>, <ast.Constant object at 0x7da207f03af0>, <ast.Constant 
object at 0x7da207f00d90>, <ast.Constant object at 0x7da207f037c0>, <ast.Constant object at 0x7da207f03970>, <ast.Constant object at 0x7da207f01fc0>, <ast.Constant object at 0x7da207f039d0>, <ast.Constant object at 0x7da207f031f0>, <ast.Constant object at 0x7da207f01540>, <ast.Constant object at 0x7da207f017e0>, <ast.Constant object at 0x7da207f01660>, <ast.Constant object at 0x7da207f018d0>, <ast.Constant object at 0x7da207f03460>], [<ast.Name object at 0x7da207f012a0>, <ast.Name object at 0x7da207f00eb0>, <ast.Name object at 0x7da207f01f30>, <ast.Name object at 0x7da207f02500>, <ast.Name object at 0x7da207f03130>, <ast.Name object at 0x7da207f010f0>, <ast.Name object at 0x7da207f036a0>, <ast.Name object at 0x7da207f03bb0>, <ast.Name object at 0x7da207f03340>, <ast.Name object at 0x7da207f01de0>, <ast.Name object at 0x7da207f038e0>, <ast.Name object at 0x7da207f01210>, <ast.Name object at 0x7da207f03b80>, <ast.Name object at 0x7da207f02d10>, <ast.Name object at 0x7da207f001c0>, <ast.Name object at 0x7da207f03d90>, <ast.Name object at 0x7da207f00640>, <ast.Name object at 0x7da207f00e20>, <ast.Name object at 0x7da207f00c10>, <ast.Name object at 0x7da207f03640>, <ast.Name object at 0x7da207f00d00>]] return[name[return_data]]
keyword[def] identifier[calibrate_dom] ( identifier[dom_id] , identifier[data] , identifier[detector] , identifier[livetime] = keyword[None] , identifier[fit_ang_dist] = keyword[False] , identifier[scale_mc_to_data] = keyword[True] , identifier[ad_fit_shape] = literal[string] , identifier[fit_background] = keyword[True] , identifier[ctmin] =- literal[int] ): literal[string] keyword[if] identifier[isinstance] ( identifier[data] , identifier[str] ): identifier[filename] = identifier[data] identifier[loaders] ={ literal[string] : identifier[load_k40_coincidences_from_hdf5] , literal[string] : identifier[load_k40_coincidences_from_rootfile] } keyword[try] : identifier[loader] = identifier[loaders] [ identifier[os] . identifier[path] . identifier[splitext] ( identifier[filename] )[ literal[int] ]] keyword[except] identifier[KeyError] : identifier[log] . identifier[critical] ( literal[string] ) keyword[raise] identifier[IOError] keyword[else] : identifier[data] , identifier[livetime] = identifier[loader] ( identifier[filename] , identifier[dom_id] ) identifier[combs] = identifier[np] . identifier[array] ( identifier[list] ( identifier[combinations] ( identifier[range] ( literal[int] ), literal[int] ))) identifier[angles] = identifier[calculate_angles] ( identifier[detector] , identifier[combs] ) identifier[cos_angles] = identifier[np] . identifier[cos] ( identifier[angles] ) identifier[angles] = identifier[angles] [ identifier[cos_angles] >= identifier[ctmin] ] identifier[data] = identifier[data] [ identifier[cos_angles] >= identifier[ctmin] ] identifier[combs] = identifier[combs] [ identifier[cos_angles] >= identifier[ctmin] ] keyword[try] : identifier[fit_res] = identifier[fit_delta_ts] ( identifier[data] , identifier[livetime] , identifier[fit_background] = identifier[fit_background] ) identifier[rates] , identifier[means] , identifier[sigmas] , identifier[popts] , identifier[pcovs] = identifier[fit_res] keyword[except] : keyword[return] literal[int] identifier[rate_errors] = identifier[np] . identifier[array] ([ identifier[np] . identifier[diag] ( identifier[pc] )[ literal[int] ] keyword[for] identifier[pc] keyword[in] identifier[pcovs] ]) identifier[scale_factor] = keyword[None] keyword[if] identifier[fit_ang_dist] : identifier[fit_res] = identifier[fit_angular_distribution] ( identifier[angles] , identifier[rates] , identifier[rate_errors] , identifier[shape] = identifier[ad_fit_shape] ) identifier[fitted_rates] , identifier[exp_popts] , identifier[exp_pcov] = identifier[fit_res] keyword[else] : identifier[mc_fitted_rates] = identifier[exponential_polinomial] ( identifier[np] . identifier[cos] ( identifier[angles] ),* identifier[MC_ANG_DIST] ) keyword[if] identifier[scale_mc_to_data] : identifier[scale_factor] = identifier[np] . identifier[mean] ( identifier[rates] [ identifier[angles] < literal[int] ])/ identifier[np] . 
identifier[mean] ( identifier[mc_fitted_rates] [ identifier[angles] < literal[int] ]) keyword[else] : identifier[scale_factor] = literal[int] identifier[fitted_rates] = identifier[mc_fitted_rates] * identifier[scale_factor] identifier[exp_popts] =[] identifier[exp_pcov] =[] identifier[print] ( literal[string] ) keyword[if] keyword[not] identifier[fit_background] : identifier[minimize_weights] = identifier[calculate_weights] ( identifier[fitted_rates] , identifier[data] ) keyword[else] : identifier[minimize_weights] = identifier[fitted_rates] identifier[opt_t0s] = identifier[minimize_t0s] ( identifier[means] , identifier[minimize_weights] , identifier[combs] ) identifier[opt_sigmas] = identifier[minimize_sigmas] ( identifier[sigmas] , identifier[minimize_weights] , identifier[combs] ) identifier[opt_qes] = identifier[minimize_qes] ( identifier[fitted_rates] , identifier[rates] , identifier[minimize_weights] , identifier[combs] ) identifier[corrected_means] = identifier[correct_means] ( identifier[means] , identifier[opt_t0s] . identifier[x] , identifier[combs] ) identifier[corrected_rates] = identifier[correct_rates] ( identifier[rates] , identifier[opt_qes] . identifier[x] , identifier[combs] ) identifier[rms_means] , identifier[rms_corrected_means] = identifier[calculate_rms_means] ( identifier[means] , identifier[corrected_means] ) identifier[rms_rates] , identifier[rms_corrected_rates] = identifier[calculate_rms_rates] ( identifier[rates] , identifier[fitted_rates] , identifier[corrected_rates] ) identifier[cos_angles] = identifier[np] . identifier[cos] ( identifier[angles] ) identifier[return_data] ={ literal[string] : identifier[opt_t0s] , literal[string] : identifier[opt_qes] , literal[string] : identifier[data] , literal[string] : identifier[means] , literal[string] : identifier[rates] , literal[string] : identifier[fitted_rates] , literal[string] : identifier[angles] , literal[string] : identifier[corrected_means] , literal[string] : identifier[corrected_rates] , literal[string] : identifier[rms_means] , literal[string] : identifier[rms_corrected_means] , literal[string] : identifier[rms_rates] , literal[string] : identifier[rms_corrected_rates] , literal[string] : identifier[popts] , literal[string] : identifier[livetime] , literal[string] : identifier[exp_popts] , literal[string] : identifier[exp_pcov] , literal[string] : identifier[scale_factor] , literal[string] : identifier[opt_sigmas] , literal[string] : identifier[sigmas] , literal[string] : identifier[combs] } keyword[return] identifier[return_data]
def calibrate_dom(dom_id, data, detector, livetime=None, fit_ang_dist=False, scale_mc_to_data=True, ad_fit_shape='pexp', fit_background=True, ctmin=-1.0): """Calibrate intra DOM PMT time offsets, efficiencies and sigmas Parameters ---------- dom_id: DOM ID data: dict of coincidences or root or hdf5 file detector: instance of detector class livetime: data-taking duration [s] fixed_ang_dist: fixing angular distribution e.g. for data mc comparison auto_scale: auto scales the fixed angular distribution to the data Returns ------- return_data: dictionary with fit results """ if isinstance(data, str): filename = data loaders = {'.h5': load_k40_coincidences_from_hdf5, '.root': load_k40_coincidences_from_rootfile} try: loader = loaders[os.path.splitext(filename)[1]] # depends on [control=['try'], data=[]] except KeyError: log.critical('File format not supported.') raise IOError # depends on [control=['except'], data=[]] else: (data, livetime) = loader(filename, dom_id) # depends on [control=['if'], data=[]] combs = np.array(list(combinations(range(31), 2))) angles = calculate_angles(detector, combs) cos_angles = np.cos(angles) angles = angles[cos_angles >= ctmin] data = data[cos_angles >= ctmin] combs = combs[cos_angles >= ctmin] try: fit_res = fit_delta_ts(data, livetime, fit_background=fit_background) (rates, means, sigmas, popts, pcovs) = fit_res # depends on [control=['try'], data=[]] except: return 0 # depends on [control=['except'], data=[]] rate_errors = np.array([np.diag(pc)[2] for pc in pcovs]) # mean_errors = np.array([np.diag(pc)[0] for pc in pcovs]) scale_factor = None if fit_ang_dist: fit_res = fit_angular_distribution(angles, rates, rate_errors, shape=ad_fit_shape) (fitted_rates, exp_popts, exp_pcov) = fit_res # depends on [control=['if'], data=[]] else: mc_fitted_rates = exponential_polinomial(np.cos(angles), *MC_ANG_DIST) if scale_mc_to_data: scale_factor = np.mean(rates[angles < 1.5]) / np.mean(mc_fitted_rates[angles < 1.5]) # depends on [control=['if'], data=[]] else: scale_factor = 1.0 fitted_rates = mc_fitted_rates * scale_factor exp_popts = [] exp_pcov = [] print('Using angular distribution from Monte Carlo') # t0_weights = np.array([0. if a>1. else 1. for a in angles]) if not fit_background: minimize_weights = calculate_weights(fitted_rates, data) # depends on [control=['if'], data=[]] else: minimize_weights = fitted_rates opt_t0s = minimize_t0s(means, minimize_weights, combs) opt_sigmas = minimize_sigmas(sigmas, minimize_weights, combs) opt_qes = minimize_qes(fitted_rates, rates, minimize_weights, combs) corrected_means = correct_means(means, opt_t0s.x, combs) corrected_rates = correct_rates(rates, opt_qes.x, combs) (rms_means, rms_corrected_means) = calculate_rms_means(means, corrected_means) (rms_rates, rms_corrected_rates) = calculate_rms_rates(rates, fitted_rates, corrected_rates) cos_angles = np.cos(angles) return_data = {'opt_t0s': opt_t0s, 'opt_qes': opt_qes, 'data': data, 'means': means, 'rates': rates, 'fitted_rates': fitted_rates, 'angles': angles, 'corrected_means': corrected_means, 'corrected_rates': corrected_rates, 'rms_means': rms_means, 'rms_corrected_means': rms_corrected_means, 'rms_rates': rms_rates, 'rms_corrected_rates': rms_corrected_rates, 'gaussian_popts': popts, 'livetime': livetime, 'exp_popts': exp_popts, 'exp_pcov': exp_pcov, 'scale_factor': scale_factor, 'opt_sigmas': opt_sigmas, 'sigmas': sigmas, 'combs': combs} return return_data
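A toy illustration of two steps buried in the function above — the cos(angle) cut and the MC-to-data scaling. exponential_polinomial and MC_ANG_DIST live elsewhere in the module, so the polynomial form and coefficients below are guesses for demonstration only.

import numpy as np

def exponential_polinomial(x, *coeffs):
    # Guessed form: exp of a polynomial in x (lowest order first)
    return np.exp(np.polyval(coeffs[::-1], x))

angles = np.linspace(0.1, 3.0, 50)
rates = np.random.uniform(5.0, 10.0, 50)
ctmin = -0.5

# Drop PMT pairs whose opening angle fails the cos(angle) cut
mask = np.cos(angles) >= ctmin
angles, rates = angles[mask], rates[mask]

# Scale the fixed MC angular distribution to the small-angle data
mc_rates = exponential_polinomial(np.cos(angles), 2.0, 1.0)
scale_factor = np.mean(rates[angles < 1.5]) / np.mean(mc_rates[angles < 1.5])
fitted_rates = mc_rates * scale_factor
print(scale_factor, fitted_rates[:3])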
def percent_encode_plus(text, encode_set=QUERY_ENCODE_SET, encoding='utf-8'): '''Percent encode text for query strings. Unlike Python's ``quote_plus``, this function accepts a blacklist instead of a whitelist of safe characters. ''' if ' ' not in text: return percent_encode(text, encode_set, encoding) else: result = percent_encode(text, encode_set, encoding) return result.replace(' ', '+')
def function[percent_encode_plus, parameter[text, encode_set, encoding]]: constant[Percent encode text for query strings. Unlike Python's ``quote_plus``, this function accepts a blacklist instead of a whitelist of safe characters. ] if compare[constant[ ] <ast.NotIn object at 0x7da2590d7190> name[text]] begin[:] return[call[name[percent_encode], parameter[name[text], name[encode_set], name[encoding]]]]
keyword[def] identifier[percent_encode_plus] ( identifier[text] , identifier[encode_set] = identifier[QUERY_ENCODE_SET] , identifier[encoding] = literal[string] ): literal[string] keyword[if] literal[string] keyword[not] keyword[in] identifier[text] : keyword[return] identifier[percent_encode] ( identifier[text] , identifier[encode_set] , identifier[encoding] ) keyword[else] : identifier[result] = identifier[percent_encode] ( identifier[text] , identifier[encode_set] , identifier[encoding] ) keyword[return] identifier[result] . identifier[replace] ( literal[string] , literal[string] )
def percent_encode_plus(text, encode_set=QUERY_ENCODE_SET, encoding='utf-8'): """Percent encode text for query strings. Unlike Python's ``quote_plus``, this function accepts a blacklist instead of a whitelist of safe characters. """ if ' ' not in text: return percent_encode(text, encode_set, encoding) # depends on [control=['if'], data=['text']] else: result = percent_encode(text, encode_set, encoding) return result.replace(' ', '+')
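percent_encode and QUERY_ENCODE_SET are defined elsewhere; the stand-in below only sketches the blacklist behavior, enough to run the space-to-plus logic end to end.

def percent_encode(text, encode_set=frozenset('#&='), encoding='utf-8'):
    # Stand-in: encode only blacklisted characters, leave spaces untouched
    out = []
    for ch in text:
        if ch in encode_set:
            out.extend('%{:02X}'.format(b) for b in ch.encode(encoding))
        else:
            out.append(ch)
    return ''.join(out)

# With this stand-in in scope, percent_encode_plus above yields:
print(percent_encode_plus('a b&c', frozenset('#&=')))  # a+b%26c
print(percent_encode_plus('abc', frozenset('#&=')))    # abc (fast path)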
def agent_version(self): """Get the version of the Juju machine agent. May return None if the agent is not yet available. """ version = self.safe_data['agent-status']['version'] if version: return client.Number.from_json(version) else: return None
def function[agent_version, parameter[self]]: constant[Get the version of the Juju machine agent. May return None if the agent is not yet available. ] variable[version] assign[=] call[call[name[self].safe_data][constant[agent-status]]][constant[version]] if name[version] begin[:] return[call[name[client].Number.from_json, parameter[name[version]]]]
keyword[def] identifier[agent_version] ( identifier[self] ): literal[string] identifier[version] = identifier[self] . identifier[safe_data] [ literal[string] ][ literal[string] ] keyword[if] identifier[version] : keyword[return] identifier[client] . identifier[Number] . identifier[from_json] ( identifier[version] ) keyword[else] : keyword[return] keyword[None]
def agent_version(self): """Get the version of the Juju machine agent. May return None if the agent is not yet available. """ version = self.safe_data['agent-status']['version'] if version: return client.Number.from_json(version) # depends on [control=['if'], data=[]] else: return None
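A minimal stand-in showing the contract — an empty version string maps to None, otherwise it is parsed. client.Number belongs to the surrounding Juju library, so the parsing step is omitted here and the raw string returned instead.

class FakeMachine:
    # Mirrors the safe_data layout read by agent_version above
    def __init__(self, version):
        self.safe_data = {'agent-status': {'version': version}}

    @property
    def agent_version(self):
        version = self.safe_data['agent-status']['version']
        return version or None   # parsing into client.Number omitted

print(FakeMachine('2.9.42').agent_version)  # 2.9.42
print(FakeMachine('').agent_version)        # None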
def print_action(self, test_succeed, action):
    '''
    Print the detailed info of failed tests, and of tests that are
    marked to always show their run output.
    '''
    # self.info_print(">>> {0}", action.keys())
    if not test_succeed or action['info']['always_show_run_output']:
        output = action['output'].strip()
        if output != "":
            p = self.fail_print if action['result'] == 'fail' else self.p_print
            self.info_print("")
            self.info_print("({0}) {1}", action['info']['name'],
                            action['info']['path'])
            p("")
            p("{0}", action['command'].strip())
            p("")
            for line in output.splitlines():
                p("{0}", line.encode('utf-8'))
def function[print_action, parameter[self, test_succeed, action]]: constant[ Print the detailed info of failed or always print tests. ] if <ast.BoolOp object at 0x7da1b2058f40> begin[:] variable[output] assign[=] call[call[name[action]][constant[output]].strip, parameter[]] if compare[name[output] not_equal[!=] constant[]] begin[:] variable[p] assign[=] <ast.IfExp object at 0x7da1b2064b20> call[name[self].info_print, parameter[constant[]]] call[name[self].info_print, parameter[constant[({0}) {1}], call[call[name[action]][constant[info]]][constant[name]], call[call[name[action]][constant[info]]][constant[path]]]] call[name[p], parameter[constant[]]] call[name[p], parameter[constant[{0}], call[call[name[action]][constant[command]].strip, parameter[]]]] call[name[p], parameter[constant[]]] for taget[name[line]] in starred[call[name[output].splitlines, parameter[]]] begin[:] call[name[p], parameter[constant[{0}], call[name[line].encode, parameter[constant[utf-8]]]]]
keyword[def] identifier[print_action] ( identifier[self] , identifier[test_succeed] , identifier[action] ): literal[string] keyword[if] keyword[not] identifier[test_succeed] keyword[or] identifier[action] [ literal[string] ][ literal[string] ]: identifier[output] = identifier[action] [ literal[string] ]. identifier[strip] () keyword[if] identifier[output] != literal[string] : identifier[p] = identifier[self] . identifier[fail_print] keyword[if] identifier[action] [ literal[string] ]== literal[string] keyword[else] identifier[self] . identifier[p_print] identifier[self] . identifier[info_print] ( literal[string] ) identifier[self] . identifier[info_print] ( literal[string] , identifier[action] [ literal[string] ][ literal[string] ], identifier[action] [ literal[string] ][ literal[string] ]) identifier[p] ( literal[string] ) identifier[p] ( literal[string] , identifier[action] [ literal[string] ]. identifier[strip] ()) identifier[p] ( literal[string] ) keyword[for] identifier[line] keyword[in] identifier[output] . identifier[splitlines] (): identifier[p] ( literal[string] , identifier[line] . identifier[encode] ( literal[string] ))
def print_action(self, test_succeed, action): """ Print the detailed info of failed or always print tests. """ #self.info_print(">>> {0}",action.keys()) if not test_succeed or action['info']['always_show_run_output']: output = action['output'].strip() if output != '': p = self.fail_print if action['result'] == 'fail' else self.p_print self.info_print('') self.info_print('({0}) {1}', action['info']['name'], action['info']['path']) p('') p('{0}', action['command'].strip()) p('') for line in output.splitlines(): p('{0}', line.encode('utf-8')) # depends on [control=['for'], data=['line']] # depends on [control=['if'], data=['output']] # depends on [control=['if'], data=[]]
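For reference, a fabricated action record carrying exactly the keys print_action reads; all values are invented.

action = {
    'result': 'fail',                     # 'fail' routes output to fail_print
    'command': 'bin/test --run smoke\n',
    'output': 'assertion failed in smoke_test.cpp\n',
    'info': {
        'name': 'smoke',
        'path': 'libs/core/test',
        'always_show_run_output': False,  # True prints even on success
    },
}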
def _get_memmap(self): """Get the memory map for the SEVIRI data""" with open(self.filename) as fp: data_dtype = self._get_data_dtype() hdr_size = native_header.itemsize return np.memmap(fp, dtype=data_dtype, shape=(self.mda['number_of_lines'],), offset=hdr_size, mode="r")
def function[_get_memmap, parameter[self]]: constant[Get the memory map for the SEVIRI data] with call[name[open], parameter[name[self].filename]] begin[:] variable[data_dtype] assign[=] call[name[self]._get_data_dtype, parameter[]] variable[hdr_size] assign[=] name[native_header].itemsize return[call[name[np].memmap, parameter[name[fp]]]]
keyword[def] identifier[_get_memmap] ( identifier[self] ): literal[string] keyword[with] identifier[open] ( identifier[self] . identifier[filename] ) keyword[as] identifier[fp] : identifier[data_dtype] = identifier[self] . identifier[_get_data_dtype] () identifier[hdr_size] = identifier[native_header] . identifier[itemsize] keyword[return] identifier[np] . identifier[memmap] ( identifier[fp] , identifier[dtype] = identifier[data_dtype] , identifier[shape] =( identifier[self] . identifier[mda] [ literal[string] ],), identifier[offset] = identifier[hdr_size] , identifier[mode] = literal[string] )
def _get_memmap(self): """Get the memory map for the SEVIRI data""" with open(self.filename) as fp: data_dtype = self._get_data_dtype() hdr_size = native_header.itemsize return np.memmap(fp, dtype=data_dtype, shape=(self.mda['number_of_lines'],), offset=hdr_size, mode='r') # depends on [control=['with'], data=['fp']]
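A self-contained illustration of the same np.memmap pattern — skip a fixed-size header, then map the remainder as records. The dtypes and sizes here are invented, not the real SEVIRI native layout.

import numpy as np

hdr_dtype = np.dtype([('magic', 'S4'), ('number_of_lines', '<u4')])
rec_dtype = np.dtype([('line', '<u4'), ('counts', '<u2', (8,))])

# Write a tiny fake file: one header followed by 3 records
with open('fake.native', 'wb') as f:
    np.array([(b'SEVI', 3)], dtype=hdr_dtype).tofile(f)
    np.zeros(3, dtype=rec_dtype).tofile(f)

with open('fake.native', 'rb') as fp:
    data = np.memmap(fp, dtype=rec_dtype, shape=(3,),
                     offset=hdr_dtype.itemsize, mode='r')
print(data.shape, data.dtype)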
def check_attr(node, n):
    """ Returns node.children[n] if the node has more than n children
    (i.e. ATTR might have to be normalized after this instruction has
    been translated to intermediate code); returns None otherwise.
    """
    if len(node.children) > n:
        return node.children[n]
def function[check_attr, parameter[node, n]]: constant[ Check if ATTR has to be normalized after this instruction has been translated to intermediate code. ] if compare[call[name[len], parameter[name[node].children]] greater[>] name[n]] begin[:] return[call[name[node].children][name[n]]]
keyword[def] identifier[check_attr] ( identifier[node] , identifier[n] ): literal[string] keyword[if] identifier[len] ( identifier[node] . identifier[children] )> identifier[n] : keyword[return] identifier[node] . identifier[children] [ identifier[n] ]
def check_attr(node, n): """ Check if ATTR has to be normalized after this instruction has been translated to intermediate code. """ if len(node.children) > n: return node.children[n] # depends on [control=['if'], data=['n']]
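A two-line stand-in node is enough to exercise the guard in the check_attr defined above (the function falls through to an implicit None when the child is missing):

class Node:
    def __init__(self, children):
        self.children = children

n = Node(['a', 'b'])
print(check_attr(n, 1))  # 'b'
print(check_attr(n, 5))  # None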
def C_wedge_meter_Miller(D, H): r'''Calculates the coefficient of discharge of an wedge flow meter used for measuring flow rate of fluid, based on the geometry of the differential pressure flow meter. For half-inch lines: .. math:: C = 0.7883 + 0.107(1 - \beta^2) For 1 to 1.5 inch lines: .. math:: C = 0.6143 + 0.718(1 - \beta^2) For 1.5 to 24 inch lines: .. math:: C = 0.5433 + 0.2453(1 - \beta^2) Parameters ---------- D : float Upstream internal pipe diameter, [m] H : float Portion of the diameter of the clear segment of the pipe up to the wedge blocking flow; the height of the pipe up to the wedge, [m] Returns ------- C : float Coefficient of discharge of the wedge flow meter, [-] Notes ----- There is an ISO standard being developed to cover wedge meters as of 2018. Wedge meters can have varying angles; 60 and 90 degree wedge meters have been reported. Tap locations 1 or 2 diameters (upstream and downstream), and 2D upstream/1D downstream have been used. Some wedges are sharp; some are smooth. [2]_ gives some experimental values. Examples -------- >>> C_wedge_meter_Miller(D=0.1524, H=0.3*0.1524) 0.7267069372687651 References ---------- .. [1] Miller, Richard W. Flow Measurement Engineering Handbook. 3rd edition. New York: McGraw-Hill Education, 1996. .. [2] Seshadri, V., S. N. Singh, and S. Bhargava. "Effect of Wedge Shape and Pressure Tap Locations on the Characteristics of a Wedge Flowmeter." IJEMS Vol.01(5), October 1994. ''' beta = diameter_ratio_wedge_meter(D, H) if D <= 0.7*inch: # suggested limit 0.5 inch for this equation C = 0.7883 + 0.107*(1.0 - beta*beta) elif D <= 1.4*inch: # Suggested limit is under 1.5 inches C = 0.6143 + 0.718*(1.0 - beta*beta) else: C = 0.5433 + 0.2453*(1.0 - beta*beta) return C
def function[C_wedge_meter_Miller, parameter[D, H]]: constant[Calculates the coefficient of discharge of an wedge flow meter used for measuring flow rate of fluid, based on the geometry of the differential pressure flow meter. For half-inch lines: .. math:: C = 0.7883 + 0.107(1 - \beta^2) For 1 to 1.5 inch lines: .. math:: C = 0.6143 + 0.718(1 - \beta^2) For 1.5 to 24 inch lines: .. math:: C = 0.5433 + 0.2453(1 - \beta^2) Parameters ---------- D : float Upstream internal pipe diameter, [m] H : float Portion of the diameter of the clear segment of the pipe up to the wedge blocking flow; the height of the pipe up to the wedge, [m] Returns ------- C : float Coefficient of discharge of the wedge flow meter, [-] Notes ----- There is an ISO standard being developed to cover wedge meters as of 2018. Wedge meters can have varying angles; 60 and 90 degree wedge meters have been reported. Tap locations 1 or 2 diameters (upstream and downstream), and 2D upstream/1D downstream have been used. Some wedges are sharp; some are smooth. [2]_ gives some experimental values. Examples -------- >>> C_wedge_meter_Miller(D=0.1524, H=0.3*0.1524) 0.7267069372687651 References ---------- .. [1] Miller, Richard W. Flow Measurement Engineering Handbook. 3rd edition. New York: McGraw-Hill Education, 1996. .. [2] Seshadri, V., S. N. Singh, and S. Bhargava. "Effect of Wedge Shape and Pressure Tap Locations on the Characteristics of a Wedge Flowmeter." IJEMS Vol.01(5), October 1994. ] variable[beta] assign[=] call[name[diameter_ratio_wedge_meter], parameter[name[D], name[H]]] if compare[name[D] less_or_equal[<=] binary_operation[constant[0.7] * name[inch]]] begin[:] variable[C] assign[=] binary_operation[constant[0.7883] + binary_operation[constant[0.107] * binary_operation[constant[1.0] - binary_operation[name[beta] * name[beta]]]]] return[name[C]]
keyword[def] identifier[C_wedge_meter_Miller] ( identifier[D] , identifier[H] ): literal[string] identifier[beta] = identifier[diameter_ratio_wedge_meter] ( identifier[D] , identifier[H] ) keyword[if] identifier[D] <= literal[int] * identifier[inch] : identifier[C] = literal[int] + literal[int] *( literal[int] - identifier[beta] * identifier[beta] ) keyword[elif] identifier[D] <= literal[int] * identifier[inch] : identifier[C] = literal[int] + literal[int] *( literal[int] - identifier[beta] * identifier[beta] ) keyword[else] : identifier[C] = literal[int] + literal[int] *( literal[int] - identifier[beta] * identifier[beta] ) keyword[return] identifier[C]
def C_wedge_meter_Miller(D, H): """Calculates the coefficient of discharge of an wedge flow meter used for measuring flow rate of fluid, based on the geometry of the differential pressure flow meter. For half-inch lines: .. math:: C = 0.7883 + 0.107(1 - \\beta^2) For 1 to 1.5 inch lines: .. math:: C = 0.6143 + 0.718(1 - \\beta^2) For 1.5 to 24 inch lines: .. math:: C = 0.5433 + 0.2453(1 - \\beta^2) Parameters ---------- D : float Upstream internal pipe diameter, [m] H : float Portion of the diameter of the clear segment of the pipe up to the wedge blocking flow; the height of the pipe up to the wedge, [m] Returns ------- C : float Coefficient of discharge of the wedge flow meter, [-] Notes ----- There is an ISO standard being developed to cover wedge meters as of 2018. Wedge meters can have varying angles; 60 and 90 degree wedge meters have been reported. Tap locations 1 or 2 diameters (upstream and downstream), and 2D upstream/1D downstream have been used. Some wedges are sharp; some are smooth. [2]_ gives some experimental values. Examples -------- >>> C_wedge_meter_Miller(D=0.1524, H=0.3*0.1524) 0.7267069372687651 References ---------- .. [1] Miller, Richard W. Flow Measurement Engineering Handbook. 3rd edition. New York: McGraw-Hill Education, 1996. .. [2] Seshadri, V., S. N. Singh, and S. Bhargava. "Effect of Wedge Shape and Pressure Tap Locations on the Characteristics of a Wedge Flowmeter." IJEMS Vol.01(5), October 1994. """ beta = diameter_ratio_wedge_meter(D, H) if D <= 0.7 * inch: # suggested limit 0.5 inch for this equation C = 0.7883 + 0.107 * (1.0 - beta * beta) # depends on [control=['if'], data=[]] elif D <= 1.4 * inch: # Suggested limit is under 1.5 inches C = 0.6143 + 0.718 * (1.0 - beta * beta) # depends on [control=['if'], data=[]] else: C = 0.5433 + 0.2453 * (1.0 - beta * beta) return C
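diameter_ratio_wedge_meter is defined elsewhere in the library; the circular-segment relation below is a plausible stand-in (it reproduces the docstring value), which makes the function runnable for a quick check across the size regimes.

from math import acos, pi

inch = 0.0254

def diameter_ratio_wedge_meter(D, H):
    # Equivalent-area beta of the open circular segment above the wedge
    r = H / D
    return (1.0 / pi * (acos(1.0 - 2.0 * r)
                        - 2.0 * (1.0 - 2.0 * r) * (r - r * r) ** 0.5)) ** 0.5

print(C_wedge_meter_Miller(D=0.1524, H=0.3 * 0.1524))     # 0.7267069372687651
print(C_wedge_meter_Miller(D=0.5 * inch, H=0.15 * inch))  # half-inch branch
print(C_wedge_meter_Miller(D=1.0 * inch, H=0.3 * inch))   # 1-1.5 inch branch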
def link_href(self, rel):
    """Look for link with specified rel, return href from it or None."""
    link = self.link(rel)
    if link is not None:
        link = link['href']
    return link
def function[link_href, parameter[self, rel]]: constant[Look for link with specified rel, return href from it or None.] variable[link] assign[=] call[name[self].link, parameter[name[rel]]] if compare[name[link] is_not constant[None]] begin[:] variable[link] assign[=] call[name[link]][constant[href]] return[name[link]]
keyword[def] identifier[link_href] ( identifier[self] , identifier[rel] ): literal[string] identifier[link] = identifier[self] . identifier[link] ( identifier[rel] ) keyword[if] ( identifier[link] keyword[is] keyword[not] keyword[None] ): identifier[link] = identifier[link] [ literal[string] ] keyword[return] ( identifier[link] )
def link_href(self, rel): """Look for link with specified rel, return href from it or None.""" link = self.link(rel) if link is not None: link = link['href'] # depends on [control=['if'], data=['link']] return link
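A minimal resource stand-in; since link_href is written here as an unbound function, it can be called directly with the stand-in as self.

class Resource:
    def __init__(self, links):
        self._links = links

    def link(self, rel):
        # First link dict with a matching rel, else None
        return next((l for l in self._links if l.get('rel') == rel), None)

r = Resource([{'rel': 'describedby', 'href': 'http://example.org/info'}])
print(link_href(r, 'describedby'))  # http://example.org/info
print(link_href(r, 'up'))           # None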
def disable_selinux():
    """ disables SELinux in /etc/selinux/config and reboots the host
    if it is still active """
    if contains(filename='/etc/selinux/config', text='SELINUX=enforcing'):
        sed('/etc/selinux/config', 'SELINUX=enforcing', 'SELINUX=disabled',
            use_sudo=True)

    if contains(filename='/etc/selinux/config', text='SELINUX=permissive'):
        sed('/etc/selinux/config', 'SELINUX=permissive', 'SELINUX=disabled',
            use_sudo=True)

    if sudo('getenforce').lower() != 'disabled':
        with settings(warn_only=True, capture=True):
            sudo('/sbin/reboot')
        sleep_for_one_minute()
def function[disable_selinux, parameter[]]: constant[ disables selinux ] if call[name[contains], parameter[]] begin[:] call[name[sed], parameter[constant[/etc/selinux/config], constant[SELINUX=enforcing], constant[SELINUX=disabled]]] if call[name[contains], parameter[]] begin[:] call[name[sed], parameter[constant[/etc/selinux/config], constant[SELINUX=permissive], constant[SELINUX=disabled]]] if compare[call[call[name[sudo], parameter[constant[getenforce]]].lower, parameter[]] not_equal[!=] constant[disabled]] begin[:] with call[name[settings], parameter[]] begin[:] call[name[sudo], parameter[constant[/sbin/reboot]]] call[name[sleep_for_one_minute], parameter[]]
keyword[def] identifier[disable_selinux] (): literal[string] keyword[if] identifier[contains] ( identifier[filename] = literal[string] , identifier[text] = literal[string] ): identifier[sed] ( literal[string] , literal[string] , literal[string] , identifier[use_sudo] = keyword[True] ) keyword[if] identifier[contains] ( identifier[filename] = literal[string] , identifier[text] = literal[string] ): identifier[sed] ( literal[string] , literal[string] , literal[string] , identifier[use_sudo] = keyword[True] ) keyword[if] identifier[sudo] ( literal[string] ). identifier[lower] ()!= literal[string] : keyword[with] identifier[settings] ( identifier[warn_only] = keyword[True] , identifier[capture] = keyword[True] ): identifier[sudo] ( literal[string] ) identifier[sleep_for_one_minute] ()
def disable_selinux(): """ disables selinux """ if contains(filename='/etc/selinux/config', text='SELINUX=enforcing'): sed('/etc/selinux/config', 'SELINUX=enforcing', 'SELINUX=disabled', use_sudo=True) # depends on [control=['if'], data=[]] if contains(filename='/etc/selinux/config', text='SELINUX=permissive'): sed('/etc/selinux/config', 'SELINUX=permissive', 'SELINUX=disabled', use_sudo=True) # depends on [control=['if'], data=[]] if sudo('getenforce').lower() != 'disabled': with settings(warn_only=True, capture=True): sudo('/sbin/reboot') # depends on [control=['with'], data=[]] sleep_for_one_minute() # depends on [control=['if'], data=[]]
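Hypothetical invocation with Fabric 1.x, whose contains/sed/sudo/settings helpers the function relies on; the host and auth wiring below are invented.

from fabric.api import env, execute

env.hosts = ['admin@203.0.113.10']   # hypothetical CentOS/RHEL host
env.key_filename = '~/.ssh/id_rsa'

execute(disable_selinux)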
def update_readme_for_modules(modules): """ Update README.md updating the sections for the module names listed. """ readme = parse_readme() module_docstrings = core_module_docstrings() if modules == ["__all__"]: modules = core_module_docstrings().keys() for module in modules: if module in module_docstrings: print_stderr("Updating README.md for module {}".format(module)) readme[module] = module_docstrings[module] else: print_stderr("Module {} not in core modules".format(module)) # write the file readme_file = os.path.join(modules_directory(), "README.md") with open(readme_file, "w") as f: f.write(create_readme(readme))
def function[update_readme_for_modules, parameter[modules]]: constant[ Update README.md updating the sections for the module names listed. ] variable[readme] assign[=] call[name[parse_readme], parameter[]] variable[module_docstrings] assign[=] call[name[core_module_docstrings], parameter[]] if compare[name[modules] equal[==] list[[<ast.Constant object at 0x7da18bc736d0>]]] begin[:] variable[modules] assign[=] call[call[name[core_module_docstrings], parameter[]].keys, parameter[]] for taget[name[module]] in starred[name[modules]] begin[:] if compare[name[module] in name[module_docstrings]] begin[:] call[name[print_stderr], parameter[call[constant[Updating README.md for module {}].format, parameter[name[module]]]]] call[name[readme]][name[module]] assign[=] call[name[module_docstrings]][name[module]] variable[readme_file] assign[=] call[name[os].path.join, parameter[call[name[modules_directory], parameter[]], constant[README.md]]] with call[name[open], parameter[name[readme_file], constant[w]]] begin[:] call[name[f].write, parameter[call[name[create_readme], parameter[name[readme]]]]]
keyword[def] identifier[update_readme_for_modules] ( identifier[modules] ): literal[string] identifier[readme] = identifier[parse_readme] () identifier[module_docstrings] = identifier[core_module_docstrings] () keyword[if] identifier[modules] ==[ literal[string] ]: identifier[modules] = identifier[core_module_docstrings] (). identifier[keys] () keyword[for] identifier[module] keyword[in] identifier[modules] : keyword[if] identifier[module] keyword[in] identifier[module_docstrings] : identifier[print_stderr] ( literal[string] . identifier[format] ( identifier[module] )) identifier[readme] [ identifier[module] ]= identifier[module_docstrings] [ identifier[module] ] keyword[else] : identifier[print_stderr] ( literal[string] . identifier[format] ( identifier[module] )) identifier[readme_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[modules_directory] (), literal[string] ) keyword[with] identifier[open] ( identifier[readme_file] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[create_readme] ( identifier[readme] ))
def update_readme_for_modules(modules): """ Update README.md updating the sections for the module names listed. """ readme = parse_readme() module_docstrings = core_module_docstrings() if modules == ['__all__']: modules = core_module_docstrings().keys() # depends on [control=['if'], data=['modules']] for module in modules: if module in module_docstrings: print_stderr('Updating README.md for module {}'.format(module)) readme[module] = module_docstrings[module] # depends on [control=['if'], data=['module', 'module_docstrings']] else: print_stderr('Module {} not in core modules'.format(module)) # depends on [control=['for'], data=['module']] # write the file readme_file = os.path.join(modules_directory(), 'README.md') with open(readme_file, 'w') as f: f.write(create_readme(readme)) # depends on [control=['with'], data=['f']]
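Typical calls; the module names are hypothetical, and the '__all__' sentinel regenerates every documented section.

# Refresh the sections for two specific modules
update_readme_for_modules(['clock', 'battery_level'])

# Rebuild every module section in README.md
update_readme_for_modules(['__all__'])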
def orchestration_restore(self, saved_artifact_info, custom_params=None): """Orchestration restore :param saved_artifact_info: json with all required data to restore configuration on the device :param custom_params: custom parameters """ if saved_artifact_info is None or saved_artifact_info == '': raise Exception('ConfigurationOperations', 'saved_artifact_info is None or empty') saved_artifact_info = JsonRequestDeserializer(jsonpickle.decode(saved_artifact_info)) if not hasattr(saved_artifact_info, 'saved_artifacts_info'): raise Exception('ConfigurationOperations', 'Saved_artifacts_info is missing') saved_config = saved_artifact_info.saved_artifacts_info params = None if custom_params: params = JsonRequestDeserializer(jsonpickle.decode(custom_params)) _validate_custom_params(params) self._validate_artifact_info(saved_config) if saved_config.restore_rules.requires_same_resource \ and saved_config.resource_name.lower() != self.resource_config.name.lower(): raise Exception('ConfigurationOperations', 'Incompatible resource, expected {}'.format(self.resource_config.name)) restore_params = {'configuration_type': 'running', 'restore_method': 'override', 'vrf_management_name': None, 'path': '{}:{}'.format(saved_config.saved_artifact.artifact_type, saved_config.saved_artifact.identifier)} if hasattr(params, 'custom_params'): if hasattr(params.custom_params, 'restore_method'): restore_params['restore_method'] = params.custom_params.restore_method if hasattr(params.custom_params, 'configuration_type'): restore_params['configuration_type'] = params.custom_params.configuration_type if hasattr(params.custom_params, 'vrf_management_name'): restore_params['vrf_management_name'] = params.custom_params.vrf_management_name if 'startup' in saved_config.saved_artifact.identifier.split('/')[-1]: restore_params['configuration_type'] = 'startup' self.restore(**restore_params)
def function[orchestration_restore, parameter[self, saved_artifact_info, custom_params]]: constant[Orchestration restore :param saved_artifact_info: json with all required data to restore configuration on the device :param custom_params: custom parameters ] if <ast.BoolOp object at 0x7da20c7c8a90> begin[:] <ast.Raise object at 0x7da20c7cbaf0> variable[saved_artifact_info] assign[=] call[name[JsonRequestDeserializer], parameter[call[name[jsonpickle].decode, parameter[name[saved_artifact_info]]]]] if <ast.UnaryOp object at 0x7da20c7cb940> begin[:] <ast.Raise object at 0x7da20c7cb250> variable[saved_config] assign[=] name[saved_artifact_info].saved_artifacts_info variable[params] assign[=] constant[None] if name[custom_params] begin[:] variable[params] assign[=] call[name[JsonRequestDeserializer], parameter[call[name[jsonpickle].decode, parameter[name[custom_params]]]]] call[name[_validate_custom_params], parameter[name[params]]] call[name[self]._validate_artifact_info, parameter[name[saved_config]]] if <ast.BoolOp object at 0x7da20c7cb610> begin[:] <ast.Raise object at 0x7da20c7cb190> variable[restore_params] assign[=] dictionary[[<ast.Constant object at 0x7da20c7c9b10>, <ast.Constant object at 0x7da20c7cb1c0>, <ast.Constant object at 0x7da20c7caf50>, <ast.Constant object at 0x7da20c7ca350>], [<ast.Constant object at 0x7da20c7c9f00>, <ast.Constant object at 0x7da20c7cbb50>, <ast.Constant object at 0x7da20c7c8190>, <ast.Call object at 0x7da20c7c97e0>]] if call[name[hasattr], parameter[name[params], constant[custom_params]]] begin[:] if call[name[hasattr], parameter[name[params].custom_params, constant[restore_method]]] begin[:] call[name[restore_params]][constant[restore_method]] assign[=] name[params].custom_params.restore_method if call[name[hasattr], parameter[name[params].custom_params, constant[configuration_type]]] begin[:] call[name[restore_params]][constant[configuration_type]] assign[=] name[params].custom_params.configuration_type if call[name[hasattr], parameter[name[params].custom_params, constant[vrf_management_name]]] begin[:] call[name[restore_params]][constant[vrf_management_name]] assign[=] name[params].custom_params.vrf_management_name if compare[constant[startup] in call[call[name[saved_config].saved_artifact.identifier.split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da20c7ca020>]] begin[:] call[name[restore_params]][constant[configuration_type]] assign[=] constant[startup] call[name[self].restore, parameter[]]
keyword[def] identifier[orchestration_restore] ( identifier[self] , identifier[saved_artifact_info] , identifier[custom_params] = keyword[None] ): literal[string] keyword[if] identifier[saved_artifact_info] keyword[is] keyword[None] keyword[or] identifier[saved_artifact_info] == literal[string] : keyword[raise] identifier[Exception] ( literal[string] , literal[string] ) identifier[saved_artifact_info] = identifier[JsonRequestDeserializer] ( identifier[jsonpickle] . identifier[decode] ( identifier[saved_artifact_info] )) keyword[if] keyword[not] identifier[hasattr] ( identifier[saved_artifact_info] , literal[string] ): keyword[raise] identifier[Exception] ( literal[string] , literal[string] ) identifier[saved_config] = identifier[saved_artifact_info] . identifier[saved_artifacts_info] identifier[params] = keyword[None] keyword[if] identifier[custom_params] : identifier[params] = identifier[JsonRequestDeserializer] ( identifier[jsonpickle] . identifier[decode] ( identifier[custom_params] )) identifier[_validate_custom_params] ( identifier[params] ) identifier[self] . identifier[_validate_artifact_info] ( identifier[saved_config] ) keyword[if] identifier[saved_config] . identifier[restore_rules] . identifier[requires_same_resource] keyword[and] identifier[saved_config] . identifier[resource_name] . identifier[lower] ()!= identifier[self] . identifier[resource_config] . identifier[name] . identifier[lower] (): keyword[raise] identifier[Exception] ( literal[string] , literal[string] . identifier[format] ( identifier[self] . identifier[resource_config] . identifier[name] )) identifier[restore_params] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] . identifier[format] ( identifier[saved_config] . identifier[saved_artifact] . identifier[artifact_type] , identifier[saved_config] . identifier[saved_artifact] . identifier[identifier] )} keyword[if] identifier[hasattr] ( identifier[params] , literal[string] ): keyword[if] identifier[hasattr] ( identifier[params] . identifier[custom_params] , literal[string] ): identifier[restore_params] [ literal[string] ]= identifier[params] . identifier[custom_params] . identifier[restore_method] keyword[if] identifier[hasattr] ( identifier[params] . identifier[custom_params] , literal[string] ): identifier[restore_params] [ literal[string] ]= identifier[params] . identifier[custom_params] . identifier[configuration_type] keyword[if] identifier[hasattr] ( identifier[params] . identifier[custom_params] , literal[string] ): identifier[restore_params] [ literal[string] ]= identifier[params] . identifier[custom_params] . identifier[vrf_management_name] keyword[if] literal[string] keyword[in] identifier[saved_config] . identifier[saved_artifact] . identifier[identifier] . identifier[split] ( literal[string] )[- literal[int] ]: identifier[restore_params] [ literal[string] ]= literal[string] identifier[self] . identifier[restore] (** identifier[restore_params] )
def orchestration_restore(self, saved_artifact_info, custom_params=None): """Orchestration restore :param saved_artifact_info: json with all required data to restore configuration on the device :param custom_params: custom parameters """ if saved_artifact_info is None or saved_artifact_info == '': raise Exception('ConfigurationOperations', 'saved_artifact_info is None or empty') # depends on [control=['if'], data=[]] saved_artifact_info = JsonRequestDeserializer(jsonpickle.decode(saved_artifact_info)) if not hasattr(saved_artifact_info, 'saved_artifacts_info'): raise Exception('ConfigurationOperations', 'Saved_artifacts_info is missing') # depends on [control=['if'], data=[]] saved_config = saved_artifact_info.saved_artifacts_info params = None if custom_params: params = JsonRequestDeserializer(jsonpickle.decode(custom_params)) _validate_custom_params(params) # depends on [control=['if'], data=[]] self._validate_artifact_info(saved_config) if saved_config.restore_rules.requires_same_resource and saved_config.resource_name.lower() != self.resource_config.name.lower(): raise Exception('ConfigurationOperations', 'Incompatible resource, expected {}'.format(self.resource_config.name)) # depends on [control=['if'], data=[]] restore_params = {'configuration_type': 'running', 'restore_method': 'override', 'vrf_management_name': None, 'path': '{}:{}'.format(saved_config.saved_artifact.artifact_type, saved_config.saved_artifact.identifier)} if hasattr(params, 'custom_params'): if hasattr(params.custom_params, 'restore_method'): restore_params['restore_method'] = params.custom_params.restore_method # depends on [control=['if'], data=[]] if hasattr(params.custom_params, 'configuration_type'): restore_params['configuration_type'] = params.custom_params.configuration_type # depends on [control=['if'], data=[]] if hasattr(params.custom_params, 'vrf_management_name'): restore_params['vrf_management_name'] = params.custom_params.vrf_management_name # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if 'startup' in saved_config.saved_artifact.identifier.split('/')[-1]: restore_params['configuration_type'] = 'startup' # depends on [control=['if'], data=[]] self.restore(**restore_params)
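A fabricated saved_artifact_info payload carrying the fields this method dereferences (plain JSON is fine for jsonpickle.decode). The private validators may require keys beyond these, and all values are invented.

import json

saved_artifact_info = json.dumps({
    'saved_artifacts_info': {
        'resource_name': 'router-01',
        'restore_rules': {'requires_same_resource': True},
        'saved_artifact': {
            'artifact_type': 'ftp',
            'identifier': '//backups/router-01-startup-100119',
        },
    },
})

custom_params = json.dumps({
    'custom_params': {
        'restore_method': 'override',
        'configuration_type': 'startup',
        'vrf_management_name': 'mgmt',
    },
})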
def get_fk_popup_field(cls, *args, **kwargs):
    """ Generate a FK field related to this class, with popup CRUD """
    kwargs['popup_name'] = cls.get_class_verbose_name()
    kwargs['permissions_required'] = cls.permissions_required
    if cls.template_name_fk is not None:
        kwargs['template_name'] = cls.template_name_fk
    return ForeignKeyWidget('{}_popup_create'.format(cls.get_class_name()),
                            *args, **kwargs)
def function[get_fk_popup_field, parameter[cls]]: constant[ generate fk field related to class wait popup crud ] call[name[kwargs]][constant[popup_name]] assign[=] call[name[cls].get_class_verbose_name, parameter[]] call[name[kwargs]][constant[permissions_required]] assign[=] name[cls].permissions_required if compare[name[cls].template_name_fk is_not constant[None]] begin[:] call[name[kwargs]][constant[template_name]] assign[=] name[cls].template_name_fk return[call[name[ForeignKeyWidget], parameter[call[constant[{}_popup_create].format, parameter[call[name[cls].get_class_name, parameter[]]]], <ast.Starred object at 0x7da20c6e7df0>]]]
keyword[def] identifier[get_fk_popup_field] ( identifier[cls] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= identifier[cls] . identifier[get_class_verbose_name] () identifier[kwargs] [ literal[string] ]= identifier[cls] . identifier[permissions_required] keyword[if] identifier[cls] . identifier[template_name_fk] keyword[is] keyword[not] keyword[None] : identifier[kwargs] [ literal[string] ]= identifier[cls] . identifier[template_name_fk] keyword[return] identifier[ForeignKeyWidget] ( literal[string] . identifier[format] ( identifier[cls] . identifier[get_class_name] ()),* identifier[args] ,** identifier[kwargs] )
def get_fk_popup_field(cls, *args, **kwargs):
    """Generate a foreign-key form field wired to this class's popup-create CRUD view."""
    kwargs['popup_name'] = cls.get_class_verbose_name()
    kwargs['permissions_required'] = cls.permissions_required
    if cls.template_name_fk is not None:
        kwargs['template_name'] = cls.template_name_fk # depends on [control=['if'], data=[]]
    return ForeignKeyWidget('{}_popup_create'.format(cls.get_class_name()), *args, **kwargs)
def start(self): """Sub-command start.""" self.discover() # startup_info only gets loaded from protocol version 2 on, # check if it's loaded if not self.startup_info: # hack to make environment startable with older protocol # versions < 2: try to start nodes via init if all nodes are down if len(self.get_tagged(['down'])) == len(self.get_tagged(['all'])): self.args = self.loaded_args print("upgrading mlaunch environment meta-data.") return self.init() else: raise SystemExit("These nodes were created with an older " "version of mlaunch (v1.1.1 or below). To " "upgrade this environment and make use of " "the start/stop/list commands, stop all " "nodes manually, then run 'mlaunch start' " "again. You only have to do this once.") # if new unknown_args are present, compare them with loaded ones # (here we can be certain of protocol v2+) if (self.args['binarypath'] is not None or (self.unknown_args and set(self.unknown_args) != set(self.loaded_unknown_args))): # store current args, use self.args from file (self.loaded_args) start_args = self.args self.args = self.loaded_args self.args['binarypath'] = start_args['binarypath'] # construct new startup strings with updated unknown args. # They are for this start only and will not be persisted in # the .mlaunch_startup file self._construct_cmdlines() # reset to original args for this start command self.args = start_args matches = self._get_ports_from_args(self.args, 'down') if len(matches) == 0: raise SystemExit('no nodes started.') # start config servers first config_matches = self.get_tagged(['config']).intersection(matches) self._start_on_ports(config_matches, wait=True) # start shards next mongod_matches = (self.get_tagged(['mongod']) - self.get_tagged(['config'])) mongod_matches = mongod_matches.intersection(matches) self._start_on_ports(mongod_matches, wait=True) # now start mongos mongos_matches = self.get_tagged(['mongos']).intersection(matches) self._start_on_ports(mongos_matches) # wait for all matched nodes to be running self.wait_for(matches) # refresh discover self.discover()
def function[start, parameter[self]]: constant[Sub-command start.] call[name[self].discover, parameter[]] if <ast.UnaryOp object at 0x7da1b1602e60> begin[:] if compare[call[name[len], parameter[call[name[self].get_tagged, parameter[list[[<ast.Constant object at 0x7da1b1601930>]]]]]] equal[==] call[name[len], parameter[call[name[self].get_tagged, parameter[list[[<ast.Constant object at 0x7da1b1600370>]]]]]]] begin[:] name[self].args assign[=] name[self].loaded_args call[name[print], parameter[constant[upgrading mlaunch environment meta-data.]]] return[call[name[self].init, parameter[]]] if <ast.BoolOp object at 0x7da1b1603b50> begin[:] variable[start_args] assign[=] name[self].args name[self].args assign[=] name[self].loaded_args call[name[self].args][constant[binarypath]] assign[=] call[name[start_args]][constant[binarypath]] call[name[self]._construct_cmdlines, parameter[]] name[self].args assign[=] name[start_args] variable[matches] assign[=] call[name[self]._get_ports_from_args, parameter[name[self].args, constant[down]]] if compare[call[name[len], parameter[name[matches]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b1601990> variable[config_matches] assign[=] call[call[name[self].get_tagged, parameter[list[[<ast.Constant object at 0x7da1b1602590>]]]].intersection, parameter[name[matches]]] call[name[self]._start_on_ports, parameter[name[config_matches]]] variable[mongod_matches] assign[=] binary_operation[call[name[self].get_tagged, parameter[list[[<ast.Constant object at 0x7da1b16023b0>]]]] - call[name[self].get_tagged, parameter[list[[<ast.Constant object at 0x7da1b1720e20>]]]]] variable[mongod_matches] assign[=] call[name[mongod_matches].intersection, parameter[name[matches]]] call[name[self]._start_on_ports, parameter[name[mongod_matches]]] variable[mongos_matches] assign[=] call[call[name[self].get_tagged, parameter[list[[<ast.Constant object at 0x7da1b1720a00>]]]].intersection, parameter[name[matches]]] call[name[self]._start_on_ports, parameter[name[mongos_matches]]] call[name[self].wait_for, parameter[name[matches]]] call[name[self].discover, parameter[]]
keyword[def] identifier[start] ( identifier[self] ): literal[string] identifier[self] . identifier[discover] () keyword[if] keyword[not] identifier[self] . identifier[startup_info] : keyword[if] identifier[len] ( identifier[self] . identifier[get_tagged] ([ literal[string] ]))== identifier[len] ( identifier[self] . identifier[get_tagged] ([ literal[string] ])): identifier[self] . identifier[args] = identifier[self] . identifier[loaded_args] identifier[print] ( literal[string] ) keyword[return] identifier[self] . identifier[init] () keyword[else] : keyword[raise] identifier[SystemExit] ( literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] ) keyword[if] ( identifier[self] . identifier[args] [ literal[string] ] keyword[is] keyword[not] keyword[None] keyword[or] ( identifier[self] . identifier[unknown_args] keyword[and] identifier[set] ( identifier[self] . identifier[unknown_args] )!= identifier[set] ( identifier[self] . identifier[loaded_unknown_args] ))): identifier[start_args] = identifier[self] . identifier[args] identifier[self] . identifier[args] = identifier[self] . identifier[loaded_args] identifier[self] . identifier[args] [ literal[string] ]= identifier[start_args] [ literal[string] ] identifier[self] . identifier[_construct_cmdlines] () identifier[self] . identifier[args] = identifier[start_args] identifier[matches] = identifier[self] . identifier[_get_ports_from_args] ( identifier[self] . identifier[args] , literal[string] ) keyword[if] identifier[len] ( identifier[matches] )== literal[int] : keyword[raise] identifier[SystemExit] ( literal[string] ) identifier[config_matches] = identifier[self] . identifier[get_tagged] ([ literal[string] ]). identifier[intersection] ( identifier[matches] ) identifier[self] . identifier[_start_on_ports] ( identifier[config_matches] , identifier[wait] = keyword[True] ) identifier[mongod_matches] =( identifier[self] . identifier[get_tagged] ([ literal[string] ])- identifier[self] . identifier[get_tagged] ([ literal[string] ])) identifier[mongod_matches] = identifier[mongod_matches] . identifier[intersection] ( identifier[matches] ) identifier[self] . identifier[_start_on_ports] ( identifier[mongod_matches] , identifier[wait] = keyword[True] ) identifier[mongos_matches] = identifier[self] . identifier[get_tagged] ([ literal[string] ]). identifier[intersection] ( identifier[matches] ) identifier[self] . identifier[_start_on_ports] ( identifier[mongos_matches] ) identifier[self] . identifier[wait_for] ( identifier[matches] ) identifier[self] . identifier[discover] ()
def start(self): """Sub-command start.""" self.discover() # startup_info only gets loaded from protocol version 2 on, # check if it's loaded if not self.startup_info: # hack to make environment startable with older protocol # versions < 2: try to start nodes via init if all nodes are down if len(self.get_tagged(['down'])) == len(self.get_tagged(['all'])): self.args = self.loaded_args print('upgrading mlaunch environment meta-data.') return self.init() # depends on [control=['if'], data=[]] else: raise SystemExit("These nodes were created with an older version of mlaunch (v1.1.1 or below). To upgrade this environment and make use of the start/stop/list commands, stop all nodes manually, then run 'mlaunch start' again. You only have to do this once.") # depends on [control=['if'], data=[]] # if new unknown_args are present, compare them with loaded ones # (here we can be certain of protocol v2+) if self.args['binarypath'] is not None or (self.unknown_args and set(self.unknown_args) != set(self.loaded_unknown_args)): # store current args, use self.args from file (self.loaded_args) start_args = self.args self.args = self.loaded_args self.args['binarypath'] = start_args['binarypath'] # construct new startup strings with updated unknown args. # They are for this start only and will not be persisted in # the .mlaunch_startup file self._construct_cmdlines() # reset to original args for this start command self.args = start_args # depends on [control=['if'], data=[]] matches = self._get_ports_from_args(self.args, 'down') if len(matches) == 0: raise SystemExit('no nodes started.') # depends on [control=['if'], data=[]] # start config servers first config_matches = self.get_tagged(['config']).intersection(matches) self._start_on_ports(config_matches, wait=True) # start shards next mongod_matches = self.get_tagged(['mongod']) - self.get_tagged(['config']) mongod_matches = mongod_matches.intersection(matches) self._start_on_ports(mongod_matches, wait=True) # now start mongos mongos_matches = self.get_tagged(['mongos']).intersection(matches) self._start_on_ports(mongos_matches) # wait for all matched nodes to be running self.wait_for(matches) # refresh discover self.discover()
def add_hyperedges(self, hyperedges, attr_dict=None, **attr):
        """Adds multiple hyperedges to the graph, along with any related
        attributes of the hyperedges.
        If any node in the tail or head of any hyperedge has not
        previously been added to the hypergraph, it will automatically
        be added here. Hyperedges without a "weight" attribute specified
        will be assigned the default value of 1.

        :param hyperedges: iterable container of either tuples of
                    (tail reference, head reference) OR tuples of
                    (tail reference, head reference, attribute dictionary);
                    if an attribute dictionary is provided in the tuple,
                    its values will override both attr_dict's and attr's
                    values.
        :param attr_dict: dictionary of attributes shared by all
                    the hyperedges.
        :param attr: keyword arguments of attributes of the hyperedges;
                    attr's values will override attr_dict's values
                    if both are provided.
        :returns: list -- the IDs of the hyperedges added in the order
                    specified by the hyperedges container's iterator.

        See also:
        add_hyperedge

        Examples:
        ::

            >>> H = DirectedHypergraph()
            >>> hyperedge_list = ((["A", "B"], ["C", "D"]),
                                  (("A", "C"), ("B"), {'weight': 2}),
                                  (set(["D"]), set(["A", "C"])))
            >>> H.add_hyperedges(hyperedge_list)

        """
        attr_dict = self._combine_attribute_arguments(attr_dict, attr)

        hyperedge_ids = []
        for hyperedge in hyperedges:
            if len(hyperedge) == 3:
                # See (("A", "C"), ("B"), {'weight': 2}) in the
                # documentation example
                tail, head, hyperedge_attr_dict = hyperedge
                # Create a new dictionary and load it with attr_dict and
                # hyperedge_attr_dict, with the latter taking precedence
                new_dict = attr_dict.copy()
                new_dict.update(hyperedge_attr_dict)
                hyperedge_id = self.add_hyperedge(tail, head, new_dict)
            else:
                # See (["A", "B"], ["C", "D"]) in the documentation example
                tail, head = hyperedge
                hyperedge_id = \
                    self.add_hyperedge(tail, head, attr_dict.copy())
            hyperedge_ids.append(hyperedge_id)

        return hyperedge_ids
def function[add_hyperedges, parameter[self, hyperedges, attr_dict]]: constant[Adds multiple hyperedges to the graph, along with any related attributes of the hyperedges. If any node in the tail or head of any hyperedge has not previously been added to the hypergraph, it will automatically be added here. Hyperedges without a "weight" attribute specified will be assigned the default value of 1. :param hyperedges: iterable container to either tuples of (tail reference, head reference) OR tuples of (tail reference, head reference, attribute dictionary); if an attribute dictionary is provided in the tuple, its values will override both attr_dict's and attr's values. :param attr_dict: dictionary of attributes shared by all the hyperedges. :param attr: keyword arguments of attributes of the hyperedges; attr's values will override attr_dict's values if both are provided. :returns: list -- the IDs of the hyperedges added in the order specified by the hyperedges container's iterator. See also: add_hyperedge Examples: :: >>> H = DirectedHypergraph() >>> xyz = hyperedge_list = ((["A", "B"], ["C", "D"]), (("A", "C"), ("B"), {'weight': 2}), (set(["D"]), set(["A", "C"]))) >>> H.add_hyperedges(hyperedge_list) ] variable[attr_dict] assign[=] call[name[self]._combine_attribute_arguments, parameter[name[attr_dict], name[attr]]] variable[hyperedge_ids] assign[=] list[[]] for taget[name[hyperedge]] in starred[name[hyperedges]] begin[:] if compare[call[name[len], parameter[name[hyperedge]]] equal[==] constant[3]] begin[:] <ast.Tuple object at 0x7da1b11d6170> assign[=] name[hyperedge] variable[new_dict] assign[=] call[name[attr_dict].copy, parameter[]] call[name[new_dict].update, parameter[name[hyperedge_attr_dict]]] variable[hyperedge_id] assign[=] call[name[self].add_hyperedge, parameter[name[tail], name[head], name[new_dict]]] call[name[hyperedge_ids].append, parameter[name[hyperedge_id]]] return[name[hyperedge_ids]]
keyword[def] identifier[add_hyperedges] ( identifier[self] , identifier[hyperedges] , identifier[attr_dict] = keyword[None] ,** identifier[attr] ): literal[string] identifier[attr_dict] = identifier[self] . identifier[_combine_attribute_arguments] ( identifier[attr_dict] , identifier[attr] ) identifier[hyperedge_ids] =[] keyword[for] identifier[hyperedge] keyword[in] identifier[hyperedges] : keyword[if] identifier[len] ( identifier[hyperedge] )== literal[int] : identifier[tail] , identifier[head] , identifier[hyperedge_attr_dict] = identifier[hyperedge] identifier[new_dict] = identifier[attr_dict] . identifier[copy] () identifier[new_dict] . identifier[update] ( identifier[hyperedge_attr_dict] ) identifier[hyperedge_id] = identifier[self] . identifier[add_hyperedge] ( identifier[tail] , identifier[head] , identifier[new_dict] ) keyword[else] : identifier[tail] , identifier[head] = identifier[hyperedge] identifier[hyperedge_id] = identifier[self] . identifier[add_hyperedge] ( identifier[tail] , identifier[head] , identifier[attr_dict] . identifier[copy] ()) identifier[hyperedge_ids] . identifier[append] ( identifier[hyperedge_id] ) keyword[return] identifier[hyperedge_ids]
def add_hyperedges(self, hyperedges, attr_dict=None, **attr):
    """Adds multiple hyperedges to the graph, along with any related
    attributes of the hyperedges.
    If any node in the tail or head of any hyperedge has not
    previously been added to the hypergraph, it will automatically
    be added here. Hyperedges without a "weight" attribute specified
    will be assigned the default value of 1.

    :param hyperedges: iterable container of either tuples of
                (tail reference, head reference) OR tuples of
                (tail reference, head reference, attribute dictionary);
                if an attribute dictionary is provided in the tuple,
                its values will override both attr_dict's and attr's
                values.
    :param attr_dict: dictionary of attributes shared by all
                the hyperedges.
    :param attr: keyword arguments of attributes of the hyperedges;
                attr's values will override attr_dict's values
                if both are provided.
    :returns: list -- the IDs of the hyperedges added in the order
                specified by the hyperedges container's iterator.

    See also:
    add_hyperedge

    Examples:
    ::

        >>> H = DirectedHypergraph()
        >>> hyperedge_list = ((["A", "B"], ["C", "D"]),
                              (("A", "C"), ("B"), {'weight': 2}),
                              (set(["D"]), set(["A", "C"])))
        >>> H.add_hyperedges(hyperedge_list)

    """
    attr_dict = self._combine_attribute_arguments(attr_dict, attr)
    hyperedge_ids = []
    for hyperedge in hyperedges:
        if len(hyperedge) == 3:
            # See (("A", "C"), ("B"), {'weight': 2}) in the
            # documentation example
            (tail, head, hyperedge_attr_dict) = hyperedge
            # Create a new dictionary and load it with attr_dict and
            # hyperedge_attr_dict, with the latter taking precedence
            new_dict = attr_dict.copy()
            new_dict.update(hyperedge_attr_dict)
            hyperedge_id = self.add_hyperedge(tail, head, new_dict) # depends on [control=['if'], data=[]]
        else:
            # See (["A", "B"], ["C", "D"]) in the documentation example
            (tail, head) = hyperedge
            hyperedge_id = self.add_hyperedge(tail, head, attr_dict.copy())
        hyperedge_ids.append(hyperedge_id) # depends on [control=['for'], data=['hyperedge']]
    return hyperedge_ids
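A brief hedged sketch of the merge order the loop above implements: the per-hyperedge dictionary is applied last via dict.update(), so its keys override the shared attr_dict (the names `shared` and `per_edge` are made up for illustration).

# Hedged sketch of the attribute precedence in add_hyperedges.
shared = {'weight': 1, 'color': 'red'}   # plays the role of attr_dict
per_edge = {'weight': 2}                 # plays the role of hyperedge_attr_dict
merged = shared.copy()
merged.update(per_edge)                  # per-hyperedge values win
assert merged == {'weight': 2, 'color': 'red'}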
def arp_packet(opcode, src_mac, src_ip, dst_mac, dst_ip):
    """
    Generate an ARP packet encapsulated in an Ethernet frame.
    """
    # Generate the ethernet header first.
    pkt = packet.Packet()
    eth_pkt = ethernet.ethernet(dst_mac, src_mac, ETH_TYPE_ARP)
    pkt.add_protocol(eth_pkt)

    # Use the IPv4 ARP wrapper from the packet library directly.
    arp_pkt = arp.arp_ip(opcode, src_mac, src_ip, dst_mac, dst_ip)
    pkt.add_protocol(arp_pkt)
    pkt.serialize()
    return pkt.data
def function[arp_packet, parameter[opcode, src_mac, src_ip, dst_mac, dst_ip]]: constant[ Generate ARP packet with ethernet encapsulated. ] variable[pkt] assign[=] call[name[packet].Packet, parameter[]] variable[eth_pkt] assign[=] call[name[ethernet].ethernet, parameter[name[dst_mac], name[src_mac], name[ETH_TYPE_ARP]]] call[name[pkt].add_protocol, parameter[name[eth_pkt]]] variable[arp_pkt] assign[=] call[name[arp].arp_ip, parameter[name[opcode], name[src_mac], name[src_ip], name[dst_mac], name[dst_ip]]] call[name[pkt].add_protocol, parameter[name[arp_pkt]]] call[name[pkt].serialize, parameter[]] return[name[pkt].data]
keyword[def] identifier[arp_packet] ( identifier[opcode] , identifier[src_mac] , identifier[src_ip] , identifier[dst_mac] , identifier[dst_ip] ): literal[string] identifier[pkt] = identifier[packet] . identifier[Packet] () identifier[eth_pkt] = identifier[ethernet] . identifier[ethernet] ( identifier[dst_mac] , identifier[src_mac] , identifier[ETH_TYPE_ARP] ) identifier[pkt] . identifier[add_protocol] ( identifier[eth_pkt] ) identifier[arp_pkt] = identifier[arp] . identifier[arp_ip] ( identifier[opcode] , identifier[src_mac] , identifier[src_ip] , identifier[dst_mac] , identifier[dst_ip] ) identifier[pkt] . identifier[add_protocol] ( identifier[arp_pkt] ) identifier[pkt] . identifier[serialize] () keyword[return] identifier[pkt] . identifier[data]
def arp_packet(opcode, src_mac, src_ip, dst_mac, dst_ip): """ Generate an ARP packet encapsulated in an Ethernet frame. """ # Generate the ethernet header first. pkt = packet.Packet() eth_pkt = ethernet.ethernet(dst_mac, src_mac, ETH_TYPE_ARP) pkt.add_protocol(eth_pkt) # Use the IPv4 ARP wrapper from the packet library directly. arp_pkt = arp.arp_ip(opcode, src_mac, src_ip, dst_mac, dst_ip) pkt.add_protocol(arp_pkt) pkt.serialize() return pkt.data
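A possible usage sketch for the helper above, assuming Ryu's packet library supplies the `packet`, `ethernet`, `arp`, and `ETH_TYPE_ARP` names the function references; the MAC and IP values are invented for illustration.

from ryu.lib.packet import arp

# Broadcast ARP request asking who owns 10.0.0.2 (illustrative addresses).
frame = arp_packet(arp.ARP_REQUEST,       # opcode 1
                   '02:00:00:00:00:01',   # sender MAC (made up)
                   '10.0.0.1',            # sender IP (made up)
                   'ff:ff:ff:ff:ff:ff',   # broadcast destination/target MAC
                   '10.0.0.2')            # target IP being resolved (made up)
# `frame` is the serialized bytes, ready to hand to e.g. an OpenFlow packet-out.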
def init_jcrop(min_size=None): """Initialize jcrop. :param min_size: The minimal size of crop area. """ init_x = current_app.config['AVATARS_CROP_INIT_POS'][0] init_y = current_app.config['AVATARS_CROP_INIT_POS'][1] init_size = current_app.config['AVATARS_CROP_INIT_SIZE'] or current_app.config['AVATARS_SIZE_TUPLE'][2] if current_app.config['AVATARS_CROP_MIN_SIZE']: min_size = min_size or current_app.config['AVATARS_SIZE_TUPLE'][2] min_size_js = 'jcrop_api.setOptions({minSize: [%d, %d]});' % (min_size, min_size) else: min_size_js = '' return Markup(''' <script type="text/javascript"> jQuery(function ($) { // Create variables (in this scope) to hold the API and image size var jcrop_api, boundx, boundy, // Grab some information about the preview pane $preview = $('#preview-box'), $pcnt = $('#preview-box .preview-box'), $pimg = $('#preview-box .preview-box img'), xsize = $pcnt.width(), ysize = $pcnt.height(); $('#crop-box').Jcrop({ onChange: updatePreview, onSelect: updateCoords, setSelect: [%s, %s, %s, %s], aspectRatio: 1 }, function () { // Use the API to get the real image size var bounds = this.getBounds(); boundx = bounds[0]; boundy = bounds[1]; // Store the API in the jcrop_api variable jcrop_api = this; %s jcrop_api.focus(); // Move the preview into the jcrop container for css positioning $preview.appendTo(jcrop_api.ui.holder); }); function updatePreview(c) { if (parseInt(c.w) > 0) { var rx = xsize / c.w; var ry = ysize / c.h; $pimg.css({ width: Math.round(rx * boundx) + 'px', height: Math.round(ry * boundy) + 'px', marginLeft: '-' + Math.round(rx * c.x) + 'px', marginTop: '-' + Math.round(ry * c.y) + 'px' }); } } }); function updateCoords(c) { $('#x').val(c.x); $('#y').val(c.y); $('#w').val(c.w); $('#h').val(c.h); } </script> ''' % (init_x, init_y, init_size, init_size, min_size_js))
def function[init_jcrop, parameter[min_size]]: constant[Initialize jcrop. :param min_size: The minimal size of crop area. ] variable[init_x] assign[=] call[call[name[current_app].config][constant[AVATARS_CROP_INIT_POS]]][constant[0]] variable[init_y] assign[=] call[call[name[current_app].config][constant[AVATARS_CROP_INIT_POS]]][constant[1]] variable[init_size] assign[=] <ast.BoolOp object at 0x7da18c4cf430> if call[name[current_app].config][constant[AVATARS_CROP_MIN_SIZE]] begin[:] variable[min_size] assign[=] <ast.BoolOp object at 0x7da18c4ccaf0> variable[min_size_js] assign[=] binary_operation[constant[jcrop_api.setOptions({minSize: [%d, %d]});] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18c4ce500>, <ast.Name object at 0x7da18c4ceb90>]]] return[call[name[Markup], parameter[binary_operation[constant[ <script type="text/javascript"> jQuery(function ($) { // Create variables (in this scope) to hold the API and image size var jcrop_api, boundx, boundy, // Grab some information about the preview pane $preview = $('#preview-box'), $pcnt = $('#preview-box .preview-box'), $pimg = $('#preview-box .preview-box img'), xsize = $pcnt.width(), ysize = $pcnt.height(); $('#crop-box').Jcrop({ onChange: updatePreview, onSelect: updateCoords, setSelect: [%s, %s, %s, %s], aspectRatio: 1 }, function () { // Use the API to get the real image size var bounds = this.getBounds(); boundx = bounds[0]; boundy = bounds[1]; // Store the API in the jcrop_api variable jcrop_api = this; %s jcrop_api.focus(); // Move the preview into the jcrop container for css positioning $preview.appendTo(jcrop_api.ui.holder); }); function updatePreview(c) { if (parseInt(c.w) > 0) { var rx = xsize / c.w; var ry = ysize / c.h; $pimg.css({ width: Math.round(rx * boundx) + 'px', height: Math.round(ry * boundy) + 'px', marginLeft: '-' + Math.round(rx * c.x) + 'px', marginTop: '-' + Math.round(ry * c.y) + 'px' }); } } }); function updateCoords(c) { $('#x').val(c.x); $('#y').val(c.y); $('#w').val(c.w); $('#h').val(c.h); } </script> ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18c4cfac0>, <ast.Name object at 0x7da18c4cfbe0>, <ast.Name object at 0x7da18c4cf8b0>, <ast.Name object at 0x7da18c4cc820>, <ast.Name object at 0x7da18c4ce1d0>]]]]]]
keyword[def] identifier[init_jcrop] ( identifier[min_size] = keyword[None] ): literal[string] identifier[init_x] = identifier[current_app] . identifier[config] [ literal[string] ][ literal[int] ] identifier[init_y] = identifier[current_app] . identifier[config] [ literal[string] ][ literal[int] ] identifier[init_size] = identifier[current_app] . identifier[config] [ literal[string] ] keyword[or] identifier[current_app] . identifier[config] [ literal[string] ][ literal[int] ] keyword[if] identifier[current_app] . identifier[config] [ literal[string] ]: identifier[min_size] = identifier[min_size] keyword[or] identifier[current_app] . identifier[config] [ literal[string] ][ literal[int] ] identifier[min_size_js] = literal[string] %( identifier[min_size] , identifier[min_size] ) keyword[else] : identifier[min_size_js] = literal[string] keyword[return] identifier[Markup] ( literal[string] %( identifier[init_x] , identifier[init_y] , identifier[init_size] , identifier[init_size] , identifier[min_size_js] ))
def init_jcrop(min_size=None): """Initialize jcrop. :param min_size: The minimal size of crop area. """ init_x = current_app.config['AVATARS_CROP_INIT_POS'][0] init_y = current_app.config['AVATARS_CROP_INIT_POS'][1] init_size = current_app.config['AVATARS_CROP_INIT_SIZE'] or current_app.config['AVATARS_SIZE_TUPLE'][2] if current_app.config['AVATARS_CROP_MIN_SIZE']: min_size = min_size or current_app.config['AVATARS_SIZE_TUPLE'][2] min_size_js = 'jcrop_api.setOptions({minSize: [%d, %d]});' % (min_size, min_size) # depends on [control=['if'], data=[]] else: min_size_js = '' return Markup('\n<script type="text/javascript">\n jQuery(function ($) {\n // Create variables (in this scope) to hold the API and image size\n var jcrop_api,\n boundx,\n boundy,\n\n // Grab some information about the preview pane\n $preview = $(\'#preview-box\'),\n $pcnt = $(\'#preview-box .preview-box\'),\n $pimg = $(\'#preview-box .preview-box img\'),\n\n xsize = $pcnt.width(),\n ysize = $pcnt.height();\n\n $(\'#crop-box\').Jcrop({\n onChange: updatePreview,\n onSelect: updateCoords,\n setSelect: [%s, %s, %s, %s],\n aspectRatio: 1\n }, function () {\n // Use the API to get the real image size\n var bounds = this.getBounds();\n boundx = bounds[0];\n boundy = bounds[1];\n // Store the API in the jcrop_api variable\n jcrop_api = this;\n %s\n jcrop_api.focus();\n // Move the preview into the jcrop container for css positioning\n $preview.appendTo(jcrop_api.ui.holder);\n });\n\n function updatePreview(c) {\n if (parseInt(c.w) > 0) {\n var rx = xsize / c.w;\n var ry = ysize / c.h;\n $pimg.css({\n width: Math.round(rx * boundx) + \'px\',\n height: Math.round(ry * boundy) + \'px\',\n marginLeft: \'-\' + Math.round(rx * c.x) + \'px\',\n marginTop: \'-\' + Math.round(ry * c.y) + \'px\'\n });\n }\n }\n });\n\n function updateCoords(c) {\n $(\'#x\').val(c.x);\n $(\'#y\').val(c.y);\n $(\'#w\').val(c.w);\n $(\'#h\').val(c.h);\n }\n </script>\n ' % (init_x, init_y, init_size, init_size, min_size_js))
def save(self, must_create=False): """ Saves the current session data to the database. If 'must_create' is True, a database error will be raised if the saving operation doesn't create a *new* entry (as opposed to possibly updating an existing entry). """ obj = Session( session_key=self._get_or_create_session_key(), session_data=self.encode(self._get_session(no_load=must_create)), expire_date=self.get_expiry_date(), user_agent=self.user_agent, user_id=self.user_id, ip=self.ip, ) using = router.db_for_write(Session, instance=obj) try: with transaction.atomic(using): obj.save(force_insert=must_create, using=using) except IntegrityError as e: if must_create and 'session_key' in str(e): raise CreateError raise
def function[save, parameter[self, must_create]]: constant[ Saves the current session data to the database. If 'must_create' is True, a database error will be raised if the saving operation doesn't create a *new* entry (as opposed to possibly updating an existing entry). ] variable[obj] assign[=] call[name[Session], parameter[]] variable[using] assign[=] call[name[router].db_for_write, parameter[name[Session]]] <ast.Try object at 0x7da1b1113d00>
keyword[def] identifier[save] ( identifier[self] , identifier[must_create] = keyword[False] ): literal[string] identifier[obj] = identifier[Session] ( identifier[session_key] = identifier[self] . identifier[_get_or_create_session_key] (), identifier[session_data] = identifier[self] . identifier[encode] ( identifier[self] . identifier[_get_session] ( identifier[no_load] = identifier[must_create] )), identifier[expire_date] = identifier[self] . identifier[get_expiry_date] (), identifier[user_agent] = identifier[self] . identifier[user_agent] , identifier[user_id] = identifier[self] . identifier[user_id] , identifier[ip] = identifier[self] . identifier[ip] , ) identifier[using] = identifier[router] . identifier[db_for_write] ( identifier[Session] , identifier[instance] = identifier[obj] ) keyword[try] : keyword[with] identifier[transaction] . identifier[atomic] ( identifier[using] ): identifier[obj] . identifier[save] ( identifier[force_insert] = identifier[must_create] , identifier[using] = identifier[using] ) keyword[except] identifier[IntegrityError] keyword[as] identifier[e] : keyword[if] identifier[must_create] keyword[and] literal[string] keyword[in] identifier[str] ( identifier[e] ): keyword[raise] identifier[CreateError] keyword[raise]
def save(self, must_create=False): """ Saves the current session data to the database. If 'must_create' is True, a database error will be raised if the saving operation doesn't create a *new* entry (as opposed to possibly updating an existing entry). """ obj = Session(session_key=self._get_or_create_session_key(), session_data=self.encode(self._get_session(no_load=must_create)), expire_date=self.get_expiry_date(), user_agent=self.user_agent, user_id=self.user_id, ip=self.ip) using = router.db_for_write(Session, instance=obj) try: with transaction.atomic(using): obj.save(force_insert=must_create, using=using) # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]] except IntegrityError as e: if must_create and 'session_key' in str(e): raise CreateError # depends on [control=['if'], data=[]] raise # depends on [control=['except'], data=['e']]
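The try/atomic pattern in save() generalizes; below is a minimal hedged sketch of it in isolation, using only public Django APIs (`transaction.atomic`, `IntegrityError`, and the sessions backend's `CreateError`). The function name is made up.

from django.db import IntegrityError, transaction
from django.contrib.sessions.backends.base import CreateError

def insert_new_row(obj, using):
    # Force an INSERT; the atomic block rolls back cleanly on conflict,
    # letting us translate a duplicate-key error into CreateError.
    try:
        with transaction.atomic(using):
            obj.save(force_insert=True, using=using)
    except IntegrityError as e:
        if 'session_key' in str(e):
            raise CreateError
        raise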
def receive_bytes(self, data): """Process bytes received from the network. Arguments: data (bytes): any length bytes received from a network connection to a kafka broker. Returns: responses (list of (correlation_id, response)): any/all completed responses, decoded from bytes to python objects. Raises: KafkaProtocolError: if the bytes received could not be decoded. CorrelationIdError: if the response does not match the request correlation id. """ i = 0 n = len(data) responses = [] while i < n: # Not receiving is the state of reading the payload header if not self._receiving: bytes_to_read = min(4 - self._header.tell(), n - i) self._header.write(data[i:i+bytes_to_read]) i += bytes_to_read if self._header.tell() == 4: self._header.seek(0) nbytes = Int32.decode(self._header) # reset buffer and switch state to receiving payload bytes self._rbuffer = KafkaBytes(nbytes) self._receiving = True elif self._header.tell() > 4: raise Errors.KafkaError('this should not happen - are you threading?') if self._receiving: total_bytes = len(self._rbuffer) staged_bytes = self._rbuffer.tell() bytes_to_read = min(total_bytes - staged_bytes, n - i) self._rbuffer.write(data[i:i+bytes_to_read]) i += bytes_to_read staged_bytes = self._rbuffer.tell() if staged_bytes > total_bytes: raise Errors.KafkaError('Receive buffer has more bytes than expected?') if staged_bytes != total_bytes: break self._receiving = False self._rbuffer.seek(0) resp = self._process_response(self._rbuffer) responses.append(resp) self._reset_buffer() return responses
def function[receive_bytes, parameter[self, data]]: constant[Process bytes received from the network. Arguments: data (bytes): any length bytes received from a network connection to a kafka broker. Returns: responses (list of (correlation_id, response)): any/all completed responses, decoded from bytes to python objects. Raises: KafkaProtocolError: if the bytes received could not be decoded. CorrelationIdError: if the response does not match the request correlation id. ] variable[i] assign[=] constant[0] variable[n] assign[=] call[name[len], parameter[name[data]]] variable[responses] assign[=] list[[]] while compare[name[i] less[<] name[n]] begin[:] if <ast.UnaryOp object at 0x7da1b21e0af0> begin[:] variable[bytes_to_read] assign[=] call[name[min], parameter[binary_operation[constant[4] - call[name[self]._header.tell, parameter[]]], binary_operation[name[n] - name[i]]]] call[name[self]._header.write, parameter[call[name[data]][<ast.Slice object at 0x7da1b21e0580>]]] <ast.AugAssign object at 0x7da18dc9b550> if compare[call[name[self]._header.tell, parameter[]] equal[==] constant[4]] begin[:] call[name[self]._header.seek, parameter[constant[0]]] variable[nbytes] assign[=] call[name[Int32].decode, parameter[name[self]._header]] name[self]._rbuffer assign[=] call[name[KafkaBytes], parameter[name[nbytes]]] name[self]._receiving assign[=] constant[True] if name[self]._receiving begin[:] variable[total_bytes] assign[=] call[name[len], parameter[name[self]._rbuffer]] variable[staged_bytes] assign[=] call[name[self]._rbuffer.tell, parameter[]] variable[bytes_to_read] assign[=] call[name[min], parameter[binary_operation[name[total_bytes] - name[staged_bytes]], binary_operation[name[n] - name[i]]]] call[name[self]._rbuffer.write, parameter[call[name[data]][<ast.Slice object at 0x7da1b21b8130>]]] <ast.AugAssign object at 0x7da1b21b8eb0> variable[staged_bytes] assign[=] call[name[self]._rbuffer.tell, parameter[]] if compare[name[staged_bytes] greater[>] name[total_bytes]] begin[:] <ast.Raise object at 0x7da1b21e2350> if compare[name[staged_bytes] not_equal[!=] name[total_bytes]] begin[:] break name[self]._receiving assign[=] constant[False] call[name[self]._rbuffer.seek, parameter[constant[0]]] variable[resp] assign[=] call[name[self]._process_response, parameter[name[self]._rbuffer]] call[name[responses].append, parameter[name[resp]]] call[name[self]._reset_buffer, parameter[]] return[name[responses]]
keyword[def] identifier[receive_bytes] ( identifier[self] , identifier[data] ): literal[string] identifier[i] = literal[int] identifier[n] = identifier[len] ( identifier[data] ) identifier[responses] =[] keyword[while] identifier[i] < identifier[n] : keyword[if] keyword[not] identifier[self] . identifier[_receiving] : identifier[bytes_to_read] = identifier[min] ( literal[int] - identifier[self] . identifier[_header] . identifier[tell] (), identifier[n] - identifier[i] ) identifier[self] . identifier[_header] . identifier[write] ( identifier[data] [ identifier[i] : identifier[i] + identifier[bytes_to_read] ]) identifier[i] += identifier[bytes_to_read] keyword[if] identifier[self] . identifier[_header] . identifier[tell] ()== literal[int] : identifier[self] . identifier[_header] . identifier[seek] ( literal[int] ) identifier[nbytes] = identifier[Int32] . identifier[decode] ( identifier[self] . identifier[_header] ) identifier[self] . identifier[_rbuffer] = identifier[KafkaBytes] ( identifier[nbytes] ) identifier[self] . identifier[_receiving] = keyword[True] keyword[elif] identifier[self] . identifier[_header] . identifier[tell] ()> literal[int] : keyword[raise] identifier[Errors] . identifier[KafkaError] ( literal[string] ) keyword[if] identifier[self] . identifier[_receiving] : identifier[total_bytes] = identifier[len] ( identifier[self] . identifier[_rbuffer] ) identifier[staged_bytes] = identifier[self] . identifier[_rbuffer] . identifier[tell] () identifier[bytes_to_read] = identifier[min] ( identifier[total_bytes] - identifier[staged_bytes] , identifier[n] - identifier[i] ) identifier[self] . identifier[_rbuffer] . identifier[write] ( identifier[data] [ identifier[i] : identifier[i] + identifier[bytes_to_read] ]) identifier[i] += identifier[bytes_to_read] identifier[staged_bytes] = identifier[self] . identifier[_rbuffer] . identifier[tell] () keyword[if] identifier[staged_bytes] > identifier[total_bytes] : keyword[raise] identifier[Errors] . identifier[KafkaError] ( literal[string] ) keyword[if] identifier[staged_bytes] != identifier[total_bytes] : keyword[break] identifier[self] . identifier[_receiving] = keyword[False] identifier[self] . identifier[_rbuffer] . identifier[seek] ( literal[int] ) identifier[resp] = identifier[self] . identifier[_process_response] ( identifier[self] . identifier[_rbuffer] ) identifier[responses] . identifier[append] ( identifier[resp] ) identifier[self] . identifier[_reset_buffer] () keyword[return] identifier[responses]
def receive_bytes(self, data): """Process bytes received from the network. Arguments: data (bytes): any length bytes received from a network connection to a kafka broker. Returns: responses (list of (correlation_id, response)): any/all completed responses, decoded from bytes to python objects. Raises: KafkaProtocolError: if the bytes received could not be decoded. CorrelationIdError: if the response does not match the request correlation id. """ i = 0 n = len(data) responses = [] while i < n: # Not receiving is the state of reading the payload header if not self._receiving: bytes_to_read = min(4 - self._header.tell(), n - i) self._header.write(data[i:i + bytes_to_read]) i += bytes_to_read if self._header.tell() == 4: self._header.seek(0) nbytes = Int32.decode(self._header) # reset buffer and switch state to receiving payload bytes self._rbuffer = KafkaBytes(nbytes) self._receiving = True # depends on [control=['if'], data=[]] elif self._header.tell() > 4: raise Errors.KafkaError('this should not happen - are you threading?') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if self._receiving: total_bytes = len(self._rbuffer) staged_bytes = self._rbuffer.tell() bytes_to_read = min(total_bytes - staged_bytes, n - i) self._rbuffer.write(data[i:i + bytes_to_read]) i += bytes_to_read staged_bytes = self._rbuffer.tell() if staged_bytes > total_bytes: raise Errors.KafkaError('Receive buffer has more bytes than expected?') # depends on [control=['if'], data=[]] if staged_bytes != total_bytes: break # depends on [control=['if'], data=[]] self._receiving = False self._rbuffer.seek(0) resp = self._process_response(self._rbuffer) responses.append(resp) self._reset_buffer() # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['i', 'n']] return responses
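receive_bytes() above implements incremental length-prefixed framing; as a hedged standalone illustration (plain `struct`, not kafka-python's `Int32`/`KafkaBytes` types), the same idea looks like this.

import struct

def frames(chunks):
    # Reassemble 4-byte big-endian length-prefixed frames from arbitrary
    # network-read chunk boundaries.
    buf = b''
    for chunk in chunks:
        buf += chunk
        while len(buf) >= 4:
            (n,) = struct.unpack('>i', buf[:4])
            if len(buf) < 4 + n:
                break                  # payload incomplete; wait for more bytes
            yield buf[4:4 + n]
            buf = buf[4 + n:]

# A frame split across two reads still comes out whole.
assert list(frames([b'\x00\x00\x00\x03a', b'bc'])) == [b'abc']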
def dilate_obs(self, dilation_radius): """ Use a dilation filter to grow positive observation areas by a specified number of grid points :param dilation_radius: Number of times to dilate the grid. :return: """ for s in self.size_thresholds: self.dilated_obs[s] = np.zeros(self.window_obs[self.mrms_variable].shape) for t in range(self.dilated_obs[s].shape[0]): self.dilated_obs[s][t][binary_dilation(self.window_obs[self.mrms_variable][t] >= s, iterations=dilation_radius)] = 1
def function[dilate_obs, parameter[self, dilation_radius]]: constant[ Use a dilation filter to grow positive observation areas by a specified number of grid points :param dilation_radius: Number of times to dilate the grid. :return: ] for taget[name[s]] in starred[name[self].size_thresholds] begin[:] call[name[self].dilated_obs][name[s]] assign[=] call[name[np].zeros, parameter[call[name[self].window_obs][name[self].mrms_variable].shape]] for taget[name[t]] in starred[call[name[range], parameter[call[call[name[self].dilated_obs][name[s]].shape][constant[0]]]]] begin[:] call[call[call[name[self].dilated_obs][name[s]]][name[t]]][call[name[binary_dilation], parameter[compare[call[call[name[self].window_obs][name[self].mrms_variable]][name[t]] greater_or_equal[>=] name[s]]]]] assign[=] constant[1]
keyword[def] identifier[dilate_obs] ( identifier[self] , identifier[dilation_radius] ): literal[string] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[size_thresholds] : identifier[self] . identifier[dilated_obs] [ identifier[s] ]= identifier[np] . identifier[zeros] ( identifier[self] . identifier[window_obs] [ identifier[self] . identifier[mrms_variable] ]. identifier[shape] ) keyword[for] identifier[t] keyword[in] identifier[range] ( identifier[self] . identifier[dilated_obs] [ identifier[s] ]. identifier[shape] [ literal[int] ]): identifier[self] . identifier[dilated_obs] [ identifier[s] ][ identifier[t] ][ identifier[binary_dilation] ( identifier[self] . identifier[window_obs] [ identifier[self] . identifier[mrms_variable] ][ identifier[t] ]>= identifier[s] , identifier[iterations] = identifier[dilation_radius] )]= literal[int]
def dilate_obs(self, dilation_radius): """ Use a dilation filter to grow positive observation areas by a specified number of grid points :param dilation_radius: Number of times to dilate the grid. :return: """ for s in self.size_thresholds: self.dilated_obs[s] = np.zeros(self.window_obs[self.mrms_variable].shape) for t in range(self.dilated_obs[s].shape[0]): self.dilated_obs[s][t][binary_dilation(self.window_obs[self.mrms_variable][t] >= s, iterations=dilation_radius)] = 1 # depends on [control=['for'], data=['t']] # depends on [control=['for'], data=['s']]
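A small hedged sketch of the threshold-then-dilate step on a toy grid, using the same scipy.ndimage.binary_dilation call the method relies on (the grid and the 25-unit threshold are invented).

import numpy as np
from scipy.ndimage import binary_dilation

obs = np.zeros((5, 5))
obs[2, 2] = 30.0                        # one cell exceeding the threshold
mask = binary_dilation(obs >= 25, iterations=1)
dilated = np.zeros_like(obs)
dilated[mask] = 1                       # positive area grown by one grid point
assert dilated.sum() == 5               # center plus its 4-connected neighbours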
def split_root(self): """Splits this path into a pair (drive, location). Note that, because all paths are normalized, a root of ``'.'`` will be returned for relative paths. """ if not PY3 and hasattr(self._lib, 'splitunc'): root, rest = self._lib.splitunc(self.path) if root: if rest.startswith(self._sep): root += self._sep rest = rest[1:] return self.__class__(root), self.__class__(rest) root, rest = self._lib.splitdrive(self.path) if root: if rest.startswith(self._sep): root += self._sep rest = rest[1:] return self.__class__(root), self.__class__(rest) if self.path.startswith(self._sep): return self.__class__(self._sep), self.__class__(rest[1:]) return self.__class__(''), self
def function[split_root, parameter[self]]: constant[Splits this path into a pair (drive, location). Note that, because all paths are normalized, a root of ``'.'`` will be returned for relative paths. ] if <ast.BoolOp object at 0x7da1b2879000> begin[:] <ast.Tuple object at 0x7da1b2878cd0> assign[=] call[name[self]._lib.splitunc, parameter[name[self].path]] if name[root] begin[:] if call[name[rest].startswith, parameter[name[self]._sep]] begin[:] <ast.AugAssign object at 0x7da1b2878040> variable[rest] assign[=] call[name[rest]][<ast.Slice object at 0x7da1b28785e0>] return[tuple[[<ast.Call object at 0x7da1b2878fd0>, <ast.Call object at 0x7da1b287aec0>]]] <ast.Tuple object at 0x7da1b287b790> assign[=] call[name[self]._lib.splitdrive, parameter[name[self].path]] if name[root] begin[:] if call[name[rest].startswith, parameter[name[self]._sep]] begin[:] <ast.AugAssign object at 0x7da1b287b8e0> variable[rest] assign[=] call[name[rest]][<ast.Slice object at 0x7da1b287a3e0>] return[tuple[[<ast.Call object at 0x7da1b2878370>, <ast.Call object at 0x7da1b287b370>]]] if call[name[self].path.startswith, parameter[name[self]._sep]] begin[:] return[tuple[[<ast.Call object at 0x7da1b287a560>, <ast.Call object at 0x7da1b2879f90>]]] return[tuple[[<ast.Call object at 0x7da1b271ece0>, <ast.Name object at 0x7da1b27806a0>]]]
keyword[def] identifier[split_root] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[PY3] keyword[and] identifier[hasattr] ( identifier[self] . identifier[_lib] , literal[string] ): identifier[root] , identifier[rest] = identifier[self] . identifier[_lib] . identifier[splitunc] ( identifier[self] . identifier[path] ) keyword[if] identifier[root] : keyword[if] identifier[rest] . identifier[startswith] ( identifier[self] . identifier[_sep] ): identifier[root] += identifier[self] . identifier[_sep] identifier[rest] = identifier[rest] [ literal[int] :] keyword[return] identifier[self] . identifier[__class__] ( identifier[root] ), identifier[self] . identifier[__class__] ( identifier[rest] ) identifier[root] , identifier[rest] = identifier[self] . identifier[_lib] . identifier[splitdrive] ( identifier[self] . identifier[path] ) keyword[if] identifier[root] : keyword[if] identifier[rest] . identifier[startswith] ( identifier[self] . identifier[_sep] ): identifier[root] += identifier[self] . identifier[_sep] identifier[rest] = identifier[rest] [ literal[int] :] keyword[return] identifier[self] . identifier[__class__] ( identifier[root] ), identifier[self] . identifier[__class__] ( identifier[rest] ) keyword[if] identifier[self] . identifier[path] . identifier[startswith] ( identifier[self] . identifier[_sep] ): keyword[return] identifier[self] . identifier[__class__] ( identifier[self] . identifier[_sep] ), identifier[self] . identifier[__class__] ( identifier[rest] [ literal[int] :]) keyword[return] identifier[self] . identifier[__class__] ( literal[string] ), identifier[self]
def split_root(self): """Splits this path into a pair (drive, location). Note that, because all paths are normalized, a root of ``'.'`` will be returned for relative paths. """ if not PY3 and hasattr(self._lib, 'splitunc'): (root, rest) = self._lib.splitunc(self.path) if root: if rest.startswith(self._sep): root += self._sep rest = rest[1:] # depends on [control=['if'], data=[]] return (self.__class__(root), self.__class__(rest)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] (root, rest) = self._lib.splitdrive(self.path) if root: if rest.startswith(self._sep): root += self._sep rest = rest[1:] # depends on [control=['if'], data=[]] return (self.__class__(root), self.__class__(rest)) # depends on [control=['if'], data=[]] if self.path.startswith(self._sep): return (self.__class__(self._sep), self.__class__(rest[1:])) # depends on [control=['if'], data=[]] return (self.__class__(''), self)
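For intuition, a hedged standalone look at the drive/rest split the method builds on, via `ntpath.splitdrive` directly rather than the class's `self._lib` indirection; the method additionally moves the leading separator onto the root.

import ntpath

root, rest = ntpath.splitdrive('C:\\Users\\demo')
assert (root, rest) == ('C:', '\\Users\\demo')
# split_root() would then shift the separator, yielding ('C:\\', 'Users\\demo'):
if rest.startswith('\\'):
    root, rest = root + '\\', rest[1:]
assert (root, rest) == ('C:\\', 'Users\\demo')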
def RMSE(x1, x2=-1):
    """
    Root-mean-square error. This function accepts either two data series,
    or a single error series directly.

    **Args:**

    * `x1` - first data series, or the error series itself (1d array)

    **Kwargs:**

    * `x2` - second data series (1d array); required only if `x1` was a
      data series rather than the error itself

    **Returns:**

    * `e` - RMSE (float), computed directly from `x1`,
      or from the difference of `x1` and `x2`

    """
    e = get_valid_error(x1, x2)
    return np.sqrt(np.dot(e, e) / float(len(e)))
def function[RMSE, parameter[x1, x2]]: constant[ Root-mean-square error - this function accepts two series of data or directly one series with error. **Args:** * `x1` - first data series or error (1d array) **Kwargs:** * `x2` - second series (1d array) if first series was not error directly,\ then this should be the second series **Returns:** * `e` - RMSE of error (float) obtained directly from `x1`, \ or as a difference of `x1` and `x2` ] variable[e] assign[=] call[name[get_valid_error], parameter[name[x1], name[x2]]] return[call[name[np].sqrt, parameter[binary_operation[call[name[np].dot, parameter[name[e], name[e]]] / call[name[float], parameter[call[name[len], parameter[name[e]]]]]]]]]
keyword[def] identifier[RMSE] ( identifier[x1] , identifier[x2] =- literal[int] ): literal[string] identifier[e] = identifier[get_valid_error] ( identifier[x1] , identifier[x2] ) keyword[return] identifier[np] . identifier[sqrt] ( identifier[np] . identifier[dot] ( identifier[e] , identifier[e] )/ identifier[float] ( identifier[len] ( identifier[e] )))
def RMSE(x1, x2=-1):
    """
    Root-mean-square error. This function accepts either two data series,
    or a single error series directly.

    **Args:**

    * `x1` - first data series, or the error series itself (1d array)

    **Kwargs:**

    * `x2` - second data series (1d array); required only if `x1` was a
      data series rather than the error itself

    **Returns:**

    * `e` - RMSE (float), computed directly from `x1`,
      or from the difference of `x1` and `x2`

    """
    e = get_valid_error(x1, x2)
    return np.sqrt(np.dot(e, e) / float(len(e)))
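A worked check of the formula, hedged in that it bypasses the library's `get_valid_error` helper and feeds an error vector directly.

import numpy as np

e = np.array([1.0, -1.0, 2.0, -2.0])            # error vector
rmse = np.sqrt(np.dot(e, e) / float(len(e)))    # sqrt(10 / 4) ~= 1.5811
assert abs(rmse - np.sqrt(2.5)) < 1e-12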
def _genLoggingFilePath(): """ Generate a filepath for the calling app """ appName = os.path.splitext(os.path.basename(sys.argv[0]))[0] or 'UnknownApp' appLogDir = os.path.abspath(os.path.join( os.environ['NTA_LOG_DIR'], 'numenta-logs-%s' % (os.environ['USER'],), appName)) appLogFileName = '%s-%s-%s.log' % ( appName, long(time.mktime(time.gmtime())), os.getpid()) return os.path.join(appLogDir, appLogFileName)
def function[_genLoggingFilePath, parameter[]]: constant[ Generate a filepath for the calling app ] variable[appName] assign[=] <ast.BoolOp object at 0x7da18dc07790> variable[appLogDir] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.join, parameter[call[name[os].environ][constant[NTA_LOG_DIR]], binary_operation[constant[numenta-logs-%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da18dc06f80>]]], name[appName]]]]] variable[appLogFileName] assign[=] binary_operation[constant[%s-%s-%s.log] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18dc04fa0>, <ast.Call object at 0x7da18dc04280>, <ast.Call object at 0x7da18dc05300>]]] return[call[name[os].path.join, parameter[name[appLogDir], name[appLogFileName]]]]
keyword[def] identifier[_genLoggingFilePath] (): literal[string] identifier[appName] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[sys] . identifier[argv] [ literal[int] ]))[ literal[int] ] keyword[or] literal[string] identifier[appLogDir] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[environ] [ literal[string] ], literal[string] %( identifier[os] . identifier[environ] [ literal[string] ],), identifier[appName] )) identifier[appLogFileName] = literal[string] %( identifier[appName] , identifier[long] ( identifier[time] . identifier[mktime] ( identifier[time] . identifier[gmtime] ())), identifier[os] . identifier[getpid] ()) keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[appLogDir] , identifier[appLogFileName] )
def _genLoggingFilePath(): """ Generate a filepath for the calling app """ appName = os.path.splitext(os.path.basename(sys.argv[0]))[0] or 'UnknownApp' appLogDir = os.path.abspath(os.path.join(os.environ['NTA_LOG_DIR'], 'numenta-logs-%s' % (os.environ['USER'],), appName)) appLogFileName = '%s-%s-%s.log' % (appName, long(time.mktime(time.gmtime())), os.getpid()) return os.path.join(appLogDir, appLogFileName)
def _draw_cursor(self, dc, grid, row, col, pen=None, brush=None):
    """Draws the cursor as caret marks in the four corners of the cell"""

    # If in full screen mode draw no cursor
    if grid.main_window.IsFullScreen():
        return

    key = row, col, grid.current_table
    rect = grid.CellToRect(row, col)
    rect = self.get_merged_rect(grid, key, rect)

    # Check if cell is invisible
    if rect is None:
        return

    size = self.get_zoomed_size(1.0)

    caret_length = int(min([rect.width, rect.height]) / 5.0)

    color = get_color(config["text_color"])
    if pen is None:
        pen = wx.Pen(color)
    if brush is None:
        brush = wx.Brush(color)

    pen.SetWidth(size)

    # Inner borders of the cell, adjusted for the pen size
    border_left = rect.x + size - 1
    border_right = rect.x + rect.width - size - 1
    border_upper = rect.y + size - 1
    border_lower = rect.y + rect.height - size - 1

    points_lr = [
        (border_right, border_lower - caret_length),
        (border_right, border_lower),
        (border_right - caret_length, border_lower),
        (border_right, border_lower),
    ]

    points_ur = [
        (border_right, border_upper + caret_length),
        (border_right, border_upper),
        (border_right - caret_length, border_upper),
        (border_right, border_upper),
    ]

    points_ul = [
        (border_left, border_upper + caret_length),
        (border_left, border_upper),
        (border_left + caret_length, border_upper),
        (border_left, border_upper),
    ]

    points_ll = [
        (border_left, border_lower - caret_length),
        (border_left, border_lower),
        (border_left + caret_length, border_lower),
        (border_left, border_lower),
    ]

    point_list = [points_lr, points_ur, points_ul, points_ll]

    dc.DrawPolygonList(point_list, pens=pen, brushes=brush)

    self.old_cursor_row_col = row, col
def function[_draw_cursor, parameter[self, dc, grid, row, col, pen, brush]]: constant[Draws cursor as Rectangle in lower right corner] if call[name[grid].main_window.IsFullScreen, parameter[]] begin[:] return[None] variable[key] assign[=] tuple[[<ast.Name object at 0x7da1b17204f0>, <ast.Name object at 0x7da1b1721330>, <ast.Attribute object at 0x7da1b17219f0>]] variable[rect] assign[=] call[name[grid].CellToRect, parameter[name[row], name[col]]] variable[rect] assign[=] call[name[self].get_merged_rect, parameter[name[grid], name[key], name[rect]]] if compare[name[rect] is constant[None]] begin[:] return[None] variable[size] assign[=] call[name[self].get_zoomed_size, parameter[constant[1.0]]] variable[caret_length] assign[=] call[name[int], parameter[binary_operation[call[name[min], parameter[list[[<ast.Attribute object at 0x7da1b1721ea0>, <ast.Attribute object at 0x7da1b1723100>]]]] / constant[5.0]]]] variable[color] assign[=] call[name[get_color], parameter[call[name[config]][constant[text_color]]]] if compare[name[pen] is constant[None]] begin[:] variable[pen] assign[=] call[name[wx].Pen, parameter[name[color]]] if compare[name[brush] is constant[None]] begin[:] variable[brush] assign[=] call[name[wx].Brush, parameter[name[color]]] call[name[pen].SetWidth, parameter[name[size]]] variable[border_left] assign[=] binary_operation[binary_operation[name[rect].x + name[size]] - constant[1]] variable[border_right] assign[=] binary_operation[binary_operation[binary_operation[name[rect].x + name[rect].width] - name[size]] - constant[1]] variable[border_upper] assign[=] binary_operation[binary_operation[name[rect].y + name[size]] - constant[1]] variable[border_lower] assign[=] binary_operation[binary_operation[binary_operation[name[rect].y + name[rect].height] - name[size]] - constant[1]] variable[points_lr] assign[=] list[[<ast.Tuple object at 0x7da1b1617af0>, <ast.Tuple object at 0x7da1b1617e20>, <ast.Tuple object at 0x7da1b1617a90>, <ast.Tuple object at 0x7da1b1615900>]] variable[points_ur] assign[=] list[[<ast.Tuple object at 0x7da1b1617fd0>, <ast.Tuple object at 0x7da1b1617c10>, <ast.Tuple object at 0x7da1b16170a0>, <ast.Tuple object at 0x7da1b16161a0>]] variable[points_ul] assign[=] list[[<ast.Tuple object at 0x7da1b16162f0>, <ast.Tuple object at 0x7da1b1617e50>, <ast.Tuple object at 0x7da1b1615d80>, <ast.Tuple object at 0x7da1b1616a10>]] variable[points_ll] assign[=] list[[<ast.Tuple object at 0x7da1b1616aa0>, <ast.Tuple object at 0x7da1b1617430>, <ast.Tuple object at 0x7da1b16172b0>, <ast.Tuple object at 0x7da1b1616a70>]] variable[point_list] assign[=] list[[<ast.Name object at 0x7da1b1617610>, <ast.Name object at 0x7da1b1616b60>, <ast.Name object at 0x7da1b1616440>, <ast.Name object at 0x7da1b1615b10>]] call[name[dc].DrawPolygonList, parameter[name[point_list]]] name[self].old_cursor_row_col assign[=] tuple[[<ast.Name object at 0x7da1b1617130>, <ast.Name object at 0x7da1b1616770>]]
keyword[def] identifier[_draw_cursor] ( identifier[self] , identifier[dc] , identifier[grid] , identifier[row] , identifier[col] , identifier[pen] = keyword[None] , identifier[brush] = keyword[None] ): literal[string] keyword[if] identifier[grid] . identifier[main_window] . identifier[IsFullScreen] (): keyword[return] identifier[key] = identifier[row] , identifier[col] , identifier[grid] . identifier[current_table] identifier[rect] = identifier[grid] . identifier[CellToRect] ( identifier[row] , identifier[col] ) identifier[rect] = identifier[self] . identifier[get_merged_rect] ( identifier[grid] , identifier[key] , identifier[rect] ) keyword[if] identifier[rect] keyword[is] keyword[None] : keyword[return] identifier[size] = identifier[self] . identifier[get_zoomed_size] ( literal[int] ) identifier[caret_length] = identifier[int] ( identifier[min] ([ identifier[rect] . identifier[width] , identifier[rect] . identifier[height] ])/ literal[int] ) identifier[color] = identifier[get_color] ( identifier[config] [ literal[string] ]) keyword[if] identifier[pen] keyword[is] keyword[None] : identifier[pen] = identifier[wx] . identifier[Pen] ( identifier[color] ) keyword[if] identifier[brush] keyword[is] keyword[None] : identifier[brush] = identifier[wx] . identifier[Brush] ( identifier[color] ) identifier[pen] . identifier[SetWidth] ( identifier[size] ) identifier[border_left] = identifier[rect] . identifier[x] + identifier[size] - literal[int] identifier[border_right] = identifier[rect] . identifier[x] + identifier[rect] . identifier[width] - identifier[size] - literal[int] identifier[border_upper] = identifier[rect] . identifier[y] + identifier[size] - literal[int] identifier[border_lower] = identifier[rect] . identifier[y] + identifier[rect] . identifier[height] - identifier[size] - literal[int] identifier[points_lr] =[ ( identifier[border_right] , identifier[border_lower] - identifier[caret_length] ), ( identifier[border_right] , identifier[border_lower] ), ( identifier[border_right] - identifier[caret_length] , identifier[border_lower] ), ( identifier[border_right] , identifier[border_lower] ), ] identifier[points_ur] =[ ( identifier[border_right] , identifier[border_upper] + identifier[caret_length] ), ( identifier[border_right] , identifier[border_upper] ), ( identifier[border_right] - identifier[caret_length] , identifier[border_upper] ), ( identifier[border_right] , identifier[border_upper] ), ] identifier[points_ul] =[ ( identifier[border_left] , identifier[border_upper] + identifier[caret_length] ), ( identifier[border_left] , identifier[border_upper] ), ( identifier[border_left] + identifier[caret_length] , identifier[border_upper] ), ( identifier[border_left] , identifier[border_upper] ), ] identifier[points_ll] =[ ( identifier[border_left] , identifier[border_lower] - identifier[caret_length] ), ( identifier[border_left] , identifier[border_lower] ), ( identifier[border_left] + identifier[caret_length] , identifier[border_lower] ), ( identifier[border_left] , identifier[border_lower] ), ] identifier[point_list] =[ identifier[points_lr] , identifier[points_ur] , identifier[points_ul] , identifier[points_ll] ] identifier[dc] . identifier[DrawPolygonList] ( identifier[point_list] , identifier[pens] = identifier[pen] , identifier[brushes] = identifier[brush] ) identifier[self] . identifier[old_cursor_row_col] = identifier[row] , identifier[col]
def _draw_cursor(self, dc, grid, row, col, pen=None, brush=None): """Draws cursor as Rectangle in lower right corner""" # If in full screen mode draw no cursor if grid.main_window.IsFullScreen(): return # depends on [control=['if'], data=[]] key = (row, col, grid.current_table) rect = grid.CellToRect(row, col) rect = self.get_merged_rect(grid, key, rect) # Check if cell is invisible if rect is None: return # depends on [control=['if'], data=[]] size = self.get_zoomed_size(1.0) caret_length = int(min([rect.width, rect.height]) / 5.0) color = get_color(config['text_color']) if pen is None: pen = wx.Pen(color) # depends on [control=['if'], data=['pen']] if brush is None: brush = wx.Brush(color) # depends on [control=['if'], data=['brush']] pen.SetWidth(size) # Inner right and lower borders border_left = rect.x + size - 1 border_right = rect.x + rect.width - size - 1 border_upper = rect.y + size - 1 border_lower = rect.y + rect.height - size - 1 points_lr = [(border_right, border_lower - caret_length), (border_right, border_lower), (border_right - caret_length, border_lower), (border_right, border_lower)] points_ur = [(border_right, border_upper + caret_length), (border_right, border_upper), (border_right - caret_length, border_upper), (border_right, border_upper)] points_ul = [(border_left, border_upper + caret_length), (border_left, border_upper), (border_left + caret_length, border_upper), (border_left, border_upper)] points_ll = [(border_left, border_lower - caret_length), (border_left, border_lower), (border_left + caret_length, border_lower), (border_left, border_lower)] point_list = [points_lr, points_ur, points_ul, points_ll] dc.DrawPolygonList(point_list, pens=pen, brushes=brush) self.old_cursor_row_col = (row, col)
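A minimal standalone sketch of the corner-caret geometry used by `_draw_cursor` above, assuming a plain (x, y, width, height) rectangle in place of the wx rect; each point list traces a short L-shaped caret at one inner corner of the cell:

def caret_polygons(x, y, width, height, size=1):
    """Return four point lists marking the inner corners of a cell rect."""
    caret_length = int(min(width, height) / 5.0)
    left = x + size - 1
    right = x + width - size - 1
    upper = y + size - 1
    lower = y + height - size - 1
    return [
        [(right, lower - caret_length), (right, lower),
         (right - caret_length, lower), (right, lower)],  # lower right
        [(right, upper + caret_length), (right, upper),
         (right - caret_length, upper), (right, upper)],  # upper right
        [(left, upper + caret_length), (left, upper),
         (left + caret_length, upper), (left, upper)],    # upper left
        [(left, lower - caret_length), (left, lower),
         (left + caret_length, lower), (left, lower)],    # lower left
    ]

print(caret_polygons(0, 0, 50, 20))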
def locateChild(self, ctx, segments): """ Retrieve a L{SharingIndex} for a particular user, or rend.NotFound. """ store = _storeFromUsername( self.loginSystem.store, segments[0].decode('utf-8')) if store is None: return rend.NotFound return (SharingIndex(store, self.webViewer), segments[1:])
def function[locateChild, parameter[self, ctx, segments]]: constant[ Retrieve a L{SharingIndex} for a particular user, or rend.NotFound. ] variable[store] assign[=] call[name[_storeFromUsername], parameter[name[self].loginSystem.store, call[call[name[segments]][constant[0]].decode, parameter[constant[utf-8]]]]] if compare[name[store] is constant[None]] begin[:] return[name[rend].NotFound] return[tuple[[<ast.Call object at 0x7da1b0a221a0>, <ast.Subscript object at 0x7da1b0a233a0>]]]
keyword[def] identifier[locateChild] ( identifier[self] , identifier[ctx] , identifier[segments] ): literal[string] identifier[store] = identifier[_storeFromUsername] ( identifier[self] . identifier[loginSystem] . identifier[store] , identifier[segments] [ literal[int] ]. identifier[decode] ( literal[string] )) keyword[if] identifier[store] keyword[is] keyword[None] : keyword[return] identifier[rend] . identifier[NotFound] keyword[return] ( identifier[SharingIndex] ( identifier[store] , identifier[self] . identifier[webViewer] ), identifier[segments] [ literal[int] :])
def locateChild(self, ctx, segments): """ Retrieve a L{SharingIndex} for a particular user, or rend.NotFound. """ store = _storeFromUsername(self.loginSystem.store, segments[0].decode('utf-8')) if store is None: return rend.NotFound # depends on [control=['if'], data=[]] return (SharingIndex(store, self.webViewer), segments[1:])
def createExperimentArgs(): """Run the basic probability of false positives experiment.""" experimentArguments = [] # for n in [300, 500, 700, 900, 1100, 1300, 1500, 1700, 1900, 2100, 2300, # 2500, 2700, 2900, 3100, 3300, 3500, 3700, 3900]: for n in [1500, 1700, 1900, 2100]: for a in [128]: # Some parameter combinations are just not worth running! if ( a==64 and n<=1500 ) or ( a==128 and n<= 1900 ) or ( a==256 ): experimentArguments.append( ("./sdr_calculations2", "results_errorbars/temp_"+str(n)+"_"+str(a)+".csv", "200000", str(n), str(a), "0"), ) return experimentArguments
def function[createExperimentArgs, parameter[]]: constant[Run the basic probability of false positives experiment.] variable[experimentArguments] assign[=] list[[]] for taget[name[n]] in starred[list[[<ast.Constant object at 0x7da1b0832d70>, <ast.Constant object at 0x7da1b0832d10>, <ast.Constant object at 0x7da1b0832ce0>, <ast.Constant object at 0x7da1b0832cb0>]]] begin[:] for taget[name[a]] in starred[list[[<ast.Constant object at 0x7da1b0833520>]]] begin[:] if <ast.BoolOp object at 0x7da1b08334c0> begin[:] call[name[experimentArguments].append, parameter[tuple[[<ast.Constant object at 0x7da1b08314b0>, <ast.BinOp object at 0x7da1b0830970>, <ast.Constant object at 0x7da1b0831600>, <ast.Call object at 0x7da1b0831630>, <ast.Call object at 0x7da1b0831750>, <ast.Constant object at 0x7da1b0832590>]]]] return[name[experimentArguments]]
keyword[def] identifier[createExperimentArgs] (): literal[string] identifier[experimentArguments] =[] keyword[for] identifier[n] keyword[in] [ literal[int] , literal[int] , literal[int] , literal[int] ]: keyword[for] identifier[a] keyword[in] [ literal[int] ]: keyword[if] ( identifier[a] == literal[int] keyword[and] identifier[n] <= literal[int] ) keyword[or] ( identifier[a] == literal[int] keyword[and] identifier[n] <= literal[int] ) keyword[or] ( identifier[a] == literal[int] ): identifier[experimentArguments] . identifier[append] ( ( literal[string] , literal[string] + identifier[str] ( identifier[n] )+ literal[string] + identifier[str] ( identifier[a] )+ literal[string] , literal[string] , identifier[str] ( identifier[n] ), identifier[str] ( identifier[a] ), literal[string] ), ) keyword[return] identifier[experimentArguments]
def createExperimentArgs(): """Run the basic probability of false positives experiment.""" experimentArguments = [] # for n in [300, 500, 700, 900, 1100, 1300, 1500, 1700, 1900, 2100, 2300, # 2500, 2700, 2900, 3100, 3300, 3500, 3700, 3900]: for n in [1500, 1700, 1900, 2100]: for a in [128]: # Some parameter combinations are just not worth running! if a == 64 and n <= 1500 or (a == 128 and n <= 1900) or a == 256: experimentArguments.append(('./sdr_calculations2', 'results_errorbars/temp_' + str(n) + '_' + str(a) + '.csv', '200000', str(n), str(a), '0')) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']] # depends on [control=['for'], data=['n']] return experimentArguments
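The tuples above are argv lists ready to hand to a process launcher; a sketch of how a runner might consume them (the `./sdr_calculations2` binary is assumed to exist elsewhere, so the actual launch call is left commented out):

import subprocess

for args in createExperimentArgs():
    # Each tuple is one argv list: (binary, output_csv, trials, n, a, seed).
    print(' '.join(args))
    # subprocess.check_call(args)  # would launch one experiment run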
def _compute_raw_moments(self, n_counter, k_counter):
    # The symbols for expectations are simply the first order raw moments.
    """
    :param n_counter: a list of :class:`~means.core.descriptors.Moment`\s representing central moments
    :type n_counter: list[:class:`~means.core.descriptors.Moment`]
    :param k_counter: a list of :class:`~means.core.descriptors.Moment`\s representing raw moments
    :type k_counter: list[:class:`~means.core.descriptors.Moment`]
    :return: a vector of parametric expression for raw moments
    """
    expectation_symbols = [pm.symbol for pm in k_counter if pm.order == 1]

    n_species = len(expectation_symbols)

    # The covariance expressed in terms of central moment symbols (typically, yxNs, where N is an integer)
    covariance_matrix = sp.Matrix(n_species,n_species, lambda x,y: self._get_covariance_symbol(n_counter,x,y))

    # Variances are the diagonal of the covariance matrix
    variance_symbols = [covariance_matrix[i, i] for i in range(n_species)]

    # :math: '\log Var(x_i) = \log(1 + \frac{Var(x_i)}{\mathbb{E}(x_i)^2})'
    log_variance_symbols = sp.Matrix([sp.log(sp.Integer(1) + v/(e ** sp.Integer(2))) for e,v in zip(expectation_symbols, variance_symbols)])

    # :math: '\log\mathbb{E}(x_i) = \log(\mathbb{E}(x_i)) - \frac{\log Var(x_i)}{2}'
    log_expectation_symbols = sp.Matrix([sp.log(e) - lv/sp.Integer(2) for e,lv in zip(expectation_symbols, log_variance_symbols)])

    # Assign log variance symbols on the diagonal of size n_species by n_species
    log_variance_mat = sp.Matrix(n_species,n_species, lambda x,y: log_variance_symbols[x] if x == y else 0)

    # Assign log covariances and log variances in the matrix log_covariance matrix based on matrix indices
    log_covariance_matrix = sp.Matrix(n_species,n_species, lambda x, y: self._get_log_covariance(log_variance_mat, log_expectation_symbols, covariance_matrix, x, y))

    # The n_vectors (e.g. [0,2,0]) of the central moments
    pm_n_vecs = [sp.Matrix(pm.n_vector) for pm in n_counter if pm.order > 1]

    out_mat = sp.Matrix([n.T * (log_covariance_matrix * n) / sp.Integer(2) + n.T * log_expectation_symbols for n in pm_n_vecs])

    # return the exponential of all values
    out_mat = out_mat.applyfunc(lambda x: sp.exp(x))

    return out_mat
def function[_compute_raw_moments, parameter[self, n_counter, k_counter]]: constant[ :param n_counter: a list of :class:`~means.core.descriptors.Moment`\s representing central moments :type n_counter: list[:class:`~means.core.descriptors.Moment`] :param k_counter: a list of :class:`~means.core.descriptors.Moment`\s representing raw moments :type k_counter: list[:class:`~means.core.descriptors.Moment`] :return: a vector of parametric expression for raw moments ] variable[expectation_symbols] assign[=] <ast.ListComp object at 0x7da18f813b50> variable[n_species] assign[=] call[name[len], parameter[name[expectation_symbols]]] variable[covariance_matrix] assign[=] call[name[sp].Matrix, parameter[name[n_species], name[n_species], <ast.Lambda object at 0x7da18f810100>]] variable[variance_symbols] assign[=] <ast.ListComp object at 0x7da18f811e40> variable[log_variance_symbols] assign[=] call[name[sp].Matrix, parameter[<ast.ListComp object at 0x7da18f810640>]] variable[log_expectation_symbols] assign[=] call[name[sp].Matrix, parameter[<ast.ListComp object at 0x7da18f8103a0>]] variable[log_variance_mat] assign[=] call[name[sp].Matrix, parameter[name[n_species], name[n_species], <ast.Lambda object at 0x7da18f8129e0>]] variable[log_covariance_matrix] assign[=] call[name[sp].Matrix, parameter[name[n_species], name[n_species], <ast.Lambda object at 0x7da18f811c30>]] variable[pm_n_vecs] assign[=] <ast.ListComp object at 0x7da18f8111e0> variable[out_mat] assign[=] call[name[sp].Matrix, parameter[<ast.ListComp object at 0x7da18f813490>]] variable[out_mat] assign[=] call[name[out_mat].applyfunc, parameter[<ast.Lambda object at 0x7da18dc06140>]] return[name[out_mat]]
keyword[def] identifier[_compute_raw_moments] ( identifier[self] , identifier[n_counter] , identifier[k_counter] ): literal[string] identifier[expectation_symbols] =[ identifier[pm] . identifier[symbol] keyword[for] identifier[pm] keyword[in] identifier[k_counter] keyword[if] identifier[pm] . identifier[order] == literal[int] ] identifier[n_species] = identifier[len] ( identifier[expectation_symbols] ) identifier[covariance_matrix] = identifier[sp] . identifier[Matrix] ( identifier[n_species] , identifier[n_species] , keyword[lambda] identifier[x] , identifier[y] : identifier[self] . identifier[_get_covariance_symbol] ( identifier[n_counter] , identifier[x] , identifier[y] )) identifier[variance_symbols] =[ identifier[covariance_matrix] [ identifier[i] , identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_species] )] identifier[log_variance_symbols] = identifier[sp] . identifier[Matrix] ([ identifier[sp] . identifier[log] ( identifier[sp] . identifier[Integer] ( literal[int] )+ identifier[v] /( identifier[e] ** identifier[sp] . identifier[Integer] ( literal[int] ))) keyword[for] identifier[e] , identifier[v] keyword[in] identifier[zip] ( identifier[expectation_symbols] , identifier[variance_symbols] )]) identifier[log_expectation_symbols] = identifier[sp] . identifier[Matrix] ([ identifier[sp] . identifier[log] ( identifier[e] )- identifier[lv] / identifier[sp] . identifier[Integer] ( literal[int] ) keyword[for] identifier[e] , identifier[lv] keyword[in] identifier[zip] ( identifier[expectation_symbols] , identifier[log_variance_symbols] )]) identifier[log_variance_mat] = identifier[sp] . identifier[Matrix] ( identifier[n_species] , identifier[n_species] , keyword[lambda] identifier[x] , identifier[y] : identifier[log_variance_symbols] [ identifier[x] ] keyword[if] identifier[x] == identifier[y] keyword[else] literal[int] ) identifier[log_covariance_matrix] = identifier[sp] . identifier[Matrix] ( identifier[n_species] , identifier[n_species] , keyword[lambda] identifier[x] , identifier[y] : identifier[self] . identifier[_get_log_covariance] ( identifier[log_variance_mat] , identifier[log_expectation_symbols] , identifier[covariance_matrix] , identifier[x] , identifier[y] )) identifier[pm_n_vecs] =[ identifier[sp] . identifier[Matrix] ( identifier[pm] . identifier[n_vector] ) keyword[for] identifier[pm] keyword[in] identifier[n_counter] keyword[if] identifier[pm] . identifier[order] > literal[int] ] identifier[out_mat] = identifier[sp] . identifier[Matrix] ([ identifier[n] . identifier[T] *( identifier[log_covariance_matrix] * identifier[n] )/ identifier[sp] . identifier[Integer] ( literal[int] )+ identifier[n] . identifier[T] * identifier[log_expectation_symbols] keyword[for] identifier[n] keyword[in] identifier[pm_n_vecs] ]) identifier[out_mat] = identifier[out_mat] . identifier[applyfunc] ( keyword[lambda] identifier[x] : identifier[sp] . identifier[exp] ( identifier[x] )) keyword[return] identifier[out_mat]
def _compute_raw_moments(self, n_counter, k_counter): # The symbols for expectations are simply the first order raw moments. '\n    :param n_counter: a list of :class:`~means.core.descriptors.Moment`\\s representing central moments\n    :type n_counter: list[:class:`~means.core.descriptors.Moment`]\n    :param k_counter: a list of :class:`~means.core.descriptors.Moment`\\s representing raw moments\n    :type k_counter: list[:class:`~means.core.descriptors.Moment`]\n    :return: a vector of parametric expression for raw moments\n    '
    expectation_symbols = [pm.symbol for pm in k_counter if pm.order == 1]
    n_species = len(expectation_symbols)
    # The covariance expressed in terms of central moment symbols (typically, yxNs, where N is an integer)
    covariance_matrix = sp.Matrix(n_species, n_species, lambda x, y: self._get_covariance_symbol(n_counter, x, y))
    # Variances are the diagonal of the covariance matrix
    variance_symbols = [covariance_matrix[i, i] for i in range(n_species)]
    # :math: '\log Var(x_i) = \log(1 + \frac{Var(x_i)}{\mathbb{E}(x_i)^2})'
    log_variance_symbols = sp.Matrix([sp.log(sp.Integer(1) + v / e ** sp.Integer(2)) for (e, v) in zip(expectation_symbols, variance_symbols)])
    # :math: '\log\mathbb{E}(x_i) = \log(\mathbb{E}(x_i)) - \frac{\log Var(x_i)}{2}'
    log_expectation_symbols = sp.Matrix([sp.log(e) - lv / sp.Integer(2) for (e, lv) in zip(expectation_symbols, log_variance_symbols)])
    # Assign log variance symbols on the diagonal of size n_species by n_species
    log_variance_mat = sp.Matrix(n_species, n_species, lambda x, y: log_variance_symbols[x] if x == y else 0)
    # Assign log covariances and log variances in the matrix log_covariance matrix based on matrix indices
    log_covariance_matrix = sp.Matrix(n_species, n_species, lambda x, y: self._get_log_covariance(log_variance_mat, log_expectation_symbols, covariance_matrix, x, y))
    # The n_vectors (e.g. [0,2,0]) of the central moments
    pm_n_vecs = [sp.Matrix(pm.n_vector) for pm in n_counter if pm.order > 1]
    out_mat = sp.Matrix([n.T * (log_covariance_matrix * n) / sp.Integer(2) + n.T * log_expectation_symbols for n in pm_n_vecs])
    # return the exponential of all values
    out_mat = out_mat.applyfunc(lambda x: sp.exp(x))
    return out_mat
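A quick univariate numerical check of the lognormal moment-matching identities used above, with hypothetical mean/variance values: sigma2 = log(1 + V/E**2) and mu = log(E) - sigma2/2 reproduce E and E**2 + V as the first two raw moments.

import math

E, V = 10.0, 4.0  # hypothetical expectation and variance of one species
log_var = math.log(1.0 + V / E ** 2)    # sigma^2 of the matched lognormal
log_mean = math.log(E) - log_var / 2.0  # mu of the matched lognormal

# Raw moments of a lognormal: E[x**n] = exp(n*mu + n**2*sigma^2/2)
for n in (1, 2):
    print(n, math.exp(n * log_mean + n ** 2 * log_var / 2.0))
# prints approximately 10.0 and 104.0 (= E and E**2 + V)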
def pad_width(model, table_padding=0.85, tabs_padding=1.2): """ Computes the width of a model and sets up appropriate padding for Tabs and DataTable types. """ if isinstance(model, Row): vals = [pad_width(child) for child in model.children] width = np.max([v for v in vals if v is not None]) elif isinstance(model, Column): vals = [pad_width(child) for child in model.children] width = np.sum([v for v in vals if v is not None]) elif isinstance(model, Tabs): vals = [pad_width(t) for t in model.tabs] width = np.max([v for v in vals if v is not None]) for model in model.tabs: model.width = width width = int(tabs_padding*width) elif isinstance(model, DataTable): width = model.width model.width = int(table_padding*width) elif isinstance(model, (WidgetBox, Div)): width = model.width elif model: width = model.plot_width else: width = 0 return width
def function[pad_width, parameter[model, table_padding, tabs_padding]]: constant[ Computes the width of a model and sets up appropriate padding for Tabs and DataTable types. ] if call[name[isinstance], parameter[name[model], name[Row]]] begin[:] variable[vals] assign[=] <ast.ListComp object at 0x7da18f58ce80> variable[width] assign[=] call[name[np].max, parameter[<ast.ListComp object at 0x7da18f58fc40>]] return[name[width]]
keyword[def] identifier[pad_width] ( identifier[model] , identifier[table_padding] = literal[int] , identifier[tabs_padding] = literal[int] ): literal[string] keyword[if] identifier[isinstance] ( identifier[model] , identifier[Row] ): identifier[vals] =[ identifier[pad_width] ( identifier[child] ) keyword[for] identifier[child] keyword[in] identifier[model] . identifier[children] ] identifier[width] = identifier[np] . identifier[max] ([ identifier[v] keyword[for] identifier[v] keyword[in] identifier[vals] keyword[if] identifier[v] keyword[is] keyword[not] keyword[None] ]) keyword[elif] identifier[isinstance] ( identifier[model] , identifier[Column] ): identifier[vals] =[ identifier[pad_width] ( identifier[child] ) keyword[for] identifier[child] keyword[in] identifier[model] . identifier[children] ] identifier[width] = identifier[np] . identifier[sum] ([ identifier[v] keyword[for] identifier[v] keyword[in] identifier[vals] keyword[if] identifier[v] keyword[is] keyword[not] keyword[None] ]) keyword[elif] identifier[isinstance] ( identifier[model] , identifier[Tabs] ): identifier[vals] =[ identifier[pad_width] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[model] . identifier[tabs] ] identifier[width] = identifier[np] . identifier[max] ([ identifier[v] keyword[for] identifier[v] keyword[in] identifier[vals] keyword[if] identifier[v] keyword[is] keyword[not] keyword[None] ]) keyword[for] identifier[model] keyword[in] identifier[model] . identifier[tabs] : identifier[model] . identifier[width] = identifier[width] identifier[width] = identifier[int] ( identifier[tabs_padding] * identifier[width] ) keyword[elif] identifier[isinstance] ( identifier[model] , identifier[DataTable] ): identifier[width] = identifier[model] . identifier[width] identifier[model] . identifier[width] = identifier[int] ( identifier[table_padding] * identifier[width] ) keyword[elif] identifier[isinstance] ( identifier[model] ,( identifier[WidgetBox] , identifier[Div] )): identifier[width] = identifier[model] . identifier[width] keyword[elif] identifier[model] : identifier[width] = identifier[model] . identifier[plot_width] keyword[else] : identifier[width] = literal[int] keyword[return] identifier[width]
def pad_width(model, table_padding=0.85, tabs_padding=1.2): """ Computes the width of a model and sets up appropriate padding for Tabs and DataTable types. """ if isinstance(model, Row): vals = [pad_width(child) for child in model.children] width = np.max([v for v in vals if v is not None]) # depends on [control=['if'], data=[]] elif isinstance(model, Column): vals = [pad_width(child) for child in model.children] width = np.sum([v for v in vals if v is not None]) # depends on [control=['if'], data=[]] elif isinstance(model, Tabs): vals = [pad_width(t) for t in model.tabs] width = np.max([v for v in vals if v is not None]) for model in model.tabs: model.width = width width = int(tabs_padding * width) # depends on [control=['for'], data=['model']] # depends on [control=['if'], data=[]] elif isinstance(model, DataTable): width = model.width model.width = int(table_padding * width) # depends on [control=['if'], data=[]] elif isinstance(model, (WidgetBox, Div)): width = model.width # depends on [control=['if'], data=[]] elif model: width = model.plot_width # depends on [control=['if'], data=[]] else: width = 0 return width
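The sizing rules above reduce to max-over-row, sum-over-column, and padding factors for tabs and tables; a toy trace with plain numbers standing in for bokeh models:

import numpy as np

table_padding, tabs_padding = 0.85, 1.2

row_children = [300, 400, None]  # a Row takes the widest child
col_children = [300, 400]        # a Column sums its children
tab_widths = [500, 450]          # Tabs take the max, then pad

print(np.max([v for v in row_children if v is not None]))  # 400
print(np.sum([v for v in col_children if v is not None]))  # 700
print(int(tabs_padding * np.max(tab_widths)))              # 600
print(int(table_padding * 800))  # a DataTable of width 800 renders at 680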
def next_state(self):
    """This is a method that will be called when the time remaining
    ends. The current state can be: roasting, cooling, idle, sleeping,
    connecting, or unknown."""
    if(self.roaster.get_roaster_state() == 'roasting'):
        self.roaster.time_remaining = 20
        self.roaster.cool()
    elif(self.roaster.get_roaster_state() == 'cooling'):
        self.roaster.idle()
def function[next_state, parameter[self]]: constant[This is a method that will be called when the time remaining ends. The current state can be: roasting, cooling, idle, sleeping, connecting, or unknown.] if compare[call[name[self].roaster.get_roaster_state, parameter[]] equal[==] constant[roasting]] begin[:] name[self].roaster.time_remaining assign[=] constant[20] call[name[self].roaster.cool, parameter[]]
keyword[def] identifier[next_state] ( identifier[self] ): literal[string] keyword[if] ( identifier[self] . identifier[roaster] . identifier[get_roaster_state] ()== literal[string] ): identifier[self] . identifier[roaster] . identifier[time_remaining] = literal[int] identifier[self] . identifier[roaster] . identifier[cool] () keyword[elif] ( identifier[self] . identifier[roaster] . identifier[get_roaster_state] ()== literal[string] ): identifier[self] . identifier[roaster] . identifier[idle] ()
def next_state(self):
    """This is a method that will be called when the time remaining
    ends. The current state can be: roasting, cooling, idle, sleeping,
    connecting, or unknown."""
    if self.roaster.get_roaster_state() == 'roasting':
        self.roaster.time_remaining = 20
        self.roaster.cool() # depends on [control=['if'], data=[]]
    elif self.roaster.get_roaster_state() == 'cooling':
        self.roaster.idle() # depends on [control=['if'], data=[]]
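A toy stand-in for the roaster object (the real one comes from the surrounding library; the names below are assumptions) shows the roasting -> cooling -> idle sequence when the transition fires twice:

class FakeRoaster:
    """Illustrative fake, not the real roaster class."""
    def __init__(self):
        self.state = 'roasting'
        self.time_remaining = 0

    def get_roaster_state(self):
        return self.state

    def cool(self):
        self.state = 'cooling'

    def idle(self):
        self.state = 'idle'

class Recipe:
    def __init__(self, roaster):
        self.roaster = roaster

    next_state = next_state  # reuse the method defined above (sketch)

r = Recipe(FakeRoaster())
r.next_state()
print(r.roaster.state, r.roaster.time_remaining)  # cooling 20
r.next_state()
print(r.roaster.state)  # idle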
def dt_to_timestamp(dt):
    """Converts from a :class:`~datetime.datetime` object to an integer
    timestamp, suitable for interoperation with :func:`time.time` and
    other `Epoch-based timestamps`.

    .. _Epoch-based timestamps: https://en.wikipedia.org/wiki/Unix_time

    >>> abs(round(time.time() - dt_to_timestamp(datetime.utcnow()), 2))
    0.0

    ``dt_to_timestamp`` supports both timezone-aware and naïve
    :class:`~datetime.datetime` objects. Note that it assumes naïve
    datetime objects are implied UTC, such as those generated with
    :meth:`datetime.datetime.utcnow`. If your datetime objects are local
    time, such as those generated with :meth:`datetime.datetime.now`,
    first convert them using the :meth:`datetime.datetime.replace` method
    with ``tzinfo=`` set to the :class:`LocalTZ` object in this module,
    then pass the result of that to ``dt_to_timestamp``.
    """
    if dt.tzinfo:
        td = dt - EPOCH_AWARE
    else:
        td = dt - EPOCH_NAIVE
    return total_seconds(td)
def function[dt_to_timestamp, parameter[dt]]: constant[Converts from a :class:`~datetime.datetime` object to an integer timestamp, suitable for interoperation with :func:`time.time` and other `Epoch-based timestamps`. .. _Epoch-based timestamps: https://en.wikipedia.org/wiki/Unix_time >>> abs(round(time.time() - dt_to_timestamp(datetime.utcnow()), 2)) 0.0 ``dt_to_timestamp`` supports both timezone-aware and naïve :class:`~datetime.datetime` objects. Note that it assumes naïve datetime objects are implied UTC, such as those generated with :meth:`datetime.datetime.utcnow`. If your datetime objects are local time, such as those generated with :meth:`datetime.datetime.now`, first convert them using the :meth:`datetime.datetime.replace` method with ``tzinfo=`` set to the :class:`LocalTZ` object in this module, then pass the result of that to ``dt_to_timestamp``. ] if name[dt].tzinfo begin[:] variable[td] assign[=] binary_operation[name[dt] - name[EPOCH_AWARE]] return[call[name[total_seconds], parameter[name[td]]]]
keyword[def] identifier[dt_to_timestamp] ( identifier[dt] ): literal[string] keyword[if] identifier[dt] . identifier[tzinfo] : identifier[td] = identifier[dt] - identifier[EPOCH_AWARE] keyword[else] : identifier[td] = identifier[dt] - identifier[EPOCH_NAIVE] keyword[return] identifier[total_seconds] ( identifier[td] )
def dt_to_timestamp(dt):
    """Converts from a :class:`~datetime.datetime` object to an integer
    timestamp, suitable for interoperation with :func:`time.time` and
    other `Epoch-based timestamps`.

    .. _Epoch-based timestamps: https://en.wikipedia.org/wiki/Unix_time

    >>> abs(round(time.time() - dt_to_timestamp(datetime.utcnow()), 2))
    0.0

    ``dt_to_timestamp`` supports both timezone-aware and naïve
    :class:`~datetime.datetime` objects. Note that it assumes naïve
    datetime objects are implied UTC, such as those generated with
    :meth:`datetime.datetime.utcnow`. If your datetime objects are local
    time, such as those generated with :meth:`datetime.datetime.now`,
    first convert them using the :meth:`datetime.datetime.replace` method
    with ``tzinfo=`` set to the :class:`LocalTZ` object in this module,
    then pass the result of that to ``dt_to_timestamp``.
    """
    if dt.tzinfo:
        td = dt - EPOCH_AWARE # depends on [control=['if'], data=[]]
    else:
        td = dt - EPOCH_NAIVE
    return total_seconds(td)
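A usage sketch, assuming the module's EPOCH_AWARE/EPOCH_NAIVE constants and total_seconds helper are in scope: a naive datetime is treated as implied UTC, so both calls below yield the same Epoch timestamp.

from datetime import datetime, timezone

naive = datetime(2020, 1, 1)                       # implied UTC
aware = datetime(2020, 1, 1, tzinfo=timezone.utc)
print(dt_to_timestamp(naive))  # 1577836800.0
print(dt_to_timestamp(aware))  # 1577836800.0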
def pretty_descriptor(self): """ assemble a long member name from access flags, type, argument types, exceptions as applicable """ f = " ".join(self.pretty_access_flags()) p = self.pretty_type() n = self.get_name() t = ",".join(self.pretty_exceptions()) if n == "<init>": # we pretend that there's no return type, even though it's # V for constructors p = None if self.is_method: # stick the name and args together so there's no space n = "%s(%s)" % (n, ",".join(self.pretty_arg_types())) if t: # assemble any throws as necessary t = "throws " + t return " ".join(z for z in (f, p, n, t) if z)
def function[pretty_descriptor, parameter[self]]: constant[ assemble a long member name from access flags, type, argument types, exceptions as applicable ] variable[f] assign[=] call[constant[ ].join, parameter[call[name[self].pretty_access_flags, parameter[]]]] variable[p] assign[=] call[name[self].pretty_type, parameter[]] variable[n] assign[=] call[name[self].get_name, parameter[]] variable[t] assign[=] call[constant[,].join, parameter[call[name[self].pretty_exceptions, parameter[]]]] if compare[name[n] equal[==] constant[<init>]] begin[:] variable[p] assign[=] constant[None] if name[self].is_method begin[:] variable[n] assign[=] binary_operation[constant[%s(%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0ca6a10>, <ast.Call object at 0x7da1b0ca71c0>]]] if name[t] begin[:] variable[t] assign[=] binary_operation[constant[throws ] + name[t]] return[call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da1b0e2c580>]]]
keyword[def] identifier[pretty_descriptor] ( identifier[self] ): literal[string] identifier[f] = literal[string] . identifier[join] ( identifier[self] . identifier[pretty_access_flags] ()) identifier[p] = identifier[self] . identifier[pretty_type] () identifier[n] = identifier[self] . identifier[get_name] () identifier[t] = literal[string] . identifier[join] ( identifier[self] . identifier[pretty_exceptions] ()) keyword[if] identifier[n] == literal[string] : identifier[p] = keyword[None] keyword[if] identifier[self] . identifier[is_method] : identifier[n] = literal[string] %( identifier[n] , literal[string] . identifier[join] ( identifier[self] . identifier[pretty_arg_types] ())) keyword[if] identifier[t] : identifier[t] = literal[string] + identifier[t] keyword[return] literal[string] . identifier[join] ( identifier[z] keyword[for] identifier[z] keyword[in] ( identifier[f] , identifier[p] , identifier[n] , identifier[t] ) keyword[if] identifier[z] )
def pretty_descriptor(self): """ assemble a long member name from access flags, type, argument types, exceptions as applicable """ f = ' '.join(self.pretty_access_flags()) p = self.pretty_type() n = self.get_name() t = ','.join(self.pretty_exceptions()) if n == '<init>': # we pretend that there's no return type, even though it's # V for constructors p = None # depends on [control=['if'], data=[]] if self.is_method: # stick the name and args together so there's no space n = '%s(%s)' % (n, ','.join(self.pretty_arg_types())) # depends on [control=['if'], data=[]] if t: # assemble any throws as necessary t = 'throws ' + t # depends on [control=['if'], data=[]] return ' '.join((z for z in (f, p, n, t) if z))
def set_terminal_size(fd, size): """Set the (width, height) size tuple for the given pty fd.""" sizebuf = array.array('h', reversed(size)) fcntl.ioctl(fd, termios.TIOCSWINSZ, sizebuf)
def function[set_terminal_size, parameter[fd, size]]: constant[Set the (width, height) size tuple for the given pty fd.] variable[sizebuf] assign[=] call[name[array].array, parameter[constant[h], call[name[reversed], parameter[name[size]]]]] call[name[fcntl].ioctl, parameter[name[fd], name[termios].TIOCSWINSZ, name[sizebuf]]]
keyword[def] identifier[set_terminal_size] ( identifier[fd] , identifier[size] ): literal[string] identifier[sizebuf] = identifier[array] . identifier[array] ( literal[string] , identifier[reversed] ( identifier[size] )) identifier[fcntl] . identifier[ioctl] ( identifier[fd] , identifier[termios] . identifier[TIOCSWINSZ] , identifier[sizebuf] )
def set_terminal_size(fd, size): """Set the (width, height) size tuple for the given pty fd.""" sizebuf = array.array('h', reversed(size)) fcntl.ioctl(fd, termios.TIOCSWINSZ, sizebuf)
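A POSIX-only usage sketch: open a pty, set its size, and read it back with the complementary TIOCGWINSZ ioctl. The kernel winsize struct is (rows, cols, xpixel, ypixel), which is why the (width, height) tuple is reversed above.

import array
import fcntl
import os
import pty
import termios

master, slave = pty.openpty()
set_terminal_size(master, (80, 24))  # width=80 cols, height=24 rows

buf = array.array('h', [0, 0, 0, 0])
fcntl.ioctl(master, termios.TIOCGWINSZ, buf, True)
print(buf[1], buf[0])  # 80 24 (cols, rows)

os.close(master)
os.close(slave)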
def serialize(self): """ :returns: A dictionary representation of the statement object. :rtype: dict """ data = {} for field_name in self.get_statement_field_names(): format_method = getattr(self, 'get_{}'.format( field_name ), None) if format_method: data[field_name] = format_method() else: data[field_name] = getattr(self, field_name) return data
def function[serialize, parameter[self]]: constant[ :returns: A dictionary representation of the statement object. :rtype: dict ] variable[data] assign[=] dictionary[[], []] for taget[name[field_name]] in starred[call[name[self].get_statement_field_names, parameter[]]] begin[:] variable[format_method] assign[=] call[name[getattr], parameter[name[self], call[constant[get_{}].format, parameter[name[field_name]]], constant[None]]] if name[format_method] begin[:] call[name[data]][name[field_name]] assign[=] call[name[format_method], parameter[]] return[name[data]]
keyword[def] identifier[serialize] ( identifier[self] ): literal[string] identifier[data] ={} keyword[for] identifier[field_name] keyword[in] identifier[self] . identifier[get_statement_field_names] (): identifier[format_method] = identifier[getattr] ( identifier[self] , literal[string] . identifier[format] ( identifier[field_name] ), keyword[None] ) keyword[if] identifier[format_method] : identifier[data] [ identifier[field_name] ]= identifier[format_method] () keyword[else] : identifier[data] [ identifier[field_name] ]= identifier[getattr] ( identifier[self] , identifier[field_name] ) keyword[return] identifier[data]
def serialize(self): """ :returns: A dictionary representation of the statement object. :rtype: dict """ data = {} for field_name in self.get_statement_field_names(): format_method = getattr(self, 'get_{}'.format(field_name), None) if format_method: data[field_name] = format_method() # depends on [control=['if'], data=[]] else: data[field_name] = getattr(self, field_name) # depends on [control=['for'], data=['field_name']] return data
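The per-field `get_<name>` hook is the interesting part of `serialize`: a toy statement class (illustrative names, not from the real library) with one formatted field and one raw attribute.

from datetime import datetime

class ToyStatement:
    def __init__(self, text, created_at):
        self.text = text
        self.created_at = created_at

    def get_statement_field_names(self):
        return ['text', 'created_at']

    def get_created_at(self):
        # Picked up by serialize() in preference to the raw attribute.
        return self.created_at.isoformat()

stmt = ToyStatement('hello', datetime(2020, 1, 1))
print(serialize(stmt))
# {'text': 'hello', 'created_at': '2020-01-01T00:00:00'}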
def get_attributes(path): ''' Return a dictionary object with the Windows file attributes for a file. Args: path (str): The path to the file or directory Returns: dict: A dictionary of file attributes CLI Example: .. code-block:: bash salt '*' file.get_attributes c:\\temp\\a.txt ''' if not os.path.exists(path): raise CommandExecutionError('Path not found: {0}'.format(path)) # set up dictionary for attribute values attributes = {} # Get cumulative int value of attributes intAttributes = win32file.GetFileAttributes(path) # Assign individual attributes attributes['archive'] = (intAttributes & 32) == 32 attributes['reparsePoint'] = (intAttributes & 1024) == 1024 attributes['compressed'] = (intAttributes & 2048) == 2048 attributes['directory'] = (intAttributes & 16) == 16 attributes['encrypted'] = (intAttributes & 16384) == 16384 attributes['hidden'] = (intAttributes & 2) == 2 attributes['normal'] = (intAttributes & 128) == 128 attributes['notIndexed'] = (intAttributes & 8192) == 8192 attributes['offline'] = (intAttributes & 4096) == 4096 attributes['readonly'] = (intAttributes & 1) == 1 attributes['system'] = (intAttributes & 4) == 4 attributes['temporary'] = (intAttributes & 256) == 256 # check if it's a Mounted Volume attributes['mountedVolume'] = False if attributes['reparsePoint'] is True and attributes['directory'] is True: fileIterator = win32file.FindFilesIterator(path) findDataTuple = next(fileIterator) if findDataTuple[6] == 0xA0000003: attributes['mountedVolume'] = True # check if it's a soft (symbolic) link # Note: os.path.islink() does not work in # Python 2.7 for the Windows NTFS file system. # The following code does, however, work (tested in Windows 8) attributes['symbolicLink'] = False if attributes['reparsePoint'] is True: fileIterator = win32file.FindFilesIterator(path) findDataTuple = next(fileIterator) if findDataTuple[6] == 0xA000000C: attributes['symbolicLink'] = True return attributes
def function[get_attributes, parameter[path]]: constant[ Return a dictionary object with the Windows file attributes for a file. Args: path (str): The path to the file or directory Returns: dict: A dictionary of file attributes CLI Example: .. code-block:: bash salt '*' file.get_attributes c:\temp\a.txt ] if <ast.UnaryOp object at 0x7da18f00fbe0> begin[:] <ast.Raise object at 0x7da1b210a230> variable[attributes] assign[=] dictionary[[], []] variable[intAttributes] assign[=] call[name[win32file].GetFileAttributes, parameter[name[path]]] call[name[attributes]][constant[archive]] assign[=] compare[binary_operation[name[intAttributes] <ast.BitAnd object at 0x7da2590d6b60> constant[32]] equal[==] constant[32]] call[name[attributes]][constant[reparsePoint]] assign[=] compare[binary_operation[name[intAttributes] <ast.BitAnd object at 0x7da2590d6b60> constant[1024]] equal[==] constant[1024]] call[name[attributes]][constant[compressed]] assign[=] compare[binary_operation[name[intAttributes] <ast.BitAnd object at 0x7da2590d6b60> constant[2048]] equal[==] constant[2048]] call[name[attributes]][constant[directory]] assign[=] compare[binary_operation[name[intAttributes] <ast.BitAnd object at 0x7da2590d6b60> constant[16]] equal[==] constant[16]] call[name[attributes]][constant[encrypted]] assign[=] compare[binary_operation[name[intAttributes] <ast.BitAnd object at 0x7da2590d6b60> constant[16384]] equal[==] constant[16384]] call[name[attributes]][constant[hidden]] assign[=] compare[binary_operation[name[intAttributes] <ast.BitAnd object at 0x7da2590d6b60> constant[2]] equal[==] constant[2]] call[name[attributes]][constant[normal]] assign[=] compare[binary_operation[name[intAttributes] <ast.BitAnd object at 0x7da2590d6b60> constant[128]] equal[==] constant[128]] call[name[attributes]][constant[notIndexed]] assign[=] compare[binary_operation[name[intAttributes] <ast.BitAnd object at 0x7da2590d6b60> constant[8192]] equal[==] constant[8192]] call[name[attributes]][constant[offline]] assign[=] compare[binary_operation[name[intAttributes] <ast.BitAnd object at 0x7da2590d6b60> constant[4096]] equal[==] constant[4096]] call[name[attributes]][constant[readonly]] assign[=] compare[binary_operation[name[intAttributes] <ast.BitAnd object at 0x7da2590d6b60> constant[1]] equal[==] constant[1]] call[name[attributes]][constant[system]] assign[=] compare[binary_operation[name[intAttributes] <ast.BitAnd object at 0x7da2590d6b60> constant[4]] equal[==] constant[4]] call[name[attributes]][constant[temporary]] assign[=] compare[binary_operation[name[intAttributes] <ast.BitAnd object at 0x7da2590d6b60> constant[256]] equal[==] constant[256]] call[name[attributes]][constant[mountedVolume]] assign[=] constant[False] if <ast.BoolOp object at 0x7da1b217c520> begin[:] variable[fileIterator] assign[=] call[name[win32file].FindFilesIterator, parameter[name[path]]] variable[findDataTuple] assign[=] call[name[next], parameter[name[fileIterator]]] if compare[call[name[findDataTuple]][constant[6]] equal[==] constant[2684354563]] begin[:] call[name[attributes]][constant[mountedVolume]] assign[=] constant[True] call[name[attributes]][constant[symbolicLink]] assign[=] constant[False] if compare[call[name[attributes]][constant[reparsePoint]] is constant[True]] begin[:] variable[fileIterator] assign[=] call[name[win32file].FindFilesIterator, parameter[name[path]]] variable[findDataTuple] assign[=] call[name[next], parameter[name[fileIterator]]] if compare[call[name[findDataTuple]][constant[6]] equal[==] constant[2684354572]] begin[:] call[name[attributes]][constant[symbolicLink]] assign[=] constant[True] return[name[attributes]]
keyword[def] identifier[get_attributes] ( identifier[path] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ): keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[path] )) identifier[attributes] ={} identifier[intAttributes] = identifier[win32file] . identifier[GetFileAttributes] ( identifier[path] ) identifier[attributes] [ literal[string] ]=( identifier[intAttributes] & literal[int] )== literal[int] identifier[attributes] [ literal[string] ]=( identifier[intAttributes] & literal[int] )== literal[int] identifier[attributes] [ literal[string] ]=( identifier[intAttributes] & literal[int] )== literal[int] identifier[attributes] [ literal[string] ]=( identifier[intAttributes] & literal[int] )== literal[int] identifier[attributes] [ literal[string] ]=( identifier[intAttributes] & literal[int] )== literal[int] identifier[attributes] [ literal[string] ]=( identifier[intAttributes] & literal[int] )== literal[int] identifier[attributes] [ literal[string] ]=( identifier[intAttributes] & literal[int] )== literal[int] identifier[attributes] [ literal[string] ]=( identifier[intAttributes] & literal[int] )== literal[int] identifier[attributes] [ literal[string] ]=( identifier[intAttributes] & literal[int] )== literal[int] identifier[attributes] [ literal[string] ]=( identifier[intAttributes] & literal[int] )== literal[int] identifier[attributes] [ literal[string] ]=( identifier[intAttributes] & literal[int] )== literal[int] identifier[attributes] [ literal[string] ]=( identifier[intAttributes] & literal[int] )== literal[int] identifier[attributes] [ literal[string] ]= keyword[False] keyword[if] identifier[attributes] [ literal[string] ] keyword[is] keyword[True] keyword[and] identifier[attributes] [ literal[string] ] keyword[is] keyword[True] : identifier[fileIterator] = identifier[win32file] . identifier[FindFilesIterator] ( identifier[path] ) identifier[findDataTuple] = identifier[next] ( identifier[fileIterator] ) keyword[if] identifier[findDataTuple] [ literal[int] ]== literal[int] : identifier[attributes] [ literal[string] ]= keyword[True] identifier[attributes] [ literal[string] ]= keyword[False] keyword[if] identifier[attributes] [ literal[string] ] keyword[is] keyword[True] : identifier[fileIterator] = identifier[win32file] . identifier[FindFilesIterator] ( identifier[path] ) identifier[findDataTuple] = identifier[next] ( identifier[fileIterator] ) keyword[if] identifier[findDataTuple] [ literal[int] ]== literal[int] : identifier[attributes] [ literal[string] ]= keyword[True] keyword[return] identifier[attributes]
def get_attributes(path): """ Return a dictionary object with the Windows file attributes for a file. Args: path (str): The path to the file or directory Returns: dict: A dictionary of file attributes CLI Example: .. code-block:: bash salt '*' file.get_attributes c:\\temp\\a.txt """ if not os.path.exists(path): raise CommandExecutionError('Path not found: {0}'.format(path)) # depends on [control=['if'], data=[]] # set up dictionary for attribute values attributes = {} # Get cumulative int value of attributes intAttributes = win32file.GetFileAttributes(path) # Assign individual attributes attributes['archive'] = intAttributes & 32 == 32 attributes['reparsePoint'] = intAttributes & 1024 == 1024 attributes['compressed'] = intAttributes & 2048 == 2048 attributes['directory'] = intAttributes & 16 == 16 attributes['encrypted'] = intAttributes & 16384 == 16384 attributes['hidden'] = intAttributes & 2 == 2 attributes['normal'] = intAttributes & 128 == 128 attributes['notIndexed'] = intAttributes & 8192 == 8192 attributes['offline'] = intAttributes & 4096 == 4096 attributes['readonly'] = intAttributes & 1 == 1 attributes['system'] = intAttributes & 4 == 4 attributes['temporary'] = intAttributes & 256 == 256 # check if it's a Mounted Volume attributes['mountedVolume'] = False if attributes['reparsePoint'] is True and attributes['directory'] is True: fileIterator = win32file.FindFilesIterator(path) findDataTuple = next(fileIterator) if findDataTuple[6] == 2684354563: attributes['mountedVolume'] = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # check if it's a soft (symbolic) link # Note: os.path.islink() does not work in # Python 2.7 for the Windows NTFS file system. # The following code does, however, work (tested in Windows 8) attributes['symbolicLink'] = False if attributes['reparsePoint'] is True: fileIterator = win32file.FindFilesIterator(path) findDataTuple = next(fileIterator) if findDataTuple[6] == 2684354572: attributes['symbolicLink'] = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return attributes
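The decoding above is plain bit-masking against the Win32 FILE_ATTRIBUTE_* values, and the reparse-tag constants 0xA0000003 and 0xA000000C are IO_REPARSE_TAG_MOUNT_POINT and IO_REPARSE_TAG_SYMLINK. A platform-independent check of the masking with a made-up attribute word:

FILE_ATTRIBUTE_READONLY = 0x0001
FILE_ATTRIBUTE_HIDDEN = 0x0002
FILE_ATTRIBUTE_DIRECTORY = 0x0010
FILE_ATTRIBUTE_ARCHIVE = 0x0020

int_attributes = 0x0031  # readonly | directory | archive
print({
    'readonly': (int_attributes & FILE_ATTRIBUTE_READONLY) == FILE_ATTRIBUTE_READONLY,
    'hidden': (int_attributes & FILE_ATTRIBUTE_HIDDEN) == FILE_ATTRIBUTE_HIDDEN,
    'directory': (int_attributes & FILE_ATTRIBUTE_DIRECTORY) == FILE_ATTRIBUTE_DIRECTORY,
    'archive': (int_attributes & FILE_ATTRIBUTE_ARCHIVE) == FILE_ATTRIBUTE_ARCHIVE,
})
# {'readonly': True, 'hidden': False, 'directory': True, 'archive': True}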
def get_time_server(): ''' Display the currently set network time server. :return: the network time server :rtype: str CLI Example: .. code-block:: bash salt '*' timezone.get_time_server ''' ret = salt.utils.mac_utils.execute_return_result( 'systemsetup -getnetworktimeserver') return salt.utils.mac_utils.parse_return(ret)
def function[get_time_server, parameter[]]: constant[ Display the currently set network time server. :return: the network time server :rtype: str CLI Example: .. code-block:: bash salt '*' timezone.get_time_server ] variable[ret] assign[=] call[name[salt].utils.mac_utils.execute_return_result, parameter[constant[systemsetup -getnetworktimeserver]]] return[call[name[salt].utils.mac_utils.parse_return, parameter[name[ret]]]]
keyword[def] identifier[get_time_server] (): literal[string] identifier[ret] = identifier[salt] . identifier[utils] . identifier[mac_utils] . identifier[execute_return_result] ( literal[string] ) keyword[return] identifier[salt] . identifier[utils] . identifier[mac_utils] . identifier[parse_return] ( identifier[ret] )
def get_time_server(): """ Display the currently set network time server. :return: the network time server :rtype: str CLI Example: .. code-block:: bash salt '*' timezone.get_time_server """ ret = salt.utils.mac_utils.execute_return_result('systemsetup -getnetworktimeserver') return salt.utils.mac_utils.parse_return(ret)
def to_comment(comment): """ Convert a string to a ``.properties`` file comment. All non-Latin-1 characters in the string are escaped using ``\\uXXXX`` escapes (after converting non-BMP characters to surrogate pairs), a ``#`` is prepended to the string, any CR LF or CR line breaks in the string are converted to LF, and a ``#`` is inserted after any line break not already followed by a ``#`` or ``!``. No trailing newline is added. >>> to_comment('They say foo=bar,\\r\\nbut does bar=foo?') '#They say foo=bar,\\n#but does bar=foo?' :param comment: the string to convert to a comment :type comment: text string :rtype: text string """ return '#' + re.sub(r'[^\x00-\xFF]', _esc, re.sub(r'\n(?![#!])', '\n#', re.sub(r'\r\n?', '\n', comment)))
def function[to_comment, parameter[comment]]: constant[ Convert a string to a ``.properties`` file comment. All non-Latin-1 characters in the string are escaped using ``\uXXXX`` escapes (after converting non-BMP characters to surrogate pairs), a ``#`` is prepended to the string, any CR LF or CR line breaks in the string are converted to LF, and a ``#`` is inserted after any line break not already followed by a ``#`` or ``!``. No trailing newline is added. >>> to_comment('They say foo=bar,\r\nbut does bar=foo?') '#They say foo=bar,\n#but does bar=foo?' :param comment: the string to convert to a comment :type comment: text string :rtype: text string ] return[binary_operation[constant[#] + call[name[re].sub, parameter[constant[[^\x00-\xFF]], name[_esc], call[name[re].sub, parameter[constant[\n(?![#!])], constant[ #], call[name[re].sub, parameter[constant[\r\n?], constant[ ], name[comment]]]]]]]]]
keyword[def] identifier[to_comment] ( identifier[comment] ): literal[string] keyword[return] literal[string] + identifier[re] . identifier[sub] ( literal[string] , identifier[_esc] , identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[comment] )))
def to_comment(comment): """ Convert a string to a ``.properties`` file comment. All non-Latin-1 characters in the string are escaped using ``\\uXXXX`` escapes (after converting non-BMP characters to surrogate pairs), a ``#`` is prepended to the string, any CR LF or CR line breaks in the string are converted to LF, and a ``#`` is inserted after any line break not already followed by a ``#`` or ``!``. No trailing newline is added. >>> to_comment('They say foo=bar,\\r\\nbut does bar=foo?') '#They say foo=bar,\\n#but does bar=foo?' :param comment: the string to convert to a comment :type comment: text string :rtype: text string """ return '#' + re.sub('[^\\x00-\\xFF]', _esc, re.sub('\\n(?![#!])', '\n#', re.sub('\\r\\n?', '\n', comment)))
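Usage sketch: CR/CRLF breaks are normalized to LF, and a '#' follows every break not already starting a comment; non-Latin-1 characters go through the module's _esc hook, which is assumed here to emit \uXXXX escapes as the docstring states.

print(to_comment('line one\rline two\r\n!already a comment'))
# prints:
# #line one
# #line two
# !already a comment
print(to_comment(u'snowman: \u2603'))
# prints: #snowman: \u2603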
def create_router(self, name, ext_network=None, admin_state_up=True): ''' Creates a new router ''' body = {'name': name, 'admin_state_up': admin_state_up} if ext_network: net_id = self._find_network_id(ext_network) body['external_gateway_info'] = {'network_id': net_id} return self.network_conn.create_router(body={'router': body})
def function[create_router, parameter[self, name, ext_network, admin_state_up]]: constant[ Creates a new router ] variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c466b0>, <ast.Constant object at 0x7da1b1c46530>], [<ast.Name object at 0x7da1b1c46020>, <ast.Name object at 0x7da1b1c451b0>]] if name[ext_network] begin[:] variable[net_id] assign[=] call[name[self]._find_network_id, parameter[name[ext_network]]] call[name[body]][constant[external_gateway_info]] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c465f0>], [<ast.Name object at 0x7da1b1c46c20>]] return[call[name[self].network_conn.create_router, parameter[]]]
keyword[def] identifier[create_router] ( identifier[self] , identifier[name] , identifier[ext_network] = keyword[None] , identifier[admin_state_up] = keyword[True] ): literal[string] identifier[body] ={ literal[string] : identifier[name] , literal[string] : identifier[admin_state_up] } keyword[if] identifier[ext_network] : identifier[net_id] = identifier[self] . identifier[_find_network_id] ( identifier[ext_network] ) identifier[body] [ literal[string] ]={ literal[string] : identifier[net_id] } keyword[return] identifier[self] . identifier[network_conn] . identifier[create_router] ( identifier[body] ={ literal[string] : identifier[body] })
def create_router(self, name, ext_network=None, admin_state_up=True): """ Creates a new router """ body = {'name': name, 'admin_state_up': admin_state_up} if ext_network: net_id = self._find_network_id(ext_network) body['external_gateway_info'] = {'network_id': net_id} # depends on [control=['if'], data=[]] return self.network_conn.create_router(body={'router': body})
def enter(self, node): ''' Tries to invoke a method matching the pattern *enter_<type name>*, where <type name> is the name of the type of the *node*. ''' name = 'enter_' + node.__class__.__name__ fn = getattr(self, name, self.default_enter) fn(node)
def function[enter, parameter[self, node]]: constant[ Tries to invoke a method matching the pattern *enter_<type name>*, where <type name> is the name of the type of the *node*. ] variable[name] assign[=] binary_operation[constant[enter_] + name[node].__class__.__name__] variable[fn] assign[=] call[name[getattr], parameter[name[self], name[name], name[self].default_enter]] call[name[fn], parameter[name[node]]]
keyword[def] identifier[enter] ( identifier[self] , identifier[node] ): literal[string] identifier[name] = literal[string] + identifier[node] . identifier[__class__] . identifier[__name__] identifier[fn] = identifier[getattr] ( identifier[self] , identifier[name] , identifier[self] . identifier[default_enter] ) identifier[fn] ( identifier[node] )
def enter(self, node): """ Tries to invoke a method matching the pattern *enter_<type name>*, where <type name> is the name of the type of the *node*. """ name = 'enter_' + node.__class__.__name__ fn = getattr(self, name, self.default_enter) fn(node)
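The dispatch above keys on the node's type name, so adding a handler is just naming a method enter_<TypeName>; a self-contained sketch with a hypothetical visitor class that supplies the default_enter fallback the code expects:

class PrintingVisitor:
    def default_enter(self, node):
        print('no handler for', node.__class__.__name__)

    def enter_list(self, node):
        print('entering a list of', len(node), 'items')

    enter = enter  # reuse the dispatcher defined above (sketch)

v = PrintingVisitor()
v.enter([1, 2, 3])  # entering a list of 3 items
v.enter('hello')    # no handler for str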
def value(self, value, *args, **kwargs): """ Takes a string value and returns the Date based on the format """ from datetime import datetime value = self.obj.value(value, *args, **kwargs) try: rv = datetime.strptime(value, self.format) except ValueError as _: # noqa rv = None return rv
def function[value, parameter[self, value]]: constant[ Takes a string value and returns the Date based on the format ] from relative_module[datetime] import module[datetime] variable[value] assign[=] call[name[self].obj.value, parameter[name[value], <ast.Starred object at 0x7da1b15b3c70>]] <ast.Try object at 0x7da1b15b1390> return[name[rv]]
keyword[def] identifier[value] ( identifier[self] , identifier[value] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[from] identifier[datetime] keyword[import] identifier[datetime] identifier[value] = identifier[self] . identifier[obj] . identifier[value] ( identifier[value] ,* identifier[args] ,** identifier[kwargs] ) keyword[try] : identifier[rv] = identifier[datetime] . identifier[strptime] ( identifier[value] , identifier[self] . identifier[format] ) keyword[except] identifier[ValueError] keyword[as] identifier[_] : identifier[rv] = keyword[None] keyword[return] identifier[rv]
def value(self, value, *args, **kwargs): """ Takes a string value and returns the Date based on the format """ from datetime import datetime value = self.obj.value(value, *args, **kwargs) try: rv = datetime.strptime(value, self.format) # depends on [control=['try'], data=[]] except ValueError as _: # noqa rv = None # depends on [control=['except'], data=[]] return rv
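A sketch with a hypothetical pass-through inner obj and a class-level format string (names invented for illustration): valid strings parse to a datetime, anything else falls back to None.

class PassThrough:
    def value(self, value, *args, **kwargs):
        return value

class DateField:
    format = '%Y-%m-%d'
    obj = PassThrough()
    value = value  # reuse the method defined above (sketch)

field = DateField()
print(field.value('2020-01-31'))   # 2020-01-31 00:00:00
print(field.value('not a date'))   # None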
def get_args(): """Parse command line arguments.""" parser = argparse.ArgumentParser( prog=__title__, description=__description__, ) parser.add_argument( '--testdb', action='store_true', help='create and use a database with test users' ) parser.add_argument( '-v', '--verbose', action='store_true', help='print a detailed log' ) parser.add_argument( '--debug', action='store_true', help='print debug log' ) parser.add_argument( '--log-sql', action='store_true', help='log sql transactions' ) parser.add_argument( '-V', '--version', action='store_true', help='print version info and exit' ) parser.add_argument( '--tk', action='store_true', help='use old tk interface' ) return parser.parse_args()
def function[get_args, parameter[]]: constant[Parse command line arguments.] variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[parser].add_argument, parameter[constant[--testdb]]] call[name[parser].add_argument, parameter[constant[-v], constant[--verbose]]] call[name[parser].add_argument, parameter[constant[--debug]]] call[name[parser].add_argument, parameter[constant[--log-sql]]] call[name[parser].add_argument, parameter[constant[-V], constant[--version]]] call[name[parser].add_argument, parameter[constant[--tk]]] return[call[name[parser].parse_args, parameter[]]]
keyword[def] identifier[get_args] (): literal[string] identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[prog] = identifier[__title__] , identifier[description] = identifier[__description__] , ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] ) keyword[return] identifier[parser] . identifier[parse_args] ()
def get_args(): """Parse command line arguments.""" parser = argparse.ArgumentParser(prog=__title__, description=__description__) parser.add_argument('--testdb', action='store_true', help='create and use a database with test users') parser.add_argument('-v', '--verbose', action='store_true', help='print a detailed log') parser.add_argument('--debug', action='store_true', help='print debug log') parser.add_argument('--log-sql', action='store_true', help='log sql transactions') parser.add_argument('-V', '--version', action='store_true', help='print version info and exit') parser.add_argument('--tk', action='store_true', help='use old tk interface') return parser.parse_args()
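get_args() reads sys.argv through argparse, so a quick demo patches argv first (this assumes the module-level __title__ and __description__ used by the parser are defined, as they are in the source module):

import sys

sys.argv = ['prog', '--testdb', '-v']
args = get_args()
print(args.testdb, args.verbose, args.debug, args.log_sql)
# True True False False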
def plot_xbt(fignum, XB, T, e, f):
    """
    function to plot a series of chi measurements as a function of field,
    at a fixed temperature and frequency
    """
    plt.figure(num=fignum)
    plt.clf()
    if not isServer:
        plt.figtext(.02, .01, version_num)
    plt.xlabel('Field (T)')
    plt.ylabel('Susceptibility (m^3/kg)')
    B, X = [], []
    for xb in XB:
        X.append(xb[0])
        B.append(xb[1])
    plt.plot(B, X)
    plt.legend(['%i' % (int(T)) + ' K'])
    plt.title(e + ': f = ' + '%i' % (int(f)) + ' Hz')
def function[plot_xbt, parameter[fignum, XB, T, e, f]]: constant[ function to plot a series of chi measurements as a function of field, at a fixed temperature and frequency ] call[name[plt].figure, parameter[]] call[name[plt].clf, parameter[]] if <ast.UnaryOp object at 0x7da1b056c7c0> begin[:] call[name[plt].figtext, parameter[constant[0.02], constant[0.01], name[version_num]]] call[name[plt].xlabel, parameter[constant[Field (T)]]] call[name[plt].ylabel, parameter[constant[Susceptibility (m^3/kg)]]] <ast.Tuple object at 0x7da1b056e1d0> assign[=] tuple[[<ast.List object at 0x7da1b056e710>, <ast.List object at 0x7da1b056c430>]] for taget[name[xb]] in starred[name[XB]] begin[:] call[name[X].append, parameter[call[name[xb]][constant[0]]]] call[name[B].append, parameter[call[name[xb]][constant[1]]]] call[name[plt].plot, parameter[name[B], name[X]]] call[name[plt].legend, parameter[list[[<ast.BinOp object at 0x7da18dc9b880>]]]] call[name[plt].title, parameter[binary_operation[binary_operation[binary_operation[name[e] + constant[: f = ]] + binary_operation[constant[%i] <ast.Mod object at 0x7da2590d6920> call[name[int], parameter[name[f]]]]] + constant[ Hz]]]]
keyword[def] identifier[plot_xbt] ( identifier[fignum] , identifier[XB] , identifier[T] , identifier[e] , identifier[f] ): literal[string] identifier[plt] . identifier[figure] ( identifier[num] = identifier[fignum] ) identifier[plt] . identifier[clf] () keyword[if] keyword[not] identifier[isServer] : identifier[plt] . identifier[figtext] ( literal[int] , literal[int] , identifier[version_num] ) identifier[plt] . identifier[xlabel] ( literal[string] ) identifier[plt] . identifier[ylabel] ( literal[string] ) identifier[B] , identifier[X] =[],[] keyword[for] identifier[xb] keyword[in] identifier[XB] : identifier[X] . identifier[append] ( identifier[xb] [ literal[int] ]) identifier[B] . identifier[append] ( identifier[xb] [ literal[int] ]) identifier[plt] . identifier[plot] ( identifier[B] , identifier[X] ) identifier[plt] . identifier[legend] ([ literal[string] %( identifier[int] ( identifier[T] ))+ literal[string] ]) identifier[plt] . identifier[title] ( identifier[e] + literal[string] + literal[string] %( identifier[int] ( identifier[f] ))+ literal[string] )
def plot_xbt(fignum, XB, T, e, f):
    """
    function to plot a series of chi measurements as a function of field,
    at a fixed temperature and frequency
    """
    plt.figure(num=fignum)
    plt.clf()
    if not isServer:
        plt.figtext(0.02, 0.01, version_num) # depends on [control=['if'], data=[]]
    plt.xlabel('Field (T)')
    plt.ylabel('Susceptibility (m^3/kg)')
    (B, X) = ([], [])
    for xb in XB:
        X.append(xb[0])
        B.append(xb[1]) # depends on [control=['for'], data=['xb']]
    plt.plot(B, X)
    plt.legend(['%i' % int(T) + ' K'])
    plt.title(e + ': f = ' + '%i' % int(f) + ' Hz')
def to_text(self, origin=None, relativize=True, **kw):
    """Convert the message to text.

    The I{origin}, I{relativize}, and any other keyword arguments
    are passed to the rrset to_text() method.

    @rtype: string
    """

    s = cStringIO.StringIO()
    print >> s, 'id %d' % self.id
    print >> s, 'opcode %s' % \
          dns.opcode.to_text(dns.opcode.from_flags(self.flags))
    rc = dns.rcode.from_flags(self.flags, self.ednsflags)
    print >> s, 'rcode %s' % dns.rcode.to_text(rc)
    print >> s, 'flags %s' % dns.flags.to_text(self.flags)
    if self.edns >= 0:
        print >> s, 'edns %s' % self.edns
        if self.ednsflags != 0:
            print >> s, 'eflags %s' % \
                  dns.flags.edns_to_text(self.ednsflags)
        print >> s, 'payload', self.payload
    is_update = dns.opcode.is_update(self.flags)
    if is_update:
        print >> s, ';ZONE'
    else:
        print >> s, ';QUESTION'
    for rrset in self.question:
        print >> s, rrset.to_text(origin, relativize, **kw)
    if is_update:
        print >> s, ';PREREQ'
    else:
        print >> s, ';ANSWER'
    for rrset in self.answer:
        print >> s, rrset.to_text(origin, relativize, **kw)
    if is_update:
        print >> s, ';UPDATE'
    else:
        print >> s, ';AUTHORITY'
    for rrset in self.authority:
        print >> s, rrset.to_text(origin, relativize, **kw)
    print >> s, ';ADDITIONAL'
    for rrset in self.additional:
        print >> s, rrset.to_text(origin, relativize, **kw)
    #
    # We strip off the final \n so the caller can print the result without
    # doing weird things to get around eccentricities in Python print
    # formatting
    #
    return s.getvalue()[:-1]
def function[to_text, parameter[self, origin, relativize]]: constant[Convert the message to text. The I{origin}, I{relativize}, and any other keyword arguments are passed to the rrset to_wire() method. @rtype: string ] variable[s] assign[=] call[name[cStringIO].StringIO, parameter[]] tuple[[<ast.BinOp object at 0x7da18dc06950>, <ast.BinOp object at 0x7da18dc07040>]] tuple[[<ast.BinOp object at 0x7da18dc057e0>, <ast.BinOp object at 0x7da18dc066b0>]] variable[rc] assign[=] call[name[dns].rcode.from_flags, parameter[name[self].flags, name[self].ednsflags]] tuple[[<ast.BinOp object at 0x7da18dc05ed0>, <ast.BinOp object at 0x7da18dc072b0>]] tuple[[<ast.BinOp object at 0x7da18dc07ee0>, <ast.BinOp object at 0x7da18dc04a90>]] if compare[name[self].edns greater_or_equal[>=] constant[0]] begin[:] tuple[[<ast.BinOp object at 0x7da18dc047f0>, <ast.BinOp object at 0x7da18dc060b0>]] if compare[name[self].ednsflags not_equal[!=] constant[0]] begin[:] tuple[[<ast.BinOp object at 0x7da18dc04cd0>, <ast.BinOp object at 0x7da18dc04970>]] tuple[[<ast.BinOp object at 0x7da18dc06350>, <ast.Constant object at 0x7da18dc068f0>, <ast.Attribute object at 0x7da18dc04fd0>]] variable[is_update] assign[=] call[name[dns].opcode.is_update, parameter[name[self].flags]] if name[is_update] begin[:] tuple[[<ast.BinOp object at 0x7da18dc040a0>, <ast.Constant object at 0x7da18dc04790>]] for taget[name[rrset]] in starred[name[self].question] begin[:] tuple[[<ast.BinOp object at 0x7da2047e90f0>, <ast.Call object at 0x7da2047eb370>]] if name[is_update] begin[:] tuple[[<ast.BinOp object at 0x7da2047e84f0>, <ast.Constant object at 0x7da2047e85b0>]] for taget[name[rrset]] in starred[name[self].answer] begin[:] tuple[[<ast.BinOp object at 0x7da2047eac20>, <ast.Call object at 0x7da2047ea770>]] if name[is_update] begin[:] tuple[[<ast.BinOp object at 0x7da2047e8880>, <ast.Constant object at 0x7da2047e8e80>]] for taget[name[rrset]] in starred[name[self].authority] begin[:] tuple[[<ast.BinOp object at 0x7da2047eb010>, <ast.Call object at 0x7da2047ebe20>]] tuple[[<ast.BinOp object at 0x7da2047ea920>, <ast.Constant object at 0x7da2047e8d90>]] for taget[name[rrset]] in starred[name[self].additional] begin[:] tuple[[<ast.BinOp object at 0x7da2047e9960>, <ast.Call object at 0x7da2047eb2b0>]] return[call[call[name[s].getvalue, parameter[]]][<ast.Slice object at 0x7da1b2344190>]]
keyword[def] identifier[to_text] ( identifier[self] , identifier[origin] = keyword[None] , identifier[relativize] = keyword[True] ,** identifier[kw] ): literal[string] identifier[s] = identifier[cStringIO] . identifier[StringIO] () identifier[print] >> identifier[s] , literal[string] % identifier[self] . identifier[id] identifier[print] >> identifier[s] , literal[string] % identifier[dns] . identifier[opcode] . identifier[to_text] ( identifier[dns] . identifier[opcode] . identifier[from_flags] ( identifier[self] . identifier[flags] )) identifier[rc] = identifier[dns] . identifier[rcode] . identifier[from_flags] ( identifier[self] . identifier[flags] , identifier[self] . identifier[ednsflags] ) identifier[print] >> identifier[s] , literal[string] % identifier[dns] . identifier[rcode] . identifier[to_text] ( identifier[rc] ) identifier[print] >> identifier[s] , literal[string] % identifier[dns] . identifier[flags] . identifier[to_text] ( identifier[self] . identifier[flags] ) keyword[if] identifier[self] . identifier[edns] >= literal[int] : identifier[print] >> identifier[s] , literal[string] % identifier[self] . identifier[edns] keyword[if] identifier[self] . identifier[ednsflags] != literal[int] : identifier[print] >> identifier[s] , literal[string] % identifier[dns] . identifier[flags] . identifier[edns_to_text] ( identifier[self] . identifier[ednsflags] ) identifier[print] >> identifier[s] , literal[string] , identifier[self] . identifier[payload] identifier[is_update] = identifier[dns] . identifier[opcode] . identifier[is_update] ( identifier[self] . identifier[flags] ) keyword[if] identifier[is_update] : identifier[print] >> identifier[s] , literal[string] keyword[else] : identifier[print] >> identifier[s] , literal[string] keyword[for] identifier[rrset] keyword[in] identifier[self] . identifier[question] : identifier[print] >> identifier[s] , identifier[rrset] . identifier[to_text] ( identifier[origin] , identifier[relativize] ,** identifier[kw] ) keyword[if] identifier[is_update] : identifier[print] >> identifier[s] , literal[string] keyword[else] : identifier[print] >> identifier[s] , literal[string] keyword[for] identifier[rrset] keyword[in] identifier[self] . identifier[answer] : identifier[print] >> identifier[s] , identifier[rrset] . identifier[to_text] ( identifier[origin] , identifier[relativize] ,** identifier[kw] ) keyword[if] identifier[is_update] : identifier[print] >> identifier[s] , literal[string] keyword[else] : identifier[print] >> identifier[s] , literal[string] keyword[for] identifier[rrset] keyword[in] identifier[self] . identifier[authority] : identifier[print] >> identifier[s] , identifier[rrset] . identifier[to_text] ( identifier[origin] , identifier[relativize] ,** identifier[kw] ) identifier[print] >> identifier[s] , literal[string] keyword[for] identifier[rrset] keyword[in] identifier[self] . identifier[additional] : identifier[print] >> identifier[s] , identifier[rrset] . identifier[to_text] ( identifier[origin] , identifier[relativize] ,** identifier[kw] ) keyword[return] identifier[s] . identifier[getvalue] ()[:- literal[int] ]
def to_text(self, origin=None, relativize=True, **kw):
    """Convert the message to text.

    The I{origin}, I{relativize}, and any other keyword arguments
    are passed to the rrset to_text() method.

    @rtype: string
    """
    s = cStringIO.StringIO()
    (print >> s, 'id %d' % self.id)
    (print >> s, 'opcode %s' % dns.opcode.to_text(dns.opcode.from_flags(self.flags)))
    rc = dns.rcode.from_flags(self.flags, self.ednsflags)
    (print >> s, 'rcode %s' % dns.rcode.to_text(rc))
    (print >> s, 'flags %s' % dns.flags.to_text(self.flags))
    if self.edns >= 0:
        (print >> s, 'edns %s' % self.edns)
        if self.ednsflags != 0:
            (print >> s, 'eflags %s' % dns.flags.edns_to_text(self.ednsflags)) # depends on [control=['if'], data=[]]
        (print >> s, 'payload', self.payload) # depends on [control=['if'], data=[]]
    is_update = dns.opcode.is_update(self.flags)
    if is_update:
        (print >> s, ';ZONE') # depends on [control=['if'], data=[]]
    else:
        (print >> s, ';QUESTION')
    for rrset in self.question:
        (print >> s, rrset.to_text(origin, relativize, **kw)) # depends on [control=['for'], data=['rrset']]
    if is_update:
        (print >> s, ';PREREQ') # depends on [control=['if'], data=[]]
    else:
        (print >> s, ';ANSWER')
    for rrset in self.answer:
        (print >> s, rrset.to_text(origin, relativize, **kw)) # depends on [control=['for'], data=['rrset']]
    if is_update:
        (print >> s, ';UPDATE') # depends on [control=['if'], data=[]]
    else:
        (print >> s, ';AUTHORITY')
    for rrset in self.authority:
        (print >> s, rrset.to_text(origin, relativize, **kw)) # depends on [control=['for'], data=['rrset']]
    (print >> s, ';ADDITIONAL')
    for rrset in self.additional:
        (print >> s, rrset.to_text(origin, relativize, **kw)) # depends on [control=['for'], data=['rrset']]
    #
    # We strip off the final \n so the caller can print the result without
    # doing weird things to get around eccentricities in Python print
    # formatting
    #
    return s.getvalue()[:-1]
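The function above is Python 2 ('print >> s, ...' statements), which is why the derived columns parse each print as a tuple expression. A minimal Python 3 sketch of the same build-then-strip pattern, not dnspython's actual port:

import io

def render_lines(lines):
    # accumulate one line per entry, then drop the trailing newline,
    # mirroring the s.getvalue()[:-1] at the end of to_text()
    s = io.StringIO()
    for line in lines:
        print(line, file=s)
    return s.getvalue()[:-1]

print(render_lines(['id 1234', 'opcode QUERY', 'rcode NOERROR']))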
def make_tx_signatures(txs_to_sign, privkey_list, pubkey_list): """ Loops through txs_to_sign and makes signatures using privkey_list and pubkey_list Not sure what privkeys and pubkeys to supply? Use get_input_addresses() to return a list of addresses. Matching those addresses to keys is up to you and how you store your private keys. A future version of this library may handle this for you, but it is not trivial. Note that if spending multisig funds the process is significantly more complicated. Each tx_to_sign must be signed by *each* private key. In a 2-of-3 transaction, two of [privkey1, privkey2, privkey3] must sign each tx_to_sign http://dev.blockcypher.com/#multisig-transactions """ assert len(privkey_list) == len(pubkey_list) == len(txs_to_sign) # in the event of multiple inputs using the same pub/privkey, # that privkey should be included multiple times signatures = [] for cnt, tx_to_sign in enumerate(txs_to_sign): sig = der_encode_sig(*ecdsa_raw_sign(tx_to_sign.rstrip(' \t\r\n\0'), privkey_list[cnt])) err_msg = 'Bad Signature: sig %s for tx %s with pubkey %s' % ( sig, tx_to_sign, pubkey_list[cnt], ) assert ecdsa_raw_verify(tx_to_sign, der_decode_sig(sig), pubkey_list[cnt]), err_msg signatures.append(sig) return signatures
def function[make_tx_signatures, parameter[txs_to_sign, privkey_list, pubkey_list]]: constant[ Loops through txs_to_sign and makes signatures using privkey_list and pubkey_list Not sure what privkeys and pubkeys to supply? Use get_input_addresses() to return a list of addresses. Matching those addresses to keys is up to you and how you store your private keys. A future version of this library may handle this for you, but it is not trivial. Note that if spending multisig funds the process is significantly more complicated. Each tx_to_sign must be signed by *each* private key. In a 2-of-3 transaction, two of [privkey1, privkey2, privkey3] must sign each tx_to_sign http://dev.blockcypher.com/#multisig-transactions ] assert[compare[call[name[len], parameter[name[privkey_list]]] equal[==] call[name[len], parameter[name[pubkey_list]]]]] variable[signatures] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18bcc9b40>, <ast.Name object at 0x7da18bccabc0>]]] in starred[call[name[enumerate], parameter[name[txs_to_sign]]]] begin[:] variable[sig] assign[=] call[name[der_encode_sig], parameter[<ast.Starred object at 0x7da18bccadd0>]] variable[err_msg] assign[=] binary_operation[constant[Bad Signature: sig %s for tx %s with pubkey %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bcc8a00>, <ast.Name object at 0x7da18bcca530>, <ast.Subscript object at 0x7da18bcca4a0>]]] assert[call[name[ecdsa_raw_verify], parameter[name[tx_to_sign], call[name[der_decode_sig], parameter[name[sig]]], call[name[pubkey_list]][name[cnt]]]]] call[name[signatures].append, parameter[name[sig]]] return[name[signatures]]
keyword[def] identifier[make_tx_signatures] ( identifier[txs_to_sign] , identifier[privkey_list] , identifier[pubkey_list] ): literal[string] keyword[assert] identifier[len] ( identifier[privkey_list] )== identifier[len] ( identifier[pubkey_list] )== identifier[len] ( identifier[txs_to_sign] ) identifier[signatures] =[] keyword[for] identifier[cnt] , identifier[tx_to_sign] keyword[in] identifier[enumerate] ( identifier[txs_to_sign] ): identifier[sig] = identifier[der_encode_sig] (* identifier[ecdsa_raw_sign] ( identifier[tx_to_sign] . identifier[rstrip] ( literal[string] ), identifier[privkey_list] [ identifier[cnt] ])) identifier[err_msg] = literal[string] %( identifier[sig] , identifier[tx_to_sign] , identifier[pubkey_list] [ identifier[cnt] ], ) keyword[assert] identifier[ecdsa_raw_verify] ( identifier[tx_to_sign] , identifier[der_decode_sig] ( identifier[sig] ), identifier[pubkey_list] [ identifier[cnt] ]), identifier[err_msg] identifier[signatures] . identifier[append] ( identifier[sig] ) keyword[return] identifier[signatures]
def make_tx_signatures(txs_to_sign, privkey_list, pubkey_list): """ Loops through txs_to_sign and makes signatures using privkey_list and pubkey_list Not sure what privkeys and pubkeys to supply? Use get_input_addresses() to return a list of addresses. Matching those addresses to keys is up to you and how you store your private keys. A future version of this library may handle this for you, but it is not trivial. Note that if spending multisig funds the process is significantly more complicated. Each tx_to_sign must be signed by *each* private key. In a 2-of-3 transaction, two of [privkey1, privkey2, privkey3] must sign each tx_to_sign http://dev.blockcypher.com/#multisig-transactions """ assert len(privkey_list) == len(pubkey_list) == len(txs_to_sign) # in the event of multiple inputs using the same pub/privkey, # that privkey should be included multiple times signatures = [] for (cnt, tx_to_sign) in enumerate(txs_to_sign): sig = der_encode_sig(*ecdsa_raw_sign(tx_to_sign.rstrip(' \t\r\n\x00'), privkey_list[cnt])) err_msg = 'Bad Signature: sig %s for tx %s with pubkey %s' % (sig, tx_to_sign, pubkey_list[cnt]) assert ecdsa_raw_verify(tx_to_sign, der_decode_sig(sig), pubkey_list[cnt]), err_msg signatures.append(sig) # depends on [control=['for'], data=[]] return signatures
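A call-shape sketch for make_tx_signatures() under its own contract (one key pair per tx hash, keys repeated when several inputs spend from the same address); every name on the right-hand side below is a placeholder, not a real key or transaction.

txs_to_sign = [tx_hex_a, tx_hex_b, tx_hex_c]       # placeholders from a tx builder
privkey_list = [privkey_1, privkey_1, privkey_2]   # privkey_1 twice: two inputs
pubkey_list = [pubkey_1, pubkey_1, pubkey_2]       # spend from the same address

signatures = make_tx_signatures(txs_to_sign, privkey_list, pubkey_list)
assert len(signatures) == len(txs_to_sign)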
def file_field(*args, **kwargs):
    ''' File field '''
    field = wtforms.FileField(*args, **kwargs)
    field.input_type = 'file_field'
    return field
def function[file_field, parameter[]]: constant[ File field ] variable[file_field] assign[=] call[name[wtforms].FileField, parameter[<ast.Starred object at 0x7da18c4cdde0>]] name[file_field].input_type assign[=] constant[file_field] return[name[file_field]]
keyword[def] identifier[file_field] (* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[file_field] = identifier[wtforms] . identifier[FileField] (* identifier[args] ,** identifier[kwargs] ) identifier[file_field] . identifier[input_type] = literal[string] keyword[return] identifier[file_field]
def file_field(*args, **kwargs): """ File field """ file_field = wtforms.FileField(*args, **kwargs) file_field.input_type = 'file_field' return file_field
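A small sketch of how the helper might be used in a wtforms form; the form and field names are invented. The input_type tag is set on the object the helper returns (an unbound field at class-definition time).

import wtforms

class UploadForm(wtforms.Form):
    document = file_field('Document')   # FileField tagged as 'file_field'

form = UploadForm()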
def get_reflectance_lut(self):
    """Read the LUT with reflectances as a function of wavelength, satellite
    zenith secant, azimuth difference angle, and sun zenith secant
    """
    if self._rayl is None:
        # resolves to the module-level get_reflectance_lut() helper,
        # not to this method (no recursion)
        lut_vars = get_reflectance_lut(self.reflectance_lut_filename)
        self._rayl = lut_vars[0]
        self._wvl_coord = lut_vars[1]
        self._azid_coord = lut_vars[2]
        self._satz_sec_coord = lut_vars[3]
        self._sunz_sec_coord = lut_vars[4]
    return (self._rayl, self._wvl_coord, self._azid_coord,
            self._satz_sec_coord, self._sunz_sec_coord)
def function[get_reflectance_lut, parameter[self]]: constant[Read the LUT with reflectances as a function of wavelength, satellite zenith secant, azimuth difference angle, and sun zenith secant ] if compare[name[self]._rayl is constant[None]] begin[:] variable[lut_vars] assign[=] call[name[get_reflectance_lut], parameter[name[self].reflectance_lut_filename]] name[self]._rayl assign[=] call[name[lut_vars]][constant[0]] name[self]._wvl_coord assign[=] call[name[lut_vars]][constant[1]] name[self]._azid_coord assign[=] call[name[lut_vars]][constant[2]] name[self]._satz_sec_coord assign[=] call[name[lut_vars]][constant[3]] name[self]._sunz_sec_coord assign[=] call[name[lut_vars]][constant[4]] return[tuple[[<ast.Attribute object at 0x7da207f98580>, <ast.Attribute object at 0x7da207f9b6d0>, <ast.Attribute object at 0x7da207f9a710>, <ast.Attribute object at 0x7da207f98910>, <ast.Attribute object at 0x7da207f99690>]]]
keyword[def] identifier[get_reflectance_lut] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_rayl] keyword[is] keyword[None] : identifier[lut_vars] = identifier[get_reflectance_lut] ( identifier[self] . identifier[reflectance_lut_filename] ) identifier[self] . identifier[_rayl] = identifier[lut_vars] [ literal[int] ] identifier[self] . identifier[_wvl_coord] = identifier[lut_vars] [ literal[int] ] identifier[self] . identifier[_azid_coord] = identifier[lut_vars] [ literal[int] ] identifier[self] . identifier[_satz_sec_coord] = identifier[lut_vars] [ literal[int] ] identifier[self] . identifier[_sunz_sec_coord] = identifier[lut_vars] [ literal[int] ] keyword[return] identifier[self] . identifier[_rayl] , identifier[self] . identifier[_wvl_coord] , identifier[self] . identifier[_azid_coord] , identifier[self] . identifier[_satz_sec_coord] , identifier[self] . identifier[_sunz_sec_coord]
def get_reflectance_lut(self): """Read the LUT with reflectances as a function of wavelength, satellite zenith secant, azimuth difference angle, and sun zenith secant """ if self._rayl is None: lut_vars = get_reflectance_lut(self.reflectance_lut_filename) self._rayl = lut_vars[0] self._wvl_coord = lut_vars[1] self._azid_coord = lut_vars[2] self._satz_sec_coord = lut_vars[3] self._sunz_sec_coord = lut_vars[4] # depends on [control=['if'], data=[]] return (self._rayl, self._wvl_coord, self._azid_coord, self._satz_sec_coord, self._sunz_sec_coord)
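The method above is a lazy-loading cache: the LUT file is read once and the five arrays are memoized on the instance. The same guard idiom in isolation, with an invented stand-in loader:

calls = {'n': 0}

def expensive_load(filename):   # stand-in for the real LUT reader
    calls['n'] += 1
    return ('rayl', 'wvl', 'azid', 'satz_sec', 'sunz_sec')

class LutHolder(object):
    def __init__(self, filename):
        self.filename = filename
        self._lut = None

    def get_lut(self):
        # read the file on first access only; later calls hit the cache
        if self._lut is None:
            self._lut = expensive_load(self.filename)
        return self._lut

holder = LutHolder('lut.h5')
holder.get_lut()
holder.get_lut()
assert calls['n'] == 1          # the loader ran exactly once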
def main():
    '''Main routine.'''
    # validate command line arguments
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        '--vmssname', '-n', required=True, action='store', help='VMSS Name')
    arg_parser.add_argument('--rgname', '-g', required=True, action='store',
                            help='Resource Group Name')
    arg_parser.add_argument('--details', '-a', required=False,
                            action='store_true', default=False,
                            help='Print all details')
    args = arg_parser.parse_args()
    name = args.vmssname
    rgname = args.rgname

    # Load Azure app defaults
    try:
        with open('azurermconfig.json') as config_file:
            config_data = json.load(config_file)
    except FileNotFoundError:
        print("Error: Expecting azurermconfig.json in current folder")
        sys.exit()

    tenant_id = config_data['tenantId']
    app_id = config_data['appId']
    app_secret = config_data['appSecret']
    subscription_id = config_data['subscriptionId']

    # authenticate
    access_token = azurerm.get_access_token(tenant_id, app_id, app_secret)

    # get rolling upgrade latest status
    upgrade_status = azurerm.get_vmss_rolling_upgrades(
        access_token, subscription_id, rgname, name)

    # print rolling upgrade status (the --details flag is parsed above but
    # currently does not alter the output)
    print(json.dumps(upgrade_status, sort_keys=False,
                     indent=2, separators=(',', ': ')))
def function[main, parameter[]]: constant[Main routine.] variable[arg_parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[arg_parser].add_argument, parameter[constant[--vmssname], constant[-n]]] call[name[arg_parser].add_argument, parameter[constant[--rgname], constant[-g]]] call[name[arg_parser].add_argument, parameter[constant[--details], constant[-a]]] variable[args] assign[=] call[name[arg_parser].parse_args, parameter[]] variable[name] assign[=] name[args].vmssname variable[rgname] assign[=] name[args].rgname variable[details] assign[=] name[args].details <ast.Try object at 0x7da1b0317f10> variable[tenant_id] assign[=] call[name[config_data]][constant[tenantId]] variable[app_id] assign[=] call[name[config_data]][constant[appId]] variable[app_secret] assign[=] call[name[config_data]][constant[appSecret]] variable[subscription_id] assign[=] call[name[config_data]][constant[subscriptionId]] variable[access_token] assign[=] call[name[azurerm].get_access_token, parameter[name[tenant_id], name[app_id], name[app_secret]]] variable[upgrade_status] assign[=] call[name[azurerm].get_vmss_rolling_upgrades, parameter[name[access_token], name[subscription_id], name[rgname], name[name]]] if compare[name[details] is constant[True]] begin[:] call[name[print], parameter[call[name[json].dumps, parameter[name[upgrade_status]]]]]
keyword[def] identifier[main] (): literal[string] identifier[arg_parser] = identifier[argparse] . identifier[ArgumentParser] () identifier[arg_parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[required] = keyword[True] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[arg_parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[required] = keyword[True] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[arg_parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[required] = keyword[False] , identifier[action] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[args] = identifier[arg_parser] . identifier[parse_args] () identifier[name] = identifier[args] . identifier[vmssname] identifier[rgname] = identifier[args] . identifier[rgname] identifier[details] = identifier[args] . identifier[details] keyword[try] : keyword[with] identifier[open] ( literal[string] ) keyword[as] identifier[config_file] : identifier[config_data] = identifier[json] . identifier[load] ( identifier[config_file] ) keyword[except] identifier[FileNotFoundError] : identifier[print] ( literal[string] ) identifier[sys] . identifier[exit] () identifier[tenant_id] = identifier[config_data] [ literal[string] ] identifier[app_id] = identifier[config_data] [ literal[string] ] identifier[app_secret] = identifier[config_data] [ literal[string] ] identifier[subscription_id] = identifier[config_data] [ literal[string] ] identifier[access_token] = identifier[azurerm] . identifier[get_access_token] ( identifier[tenant_id] , identifier[app_id] , identifier[app_secret] ) identifier[upgrade_status] = identifier[azurerm] . identifier[get_vmss_rolling_upgrades] ( identifier[access_token] , identifier[subscription_id] , identifier[rgname] , identifier[name] ) keyword[if] identifier[details] keyword[is] keyword[True] : identifier[print] ( identifier[json] . identifier[dumps] ( identifier[upgrade_status] , identifier[sort_keys] = keyword[False] , identifier[indent] = literal[int] , identifier[separators] =( literal[string] , literal[string] ))) keyword[else] : identifier[print] ( identifier[json] . identifier[dumps] ( identifier[upgrade_status] , identifier[sort_keys] = keyword[False] , identifier[indent] = literal[int] , identifier[separators] =( literal[string] , literal[string] )))
def main(): """Main routine.""" # validate command line arguments arg_parser = argparse.ArgumentParser() arg_parser.add_argument('--vmssname', '-n', required=True, action='store', help='VMSS Name') arg_parser.add_argument('--rgname', '-g', required=True, action='store', help='Resource Group Name') arg_parser.add_argument('--details', '-a', required=False, action='store_true', default=False, help='Print all details') args = arg_parser.parse_args() name = args.vmssname rgname = args.rgname # Load Azure app defaults try: with open('azurermconfig.json') as config_file: config_data = json.load(config_file) # depends on [control=['with'], data=['config_file']] # depends on [control=['try'], data=[]] except FileNotFoundError: print('Error: Expecting azurermconfig.json in current folder') sys.exit() # depends on [control=['except'], data=[]] tenant_id = config_data['tenantId'] app_id = config_data['appId'] app_secret = config_data['appSecret'] subscription_id = config_data['subscriptionId'] # authenticate access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) # get rolling upgrade latest status upgrade_status = azurerm.get_vmss_rolling_upgrades(access_token, subscription_id, rgname, name) # print rolling upgrade status (the --details flag is parsed above but currently does not alter the output) print(json.dumps(upgrade_status, sort_keys=False, indent=2, separators=(',', ': ')))
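The four keys main() reads from azurermconfig.json are tenantId, appId, appSecret and subscriptionId; a sketch that writes a matching file (all values are placeholders):

import json

config = {
    'tenantId': '00000000-0000-0000-0000-000000000000',
    'appId': '00000000-0000-0000-0000-000000000000',
    'appSecret': '<service-principal-secret>',
    'subscriptionId': '00000000-0000-0000-0000-000000000000',
}
with open('azurermconfig.json', 'w') as config_file:
    json.dump(config, config_file, indent=4)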
def is_exchange(self, reaction_id): """Whether the given reaction is an exchange reaction.""" reaction = self.get_reaction(reaction_id) return (len(reaction.left) == 0) != (len(reaction.right) == 0)
def function[is_exchange, parameter[self, reaction_id]]: constant[Whether the given reaction is an exchange reaction.] variable[reaction] assign[=] call[name[self].get_reaction, parameter[name[reaction_id]]] return[compare[compare[call[name[len], parameter[name[reaction].left]] equal[==] constant[0]] not_equal[!=] compare[call[name[len], parameter[name[reaction].right]] equal[==] constant[0]]]]
keyword[def] identifier[is_exchange] ( identifier[self] , identifier[reaction_id] ): literal[string] identifier[reaction] = identifier[self] . identifier[get_reaction] ( identifier[reaction_id] ) keyword[return] ( identifier[len] ( identifier[reaction] . identifier[left] )== literal[int] )!=( identifier[len] ( identifier[reaction] . identifier[right] )== literal[int] )
def is_exchange(self, reaction_id): """Whether the given reaction is an exchange reaction.""" reaction = self.get_reaction(reaction_id) return (len(reaction.left) == 0) != (len(reaction.right) == 0)
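The != between the two emptiness tests is a boolean exclusive-or: the reaction counts as an exchange exactly when one side is empty and the other is not. A stand-in with bare lists reproduces the logic:

def is_exchange_like(left, right):
    # boolean XOR via !=: exactly one side of the reaction is empty
    return (len(left) == 0) != (len(right) == 0)

assert is_exchange_like([], ['co2_e'])        # uptake/secretion boundary
assert is_exchange_like(['glc_e'], [])
assert not is_exchange_like(['a'], ['b'])     # internal conversion
assert not is_exchange_like([], [])           # degenerate empty reaction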
def parse(cls, text): """ Parse the given text. Returns a tuple: (list_of_parts, start_pos_of_the_last_part). """ OUTSIDE, IN_DOUBLE, IN_SINGLE = 0, 1, 2 iterator = enumerate(text) state = OUTSIDE parts = [] current_part = '' part_start_pos = 0 for i, c in iterator: # XXX: correctly handle empty strings. if state == OUTSIDE: if c.isspace(): # New part. if current_part: parts.append(current_part) part_start_pos = i + 1 current_part = '' elif c == '"': state = IN_DOUBLE elif c == "'": state = IN_SINGLE else: current_part += c elif state == IN_SINGLE: if c == "'": state = OUTSIDE elif c == "\\": next(iterator) current_part += c else: current_part += c elif state == IN_DOUBLE: if c == '"': state = OUTSIDE elif c == "\\": next(iterator) current_part += c else: current_part += c parts.append(current_part) return parts, part_start_pos
def function[parse, parameter[cls, text]]: constant[ Parse the given text. Returns a tuple: (list_of_parts, start_pos_of_the_last_part). ] <ast.Tuple object at 0x7da2049601c0> assign[=] tuple[[<ast.Constant object at 0x7da204963130>, <ast.Constant object at 0x7da204961ba0>, <ast.Constant object at 0x7da204960610>]] variable[iterator] assign[=] call[name[enumerate], parameter[name[text]]] variable[state] assign[=] name[OUTSIDE] variable[parts] assign[=] list[[]] variable[current_part] assign[=] constant[] variable[part_start_pos] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da2049635b0>, <ast.Name object at 0x7da2049639a0>]]] in starred[name[iterator]] begin[:] if compare[name[state] equal[==] name[OUTSIDE]] begin[:] if call[name[c].isspace, parameter[]] begin[:] if name[current_part] begin[:] call[name[parts].append, parameter[name[current_part]]] variable[part_start_pos] assign[=] binary_operation[name[i] + constant[1]] variable[current_part] assign[=] constant[] call[name[parts].append, parameter[name[current_part]]] return[tuple[[<ast.Name object at 0x7da20c9924a0>, <ast.Name object at 0x7da20c990c40>]]]
keyword[def] identifier[parse] ( identifier[cls] , identifier[text] ): literal[string] identifier[OUTSIDE] , identifier[IN_DOUBLE] , identifier[IN_SINGLE] = literal[int] , literal[int] , literal[int] identifier[iterator] = identifier[enumerate] ( identifier[text] ) identifier[state] = identifier[OUTSIDE] identifier[parts] =[] identifier[current_part] = literal[string] identifier[part_start_pos] = literal[int] keyword[for] identifier[i] , identifier[c] keyword[in] identifier[iterator] : keyword[if] identifier[state] == identifier[OUTSIDE] : keyword[if] identifier[c] . identifier[isspace] (): keyword[if] identifier[current_part] : identifier[parts] . identifier[append] ( identifier[current_part] ) identifier[part_start_pos] = identifier[i] + literal[int] identifier[current_part] = literal[string] keyword[elif] identifier[c] == literal[string] : identifier[state] = identifier[IN_DOUBLE] keyword[elif] identifier[c] == literal[string] : identifier[state] = identifier[IN_SINGLE] keyword[else] : identifier[current_part] += identifier[c] keyword[elif] identifier[state] == identifier[IN_SINGLE] : keyword[if] identifier[c] == literal[string] : identifier[state] = identifier[OUTSIDE] keyword[elif] identifier[c] == literal[string] : identifier[next] ( identifier[iterator] ) identifier[current_part] += identifier[c] keyword[else] : identifier[current_part] += identifier[c] keyword[elif] identifier[state] == identifier[IN_DOUBLE] : keyword[if] identifier[c] == literal[string] : identifier[state] = identifier[OUTSIDE] keyword[elif] identifier[c] == literal[string] : identifier[next] ( identifier[iterator] ) identifier[current_part] += identifier[c] keyword[else] : identifier[current_part] += identifier[c] identifier[parts] . identifier[append] ( identifier[current_part] ) keyword[return] identifier[parts] , identifier[part_start_pos]
def parse(cls, text): """ Parse the given text. Returns a tuple: (list_of_parts, start_pos_of_the_last_part). """ (OUTSIDE, IN_DOUBLE, IN_SINGLE) = (0, 1, 2) iterator = enumerate(text) state = OUTSIDE parts = [] current_part = '' part_start_pos = 0 for (i, c) in iterator: # XXX: correctly handle empty strings. if state == OUTSIDE: if c.isspace(): # New part. if current_part: parts.append(current_part) # depends on [control=['if'], data=[]] part_start_pos = i + 1 current_part = '' # depends on [control=['if'], data=[]] elif c == '"': state = IN_DOUBLE # depends on [control=['if'], data=[]] elif c == "'": state = IN_SINGLE # depends on [control=['if'], data=[]] else: current_part += c # depends on [control=['if'], data=['state']] elif state == IN_SINGLE: if c == "'": state = OUTSIDE # depends on [control=['if'], data=[]] elif c == '\\': next(iterator) current_part += c # depends on [control=['if'], data=['c']] else: current_part += c # depends on [control=['if'], data=['state']] elif state == IN_DOUBLE: if c == '"': state = OUTSIDE # depends on [control=['if'], data=[]] elif c == '\\': next(iterator) current_part += c # depends on [control=['if'], data=['c']] else: current_part += c # depends on [control=['if'], data=['state']] # depends on [control=['for'], data=[]] parts.append(current_part) return (parts, part_start_pos)
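A few worked calls for parse(), obtained by tracing the state machine above; cls is unused for plain splitting, so None is passed (upstream this is presumably a classmethod). The last case shows the empty-string behaviour the XXX comment flags.

assert parse(None, 'ab cd') == (['ab', 'cd'], 3)
assert parse(None, 'say "hello world"') == (['say', 'hello world'], 4)
assert parse(None, 'ab ') == (['ab', ''], 3)   # trailing space: empty last part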