text
stringlengths
4
1.02M
meta
dict
from flask import Blueprint, request, render_template, flash, redirect, url_for, g, make_response, jsonify from flask_login import login_required, current_user from flask_login import login_user from flask_login import logout_user from app import db, lm from flask_cors import cross_origin from app.forms.login_user import LoginUser from werkzeug.security import check_password_hash # instantiate the module's blueprint users = Blueprint('users', __name__, template_folder='users', url_prefix='/users') @lm.user_loader def load_user(_id): return db.User.find_one({'_id': _id}) # sets the global variable each time a request is received @users.before_request def before_request(): g.user = current_user @cross_origin() @users.route('/', methods=['GET', 'POST']) @login_required def get_users(): return make_response(jsonify([user for user in db.User.find()]), 200) @cross_origin() @users.route('/count/', methods=['GET', 'POST']) @login_required def get_users_count(): return make_response(jsonify(db['users'].count())) @cross_origin() @users.route('/<string:_id>/', methods=['GET', 'POST']) @login_required def get_user(_id): return make_response(jsonify(db.User.find_one_or_404({'_id': _id})), 200) @users.route('/login/', methods=['GET', 'POST']) def login_mistt_user(): form = LoginUser() # check for HTTP method and valid form submission if request.method == 'POST': if not form.validate_on_submit(): flash('Please enter some data!', category='error') else: # check if user exists user = db.User.find_one({'_id': form.id.data}) if user is not None: user = db.User(user) if not user['confirmed']: flash('Check your email for an account confirmation email.', category='error') elif check_password_hash(user['password'], form.password.data) and user['confirmed']: login_user(user) flash('Logged in successfully!', category='success') return redirect(request.args.get('next') or url_for('index')) else: flash('Incorrect password!', category='error') else: flash('No user by that name!', category='error') return render_template('users/login.html', title='login', form=form) @users.route('/logout/') def logout_mistt_user(): logout_user() return redirect(url_for('index'))
{ "content_hash": "6525da76a97c2aa55af4466c0c7285cc", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 106, "avg_line_length": 33.608108108108105, "alnum_prop": 0.634097305991154, "repo_name": "michaelnetbiz/mistt-solution", "id": "a28974c16de0a20c8f8031a5bea1dc414f0b34cb", "size": "2487", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/controllers/users.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "13533" }, { "name": "HTML", "bytes": "29977" }, { "name": "JavaScript", "bytes": "339802" }, { "name": "Python", "bytes": "78151" } ], "symlink_target": "" }
import numpy as np import numpy.random as nr class Exploration(object): def __init__(self, env): self.action_size = env.action_space.shape[0] def add_noise(self, action, info={}): pass def reset(self): pass class OUExploration(Exploration): # Reference: https://github.com/rllab/rllab/blob/master/rllab/exploration_strategies/ou_strategy.py def __init__(self, env, sigma=0.3, mu=0, theta=0.15): super(OUExploration, self).__init__(env) self.mu = mu self.theta = theta self.sigma = sigma self.state = np.ones(self.action_size) * self.mu self.reset() def add_noise(self, action, info={}): x = self.state dx = self.theta * (self.mu - x) + self.sigma * nr.randn(len(x)) self.state = x + dx return action + self.state def reset(self): self.state = np.ones(self.action_size) * self.mu class LinearDecayExploration(Exploration): def __init__(self, env): super(LinearDecayExploration, self).__init__(env) def add_noise(self, action, info={}): return action + np.random.randn(self.action_size) / (info['idx_episode'] + 1) class BrownianExploration(Exploration): def __init__(self, env, noise_scale): super(BrownianExploration, self).__init__(env) raise Exception('not implemented yet')
{ "content_hash": "89b3612f2c34768829e07c0578b668e8", "timestamp": "", "source": "github", "line_count": 48, "max_line_length": 101, "avg_line_length": 26.729166666666668, "alnum_prop": 0.661730319563523, "repo_name": "carpedm20/NAF-tensorflow", "id": "7bc0867d9cc87d29e3a680a60648113c1d68b973", "size": "1283", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/exploration.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "28725" }, { "name": "Shell", "bytes": "273" } ], "symlink_target": "" }
from flask import Flask from mono.app import init_app from mono.logger import init_task_logger def init_task_app(): app = init_app(is_register_api=False) init_task_logger(app, app.config.get('TASK_LOG_FILE', "monoreader_task.log")) return app task_app = init_task_app()
{ "content_hash": "c6fcf9b0bae31e6fa8d632033051040a", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 81, "avg_line_length": 25.90909090909091, "alnum_prop": 0.7157894736842105, "repo_name": "ragnraok/MonoReader", "id": "2f50add5fb90ddd216372a03d09322abf1223ddd", "size": "285", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "monoweb/mono/task/base.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1146" }, { "name": "Groovy", "bytes": "1784" }, { "name": "Java", "bytes": "172840" }, { "name": "Python", "bytes": "66709" } ], "symlink_target": "" }
def hello_string(): return 'hello!' def hello(): print(hello_string())
{ "content_hash": "28f5b3eac1e1456fb30eb555b162fdb3", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 23, "avg_line_length": 12.833333333333334, "alnum_prop": 0.6363636363636364, "repo_name": "tdyas/pants", "id": "75698dfa3e0ccc88a10edf5d3db1d7a411f7dc02", "size": "210", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "testprojects/src/python/python_distribution/hello_with_install_requires/hello_package/hello.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "655" }, { "name": "C++", "bytes": "2010" }, { "name": "CSS", "bytes": "9444" }, { "name": "Dockerfile", "bytes": "5596" }, { "name": "GAP", "bytes": "1283" }, { "name": "Gherkin", "bytes": "919" }, { "name": "Go", "bytes": "2765" }, { "name": "HTML", "bytes": "44381" }, { "name": "Java", "bytes": "518180" }, { "name": "JavaScript", "bytes": "22906" }, { "name": "Python", "bytes": "7955590" }, { "name": "Rust", "bytes": "1031208" }, { "name": "Scala", "bytes": "106520" }, { "name": "Shell", "bytes": "109904" }, { "name": "Starlark", "bytes": "502255" }, { "name": "Thrift", "bytes": "2953" } ], "symlink_target": "" }
"""BaseModelCardField. This class serves as a basic shared API between all Model Card data classes ( see model_card.py). """ import abc import dataclasses import json as json_lib from typing import Any, Dict from model_card_toolkit.utils import validation from google.protobuf import descriptor from google.protobuf import message class BaseModelCardField(abc.ABC): """Model card field base class. This is an abstract class. All the model card fields should inherit this class and override the _proto_type property to the corresponding proto type. This abstract class provides methods `copy_from_proto`, `merge_from_proto` and `to_proto` to convert the class from and to proto. The child class does not need to override this unless it needs some special process. """ @property @abc.abstractmethod def _proto_type(self): """The proto type. Child class should overwrite this.""" def to_proto(self) -> message.Message: """Convert this class object to the proto.""" proto = self._proto_type() for field_name, field_value in self.__dict__.items(): if not hasattr(proto, field_name): raise ValueError("%s has no such field named '%s'." % (type(proto), field_name)) if not field_value: continue field_descriptor = proto.DESCRIPTOR.fields_by_name[field_name] # Process Message type. if field_descriptor.type == descriptor.FieldDescriptor.TYPE_MESSAGE: if field_descriptor.label == descriptor.FieldDescriptor.LABEL_REPEATED: for nested_message in field_value: getattr(proto, field_name).add().CopyFrom(nested_message.to_proto()) # pylint: disable=protected-access else: getattr(proto, field_name).CopyFrom(field_value.to_proto()) # pylint: disable=protected-access # Process Non-Message type else: if field_descriptor.label == descriptor.FieldDescriptor.LABEL_REPEATED: getattr(proto, field_name).extend(field_value) else: setattr(proto, field_name, field_value) return proto def _from_proto(self, proto: message.Message) -> "BaseModelCardField": """Convert proto to this class object.""" if not isinstance(proto, self._proto_type): raise TypeError("%s is expected. However %s is provided." % (self._proto_type, type(proto))) for field_descriptor in proto.DESCRIPTOR.fields: field_name = field_descriptor.name if not hasattr(self, field_name): raise ValueError("%s has no such field named '%s.'" % (self, field_name)) # Process Message type. if field_descriptor.type == descriptor.FieldDescriptor.TYPE_MESSAGE: if field_descriptor.label == descriptor.FieldDescriptor.LABEL_REPEATED: # Clean the list first. setattr(self, field_name, []) for p in getattr(proto, field_name): # To get the type hint of a list is not easy. field = self.__annotations__[field_name].__args__[0]() # pytype: disable=attribute-error field._from_proto(p) # pylint: disable=protected-access getattr(self, field_name).append(field) elif proto.HasField(field_name): getattr(self, field_name)._from_proto(getattr(proto, field_name)) # pylint: disable=protected-access # Process Non-Message type else: if field_descriptor.label == descriptor.FieldDescriptor.LABEL_REPEATED: setattr(self, field_name, getattr(proto, field_name)[:]) elif proto.HasField(field_name): setattr(self, field_name, getattr(proto, field_name)) return self def merge_from_proto(self, proto: message.Message) -> "BaseModelCardField": """Merges the contents of the model card proto into current object.""" current = self.to_proto() current.MergeFrom(proto) self.clear() return self._from_proto(current) def copy_from_proto(self, proto: message.Message) -> "BaseModelCardField": """Copies the contents of the model card proto into current object.""" self.clear() return self._from_proto(proto) def _from_json(self, json_dict: Dict[str, Any], field: "BaseModelCardField") -> "BaseModelCardField": """Parses a JSON dictionary into the current object.""" for subfield_key, subfield_json_value in json_dict.items(): if subfield_key.startswith(validation.SCHEMA_VERSION_STRING): continue elif not hasattr(field, subfield_key): raise ValueError( "BaseModelCardField %s has no such field named '%s.'" % (field, subfield_key)) elif isinstance(subfield_json_value, dict): subfield_value = self._from_json( subfield_json_value, getattr(field, subfield_key)) elif isinstance(subfield_json_value, list): subfield_value = [] for item in subfield_json_value: if isinstance(item, dict): new_object = field.__annotations__[subfield_key].__args__[0]() # pytype: disable=attribute-error subfield_value.append(self._from_json(item, new_object)) else: # if primitive subfield_value.append(item) else: subfield_value = subfield_json_value setattr(field, subfield_key, subfield_value) return field def to_json(self) -> str: """Convert this class object to json.""" return json_lib.dumps(self.to_dict(), indent=2) def to_dict(self) -> Dict[str, Any]: """Convert your model card to a python dictionary.""" # ignore None properties recursively to allow missing values. ignore_none = lambda properties: {k: v for k, v in properties if v} return dataclasses.asdict(self, dict_factory=ignore_none) def clear(self): """Clear the subfields of this BaseModelCardField.""" for field_name, field_value in self.__dict__.items(): if isinstance(field_value, BaseModelCardField): field_value.clear() elif isinstance(field_value, list): setattr(self, field_name, []) else: setattr(self, field_name, None) @classmethod def _get_type(cls, obj: Any): return type(obj)
{ "content_hash": "cf35f4528422b51112154fb964f97505", "timestamp": "", "source": "github", "line_count": 156, "max_line_length": 116, "avg_line_length": 39.6474358974359, "alnum_prop": 0.6585286984640258, "repo_name": "tensorflow/model-card-toolkit", "id": "5d4e46e6d88ab8ec6a775fccd82a4a11019a415e", "size": "6760", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "model_card_toolkit/base_model_card_field.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "80677" }, { "name": "Jinja", "bytes": "13576" }, { "name": "Python", "bytes": "196136" }, { "name": "Shell", "bytes": "1858" }, { "name": "Starlark", "bytes": "3039" } ], "symlink_target": "" }
""" MINDBODY Public API No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501 OpenAPI spec version: v6 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import unittest import swagger_client from swagger_client.models.staff_permission_group import StaffPermissionGroup # noqa: E501 from swagger_client.rest import ApiException class TestStaffPermissionGroup(unittest.TestCase): """StaffPermissionGroup unit test stubs""" def setUp(self): pass def tearDown(self): pass def testStaffPermissionGroup(self): """Test StaffPermissionGroup""" # FIXME: construct object with mandatory attributes with example values # model = swagger_client.models.staff_permission_group.StaffPermissionGroup() # noqa: E501 pass if __name__ == '__main__': unittest.main()
{ "content_hash": "827a431b24daf42fcd7693dd5d2460cf", "timestamp": "", "source": "github", "line_count": 38, "max_line_length": 119, "avg_line_length": 25.55263157894737, "alnum_prop": 0.7064881565396498, "repo_name": "mindbody/API-Examples", "id": "7af521101cb700a5f09f6b1d1ae6a8b48e14cc1d", "size": "988", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "SDKs/Python/test/test_staff_permission_group.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "PHP", "bytes": "3610259" }, { "name": "Python", "bytes": "2338642" }, { "name": "Ruby", "bytes": "2284441" }, { "name": "Shell", "bytes": "5058" } ], "symlink_target": "" }
""" from pymel.api.plugins import Command class testCmd(Command): def doIt(self, args): print "doIt..." testCmd.register() cmds.testCmd() testCmd.deregister() """ import sys import inspect import maya.OpenMayaMPx as mpx import maya.cmds global registeredCommands registeredCommands = [] def _pluginModule(): return inspect.getmodule( lambda: None ) def _pluginName(): return _pluginModule().__name__.split('.')[-1] def _pluginFile(): return inspect.getsourcefile( lambda:None ) # module = sys.modules[__name__] # print module, __name__ # return module.__file__ def _loadPlugin(): thisFile = _pluginFile() if not maya.cmds.pluginInfo( thisFile, query=1, loaded=1 ): maya.cmds.loadPlugin( thisFile ) def _unloadPlugin(): thisFile = _pluginFile() if maya.cmds.pluginInfo( thisFile, query=1, loaded=1 ): maya.cmds.unloadPlugin( thisFile ) def _getPlugin(): _loadPlugin() mobject = mpx.MFnPlugin.findPlugin( _pluginName() ) return mpx.MFnPlugin(mobject) class Command(mpx.MPxCommand): registeredCommands = [] def __init__(self): mpx.MPxCommand.__init__(self) @classmethod def creator(cls): return mpx.asMPxPtr( cls() ) @classmethod def register(cls, object=None): """ by default the command will be registered to a dummy plugin provided by pymel. If you if using from within a plugin's initializePlugin or uninitializePlugin callback, pass along the MObject given to these functions """ if object is None: plugin = _getPlugin() cls.registeredCommands.append( cls.__name__ ) else: plugin = mpx.MFnPlugin(object) if hasattr(cls, 'createSyntax'): plugin.registerCommand( cls.__name__, cls.creator, cls.createSyntax ) else: plugin.registerCommand( cls.__name__, cls.creator ) @classmethod def deregister(cls, object=None): """ if using from within a plugin's initializePlugin or uninitializePlugin callback, pass along the MObject given to these functions """ if object is None: plugin = _getPlugin() cls.registeredCommands.pop(cls.__name__) else: plugin = mpx.MFnPlugin(object) plugin.deregisterCommand( cls.__name__ ) # allow this file to be loaded as its own dummy plugin # Initialize the script plug-in def initializePlugin(mobject): pass # Uninitialize the script plug-in def uninitializePlugin(mobject): #print "getmodule", inspect.getmodule( None ) #mod = _pluginModule() #when uninitializePlugin is called it is execfile'd which changes the module in which this code runs. #we need to get the correct module first # FIXME: determine a reliable way to get this module's name when it is being execfile'd mod = sys.modules['pymel.api.plugins'] plugin = mpx.MFnPlugin(mobject) for cmd in mod.Command.registeredCommands: print "deregistering", cmd plugin.deregisterCommand(cmd) registeredCommands = [] #def _repoplulate(): # print "repopulate" # try: # global registeredCommands # commands = maya.cmds.pluginInfo(_pluginName(), query=1, command=1) # registeredCommands = registeredCommands # except: # pass # #_repoplulate() # when we reload, should we deregister all plugins??? or maybe we can just repopulate registeredCommands #_unloadPlugin()
{ "content_hash": "243b0533636133e7809f3811fdb03622", "timestamp": "", "source": "github", "line_count": 130, "max_line_length": 105, "avg_line_length": 27.076923076923077, "alnum_prop": 0.6539772727272727, "repo_name": "cgrebeld/pymel", "id": "6177852648e175bcee12275ed39f9f2620a823c0", "size": "3520", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pymel/api/plugins.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "2384715" } ], "symlink_target": "" }
<<<<<<< HEAD <<<<<<< HEAD # # iso2022_jp_ext.py: Python Unicode Codec for ISO2022_JP_EXT # # Written by Hye-Shik Chang <[email protected]> # import _codecs_iso2022, codecs import _multibytecodec as mbc codec = _codecs_iso2022.getcodec('iso2022_jp_ext') class Codec(codecs.Codec): encode = codec.encode decode = codec.decode class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): codec = codec class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): codec = codec class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): codec = codec class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): codec = codec def getregentry(): return codecs.CodecInfo( name='iso2022_jp_ext', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ======= # # iso2022_jp_ext.py: Python Unicode Codec for ISO2022_JP_EXT # # Written by Hye-Shik Chang <[email protected]> # import _codecs_iso2022, codecs import _multibytecodec as mbc codec = _codecs_iso2022.getcodec('iso2022_jp_ext') class Codec(codecs.Codec): encode = codec.encode decode = codec.decode class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): codec = codec class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): codec = codec class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): codec = codec class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): codec = codec def getregentry(): return codecs.CodecInfo( name='iso2022_jp_ext', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) >>>>>>> b875702c9c06ab5012e52ff4337439b03918f453 ======= # # iso2022_jp_ext.py: Python Unicode Codec for ISO2022_JP_EXT # # Written by Hye-Shik Chang <[email protected]> # import _codecs_iso2022, codecs import _multibytecodec as mbc codec = _codecs_iso2022.getcodec('iso2022_jp_ext') class Codec(codecs.Codec): encode = codec.encode decode = codec.decode class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): codec = codec class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): codec = codec class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): codec = codec class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): codec = codec def getregentry(): return codecs.CodecInfo( name='iso2022_jp_ext', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) >>>>>>> b875702c9c06ab5012e52ff4337439b03918f453
{ "content_hash": "b19a6d27069925af9b37b4e3871c7fc1", "timestamp": "", "source": "github", "line_count": 123, "max_line_length": 74, "avg_line_length": 27.211382113821138, "alnum_prop": 0.7012249775918733, "repo_name": "ArcherSys/ArcherSys", "id": "11343647272334fdf3f703bd5f54b360e916114f", "size": "3347", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Lib/encodings/iso2022_jp_ext.py", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
import sqlite3 con = sqlite3.connect('users.db') cur = con.cursor() cur.execute('CREATE TABLE users (id INTEGER PRIMARY KEY, firstName VARCHAR(100), secondName VARCHAR(30))') con.commit() cur.execute('INSERT INTO users (id, firstName, secondName) VALUES(NULL, "Guido", "van Rossum")') con.commit() print cur.lastrowid cur.execute('SELECT * FROM users') print cur.fetchall() con.close()
{ "content_hash": "a49befecbc53799579ca228748cebcf4", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 106, "avg_line_length": 29.76923076923077, "alnum_prop": 0.7390180878552972, "repo_name": "janusnic/21v-python", "id": "f2501e0313498c967aeb2ff0e83c4424737963e9", "size": "412", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "unit_14/user1.py", "mode": "33188", "license": "mit", "language": [ { "name": "Makefile", "bytes": "369" }, { "name": "Python", "bytes": "990972" }, { "name": "SQLPL", "bytes": "147" } ], "symlink_target": "" }
import numpy as np from . import defHeaders_CBox_v3 as defHeaders from . import test_suite CBox = None class CBox_tests_v3(test_suite.CBox_tests): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.loadSaveDataFile = False @classmethod def setUpClass(self): self.CBox = CBox def LoadSavedData(self): if(not self.loadSaveDataFile): try: DataFile = np.load("SaveData.npz") self.loadSaveDataFile = True self.SavedInputAvgRes0 = DataFile['SavedInputAvgRes0'] self.SavedIntLogResult0_8 = DataFile['SavedIntLogResult0_8'] self.SavedIntLogResult1_8 = DataFile['SavedIntLogResult1_8'] self.SavedIntLogResult0_200 = \ DataFile['SavedIntLogResult0_200'] self.SavedIntLogResult1_200 = \ DataFile['SavedIntLogResult1_200'] self.SavedIntAvgResult0 = DataFile['SavedIntAvgResult0'] self.SavedIntAvgResult1 = DataFile['SavedIntAvgResult1'] self.SavedCh0Counters = DataFile['SavedCh0Counters'] self.SavedCh0Result = DataFile['SavedCh0Result'] self.SavedCh1Result = DataFile['SavedCh1Result'] self.SavedCh1Counters = DataFile['SavedCh1Counters'] self.SavedTimingtapeResult0 = \ DataFile['SavedTimingtapeResult0'] self.SavedTimingtapeResult7 = \ DataFile['SavedTimingtapeResult7'] except: print("Cannot open the saved data file.") self.loadSaveDataFile = False self.assertTrue(False) def test_parameter_read_back(self): for par in ['core_state', 'trigger_source', 'adc_offset', 'signal_delay', 'integration_length', 'lin_trans_coeffs', 'sig0_threshold_line', 'sig1_threshold_line', 'log_length', 'nr_samples', 'nr_averages']: p = self.CBox.get(par) self.CBox.set(par, p) self.CBox.firmware_version.get() p2 = self.CBox.get(par) np.testing.assert_array_equal(p, p2) def test_firmware_version(self): v = self.CBox.get('firmware_version') self.assertTrue(int(v[1]) == 3) # major version self.assertTrue(int(v[3]) == 2) # minor version def test_setting_mode(self): # test acquisition_mode for i in range(len(defHeaders.acquisition_modes)): self.CBox.set('acquisition_mode', i) self.assertEqual(self.CBox.get('acquisition_mode'), defHeaders.acquisition_modes[i]) # test setting core state to 'active' self.CBox.set('core_state', 'active') self.assertEqual(self.CBox.get('core_state'), defHeaders.core_states[1]) # test run mode for i in range(len(defHeaders.run_modes)): self.CBox.set('run_mode', i) self.assertEqual(self.CBox.get('run_mode'), defHeaders.run_modes[i]) # test setting core state to 'idle' self.CBox.set('core_state', 'idle') self.assertEqual(self.CBox.get('core_state'), defHeaders.core_states[0]) # test trigger source for i in range(3): self.CBox.set('trigger_source', i) self.assertEqual(self.CBox.get('trigger_source'), defHeaders.trigger_sources[i]) # test awg mode for j in range(3): for i in range(len(defHeaders.awg_modes)): self.CBox.set('AWG{}_mode'.format(j), i) self.assertEqual(self.CBox.get('AWG{}_mode'.format(j)), defHeaders.awg_modes[i]) self.CBox.set_master_controller_working_state(0, 0, 0) self.CBox.set('acquisition_mode', 'idle') for j in range(3): self.CBox.set('AWG{}_mode'.format(j), 0) def test_input_avg_mode(self): self.LoadSavedData() sine_numbers = 8 waveLength = 120 sine_waves = [[0]*(waveLength+1) for i in range(sine_numbers)] cosine_waves = [[0]*(waveLength+1) for i in range(sine_numbers)] for sin_nr in range(sine_numbers): for sample_nr in range(waveLength): sine_waves[sin_nr][sample_nr] = \ min(2**13-1, np.floor(6*1024*np.sin( sample_nr/float(waveLength)*2*np.pi*(sin_nr+1)))) cosine_waves[sin_nr][sample_nr] = \ min(2**13-1, np.floor(6*1024*np.cos( sample_nr/float(waveLength)*2*np.pi*(sin_nr+1)))) for awg_nr in range(3): for dac_nr in range(2): for pulse_nr in range(8): self.CBox.set_awg_lookuptable( awg_nr, pulse_nr, dac_nr, sine_waves[pulse_nr], unit='dac', length=waveLength) self.CBox.set('acquisition_mode', 'idle') # set to idle state self.CBox.set_master_controller_working_state(0, 0, 0) self.CBox.load_instructions('programs\\input_avg.asm') self.CBox.set_master_controller_working_state(1, 0, 0) self.CBox.set('run_mode', 1) NoSamples = 400 self.CBox.set("nr_samples", NoSamples) self.CBox.set('nr_averages', 2**4) self.CBox.set('signal_delay', 0) self.CBox.set('acquisition_mode', 'input averaging') [InputAvgRes0, InputAvgRes1] = self.CBox.get_input_avg_results() self.assertTrue(self.Appx_Cmp_Wave(InputAvgRes0, self.SavedInputAvgRes0)) def test_Integration_logging(self): self.LoadSavedData() # initalizing waveform LUT in awgs triggerlength = 2 plane = [-6*1024]*(triggerlength) for awg_nr in range(3): for dac_nr in range(2): for pulse_nr in range(8): self.CBox.set_awg_lookuptable(awg_nr, pulse_nr, dac_nr, plane, unit='dac', length=triggerlength-1) # Setting parameters. integration_length = 200 nr_samples = 9 self.CBox.set('signal_delay', 0) self.CBox.set('integration_length', integration_length) self.CBox.set('nr_averages', 2**4) self.CBox.set('nr_samples', nr_samples) self.CBox.set('lin_trans_coeffs', [1, 0, 0, 1]) self.CBox.set('adc_offset', -1) weights0 = 1*np.ones(512) weights1 = 2*np.ones(512) self.CBox.set('sig0_integration_weights', weights0) self.CBox.set('sig1_integration_weights', weights1) self.CBox.set('log_length', 8) self.CBox.set('acquisition_mode', 'idle') self.CBox.set_master_controller_working_state(0, 0, 0) self.CBox.load_instructions('programs\\int_log.txt') self.CBox.set_master_controller_working_state(1, 0, 0) self.CBox.set('acquisition_mode', 'integration logging') self.CBox.set('run_mode', 1) [IntLogResult0_8, IntLogResult1_8] = \ self.CBox.get_integration_log_results() weights1 = 0*np.ones(512) self.CBox.set('sig1_integration_weights', weights1) self.CBox.set('log_length', 200) self.CBox.set('acquisition_mode', 'idle') self.CBox.set_master_controller_working_state(0, 0, 0) self.CBox.load_instructions('programs\\int_log.txt') self.CBox.set_master_controller_working_state(1, 0, 0) self.CBox.set('acquisition_mode', 'integration logging') self.CBox.set('run_mode', 1) [IntLogResult0_200, IntLogResult1_200] = \ self.CBox.get_integration_log_results() self.CBox.set('acquisition_mode', 'idle') self.assertTrue(self.Appx_Cmp_Wave(IntLogResult0_8, self.SavedIntLogResult0_8)) self.assertTrue(self.Appx_Cmp_Wave(IntLogResult1_8, self.SavedIntLogResult1_8)) self.assertTrue(self.Appx_Cmp_Wave(IntLogResult0_200, self.SavedIntLogResult0_200)) self.assertTrue(self.Acc_Cmp_Wave(IntLogResult1_200, self.SavedIntLogResult1_200)) def test_integration_average_mode(self): self.LoadSavedData() triggerlength = 20 plane = [-6*1024]*(triggerlength) for awg_nr in range(3): for dac_nr in range(2): for pulse_nr in range(8): self.CBox.set_awg_lookuptable(awg_nr, pulse_nr, dac_nr, plane, unit='dac', length=triggerlength-1) # load instructions self.CBox.set_master_controller_working_state(0, 0, 0) self.CBox.load_instructions('programs\\int_avg.txt') # Set the parameters integration_length = 500 nr_samples = 9 self.CBox.set('signal_delay', 0) self.CBox.set('integration_length', integration_length) self.CBox.set('nr_averages', 2**4) self.CBox.set('nr_samples', nr_samples) self.CBox.set('lin_trans_coeffs', [1, 0, 0, 1]) self.CBox.set('adc_offset', 0) # set the integration weights weights0 = 1*np.ones(512) # weights1 = np.zeros(512) weights1 = -1*np.ones(512) self.CBox.set('sig0_integration_weights', weights0) self.CBox.set('sig1_integration_weights', weights1) # Perform integration average and plot the result self.CBox.set('acquisition_mode', 'idle') self.CBox.set_master_controller_working_state(0, 0, 0) self.CBox.set_master_controller_working_state(1, 0, 0) self.CBox.set('acquisition_mode', 'integration averaging') self.CBox.set('run_mode', 1) [IntAvgRst0, IntAvgRst1] = self.CBox.get_integrated_avg_results() self.CBox.set('acquisition_mode', 'idle') self.assertTrue(self.Appx_Cmp_Wave(IntAvgRst0, self.SavedIntAvgResult0)) self.assertTrue(self.Appx_Cmp_Wave(IntAvgRst1, self.SavedIntAvgResult1)) def test_state_logging_and_counters(self): self.LoadSavedData() # initalizing waveform LUT in awgs triggerlength = 20 plane = [-6*1024]*(triggerlength) for awg_nr in range(3): for dac_nr in range(2): for pulse_nr in range(8): self.CBox.set_awg_lookuptable(awg_nr, pulse_nr, dac_nr, plane, unit='dac', length=triggerlength-1) # load instructions self.CBox.set_master_controller_working_state(0, 0, 0) self.CBox.load_instructions('programs\\QubitStateLog.asm') # Set the parameters integration_length = 500 self.CBox.set('log_length', 100) self.CBox.set('signal_delay', 0) self.CBox.set('integration_length', integration_length) self.CBox.set('lin_trans_coeffs', [1, 0, 0, 1]) self.CBox.set('adc_offset', 0) self.CBox.set('sig0_threshold_line', 10000) self.CBox.set('sig1_threshold_line', 1000000) # set the integration weights weights0 = 1*np.ones(512) weights1 = -1*np.ones(512) self.CBox.set('sig0_integration_weights', weights0) self.CBox.set('sig1_integration_weights', weights1) # Perform integration average and plot the result self.CBox.set('acquisition_mode', 'idle') self.CBox.set_master_controller_working_state(0, 0, 0) self.CBox.set_master_controller_working_state(1, 0, 0) self.CBox.set('acquisition_mode', 'integration logging') self.CBox.set('run_mode', 1) [ch0_counters, ch1_counters] = self.CBox.get_qubit_state_log_counters() [ch0_result, ch1_result] = self.CBox.get_qubit_state_log_results() self.CBox.set('acquisition_mode', 'idle') self.assertEqual(ch0_counters, self.SavedCh0Counters) self.assertEqual(ch1_counters, self.SavedCh1Counters) self.assertEqual(ch0_result, self.SavedCh0Result) self.assertEqual(ch1_result, self.SavedCh1Result) def test_tape(self): self.LoadSavedData() self.loadSineWaves(100) for i in range(7): tape = [] tape.extend(self.CBox.create_timing_tape_entry(0, 7, False)) for j in range(i+1): tape.extend(self.CBox.create_timing_tape_entry(i*10, i, False)) tape.extend(self.CBox.create_timing_tape_entry(0, 7, True)) for awg_nr in range(3): self.CBox.set_conditional_tape(awg_nr, i, tape) tape = [] for i in range(8): tape.extend(self.CBox.create_timing_tape_entry(0, 0, False)) tape.extend(self.CBox.create_timing_tape_entry(0, i, True)) for awg_nr in range(3): self.CBox.set_segmented_tape(awg_nr, tape) self.CBox.AWG0_mode.set('tape') self.CBox.AWG1_mode.set('tape') self.CBox.AWG2_mode.set('tape') self.CBox.set('acquisition_mode', 'idle') self.CBox.set_master_controller_working_state(0, 0, 0) self.CBox.load_instructions('programs\\TimingTapeTest0.asm') self.CBox.set_master_controller_working_state(1, 0, 0) NoSamples = 400 self.CBox.set("nr_samples", NoSamples) self.CBox.set('nr_averages', 2**4) self.CBox.set('signal_delay', 0) self.CBox.set('acquisition_mode', 'input averaging') self.CBox.set('run_mode', 1) [TimingtapeResult0, _] = self.CBox.get_input_avg_results() self.CBox.set('acquisition_mode', 'idle') self.CBox.set_master_controller_working_state(0, 0, 0) self.CBox.load_instructions('programs\\TimingTapeTest7.asm') self.CBox.set_master_controller_working_state(1, 0, 0) NoSamples = 400 self.CBox.set("nr_samples", NoSamples) self.CBox.set('nr_averages', 2**10) self.CBox.set('signal_delay', 0) self.CBox.set('acquisition_mode', 'input averaging') self.CBox.set('run_mode', 1) [TimingtapeResult7, _] = self.CBox.get_input_avg_results() assert(self.Appx_Cmp_Wave(self.SavedTimingtapeResult0, TimingtapeResult0)) assert(self.Appx_Cmp_Wave(self.SavedTimingtapeResult7, TimingtapeResult7)) def loadSineWaves(self, waveLength=120): sine_numbers = 8 sine_waves = [[0]*(waveLength+1) for i in range(sine_numbers)] cosine_waves = [[0]*(waveLength+1) for i in range(sine_numbers)] for sin_nr in range(sine_numbers): for sample_nr in range(waveLength): sine_waves[sin_nr][sample_nr] = np.floor( -6*1024*np.sin( sample_nr/float(waveLength)*2*np.pi*(sin_nr+1))) cosine_waves[sin_nr][sample_nr] = np.floor( -6*1024*np.cos( sample_nr/float(waveLength)*2*np.pi*(sin_nr+1))) for awg_nr in range(3): for pulse_nr in range(8): self.CBox.set_awg_lookuptable(awg_nr, pulse_nr, 0, sine_waves[pulse_nr], unit='dac', length=waveLength) self.CBox.set_awg_lookuptable(awg_nr, pulse_nr, 1, cosine_waves[pulse_nr], unit='dac', length=waveLength) def Appx_Cmp_Wave(self, wave1, wave2, allowedDev=0.02): (dev, pos) = self.MaxDev(wave1, wave2) if (dev < allowedDev): return True else: for i in range(len(wave1)): if ((wave1[0] == 0 and np.abs(wave1[1]) > 100) or (wave1[1] == 0 and np.abs(wave1[0]) > 100)): return False return True def Acc_Cmp_Wave(self, wave1, wave2): self.assertEqual(len(wave1), len(wave2)) return np.array_equal(wave1, wave2) def MaxDev(self, wave1, wave2): self.assertEqual(len(wave1), len(wave2)) wave_len = len(wave1) deviation = 0 pos = None max_wave = np.maximum(wave1, wave2) for i in range(wave_len): if (max_wave[i] != 0): dev = np.abs((wave1[i]-wave2[i])/max_wave[i]) if (dev > deviation): deviation = dev pos = i return deviation, pos
{ "content_hash": "0df12267b99610d018c169c94f993a43", "timestamp": "", "source": "github", "line_count": 405, "max_line_length": 79, "avg_line_length": 42.13086419753086, "alnum_prop": 0.5550606575631484, "repo_name": "DiCarloLab-Delft/PycQED_py3", "id": "4225aa05a48f786c2fe9132c91ad3fa12917439a", "size": "17063", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "deprecated/pycqed/instrument_drivers/physical_instruments/_controlbox/test_suite_v3.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "8748" }, { "name": "C++", "bytes": "8802" }, { "name": "Cython", "bytes": "8291" }, { "name": "OpenQASM", "bytes": "15894" }, { "name": "Python", "bytes": "7978715" }, { "name": "TeX", "bytes": "8" } ], "symlink_target": "" }
from socket import gaierror, gethostbyname from urlparse import urlparse from ansible.module_utils.basic import AnsibleModule # noqa: ignore=H303 from keystoneauth1.exceptions import MissingRequiredOptions import netaddr from openstack import connect DOCUMENTATION = """ --- module: service_discovery short_description: - Discovery module for MaaS that uses the OpenStack service catalog. description: - This module utilizes an OpenStack SDK connection to obtain the service catalog and create a set of facts based on the endpoint data. options: raxdc: description: - A boolean identifying whether the deployment is in a Rackspace data center (RDC) or customer data center (CDC). required: true internal_vip: description: - An IP address identifying the internal OpenStack VIP. required: true external_vip: description: - An IP address identifying the external OpenStack VIP. required: true author: - Nathan Pawelek (@npawelek) """ EXAMPLES = """ - name: Service discovery service_discovery: raxdc: False internal_vip: 172.29.236.100 external_vip: 172.99.120.153 """ class ServiceDiscovery(object): def __init__(self, module): self.module = module self.raxdc = module.params.get('raxdc') self.internal_vip = module.params.get('internal_vip') self.external_vip = module.params.get('external_vip') self.conn = self.build_sdk_connection() self.catalog_details = dict() self.cert_expiry = False self.cert_expiry_list = list(dict()) self.pnm = False self.api_endpoints = dict() self.maas_external_hostname = '' self.maas_external_ip_address = '' self.use_public = False self.cinder_backends = { "local": list(), "shared": list() } def build_sdk_connection(self): """ Create a universal connection to OpenStack with the OpenStack SDK. It will use the defined configuration from /root/.config/openstack/clouds.yaml. """ try: sdk_conn = connect(cloud='default', verify=False) except MissingRequiredOptions as e: message = ('Missing option in clouds.yaml: %s' % str(e)) self.module.fail_json(msg=message) else: return sdk_conn def parse_service_catalog(self): """ Parse the OpenStack service catalog and identify service components. """ try: catalog = self.conn.service_catalog items = ['protocol', 'port', 'address'] invalid_chars = '-_' for service in catalog: # NOTE(npawelek): Sanitize the service catalog name to remove # dashes and underscores service_name = service['name'] for c in invalid_chars: service_name = service_name.replace(c, '') for endpoint in service['endpoints']: url = urlparse(endpoint.get('url')) for item in items: key_name = "%s_%s_%s" % ( service_name, endpoint['interface'], item) if item == 'protocol': self.catalog_details[key_name] = str(url.scheme) elif item == 'port': self.parse_port(key_name, url) else: self.catalog_details[key_name] = str( url.netloc.split(':')[0]) except Exception as e: message = ('Issue parsing the service catalog. The following' 'error was received: %s' % str(e)) self.module.fail_json(msg=message) def parse_port(self, key, url): """ Identify endpoint port. If a port is not detected in the url, this attempts to use the protocol to associate a valid port. """ if not url.port: if url.scheme == 'https': self.catalog_details[key] = 443 elif url.scheme == 'http': self.catalog_details[key] = 80 else: raise Exception('Endpoint object has an unexpected port and' 'scheme: %s' % url) else: self.catalog_details[key] = url.port def generate_facts(self): """ Gather information based on data center (raxdc or cdc) and endpoint validation. This results in whether PNM should be enabled, a list of certificates to validate expiry on, and the associated endpoint targets for private or public pollers. It's assumed that RDC deployments are accessible from public Rackspace Monitoring pollers, so we only check the public interface first. If PNM is required, then iterate over internal interfaces. """ if self.raxdc: self.validate_endpoints(['public']) if self.pnm: self.validate_endpoints(['internal']) else: self.validate_endpoints(['public', 'internal']) # No CDC private endpoints found. Must validate public endpoints. if len(self.api_endpoints) == 0: self.use_public = True self.validate_endpoints(['public', 'internal'], use_public=self.use_public) # Set generic fallback vip depending on PNM if self.pnm and self.use_public is False: if self.internal_vip.replace('.', '').isdigit(): self.maas_external_hostname = self.internal_vip self.maas_external_ip_address = self.internal_vip else: vip_ip = self.get_url_ip_address(self.internal_vip) self.maas_external_hostname = self.internal_vip self.maas_external_ip_address = vip_ip else: if self.external_vip.replace('.', '').isdigit(): self.maas_external_hostname = self.external_vip self.maas_external_ip_address = self.external_vip else: vip_ip = self.get_url_ip_address(self.external_vip) self.maas_external_hostname = self.external_vip self.maas_external_ip_address = vip_ip def validate_endpoints(self, interface_list, use_public=False): """ Determine whether the endpoint is natively usable or requires additional overrides for the target URL and IP. This will run against both presented interfaces to detect values usable for both lb_api_checks and private_lb_api_checks. """ for interface in interface_list: # Only use address keys from catalog (netloc) string = "_%s_address" % interface # Walk the service catalog for key, value in self.catalog_details.items(): if string in key: # Detect IP address or hostname if value.replace('.', '').isdigit(): is_private = self.validate_private_ip(value) if is_private is False and self.raxdc is False: if use_public is True: self.pnm = True self.service_specific_overrides(key, value) else: pass elif is_private: self.pnm = True self.service_specific_overrides(key, value) else: self.service_specific_overrides(key, value) else: # Ensure the hostname is resolvable by the system url_ip = self.get_url_ip_address(value) # Validation for SSL cert expiry self.cert_expiry_check(key, value, url_ip) # Determine if the URL is private is_private = self.validate_private_ip(url_ip) # CDC environments should always have PNM enabled. # Skip if public endpoints have public addresses # which likely aren't accessible from PNM poller if is_private is False and self.raxdc is False: if use_public is True: self.pnm = True self.service_specific_overrides(key, value, url_ip) else: pass # Enable PNM and configure associated api facts elif is_private: self.pnm = True self.service_specific_overrides(key, value, url_ip) # Configure api facts for public pollers else: self.service_specific_overrides(key, value, url_ip) def get_url_ip_address(self, url): """Ensure the hostname is resolvable by the system""" try: url_ip = gethostbyname(url) return url_ip except gaierror as e: message = ('%s does not appear to be resolvable ' 'by DNS. Ensure the address is ' 'resolvable or add an entry to ' '/etc/hosts on all controller nodes. ' 'PNM Exception: %s') % (url, str(e)) self.module.fail_json(msg=message) except Exception as e: message = 'Failed to get URL ip address for %s. Error: %s' % ( url, str(e)) self.module.fail_json(msg=message) def cert_expiry_check(self, key, url, url_ip): """ Enable the certificate expiry check and create a unique list of endpoints for validating all certificates. """ endpoint = "%s_%s" % (key.split('_')[0], key.split('_')[1]) protocol_key = "%s_protocol" % endpoint # Ensure protocol is https if self.catalog_details[protocol_key] == 'https': self.cert_expiry = True # Determine if the URL (excluding port) is already in the cert list url_check = len( [i for i in self.cert_expiry_list if url in i.get('cert_url', '')] ) if url_check == 0: port_key = "%s_port" % endpoint cert_url = "https://%s:%s/" % (url, self.catalog_details[port_key]) cert_dict = { "cert_url": cert_url, "cert_ip": url_ip } self.cert_expiry_list.append(cert_dict) def validate_private_ip(self, address): """ Determine whether the associated IP address is valid and is an RFC 1918 private address (non-routable). """ try: ipaddr = netaddr.IPAddress(address) except netaddr.AddrFormatError as e: message = ('%s is not a proper IP address according to ' 'netaddr. PNM Exception: %s') % (address, str(e)) self.module.fail_json(msg=message) except Exception as e: message = 'Unable to validate IP %s. Error: %s' % (address, str(e)) self.module.fail_json(msg=message) else: if ipaddr.is_private(): return True else: return False def service_specific_overrides(self, key, value, url_ip=None): items = ['url', 'ip'] endpoint = "%s_%s" % (key.split('_')[0], key.split('_')[1]) for item in items: key_name = "%s_%s" % (endpoint, item) if item == 'url': self.set_full_address(key_name, value) else: if url_ip is None: self.api_endpoints[key_name] = value else: self.api_endpoints[key_name] = url_ip def set_full_address(self, key, address): """ Defines the full address of a specific endpoint. proto://address:port """ endpoint = "%s_%s" % (key.split('_')[0], key.split('_')[1]) for attr in 'protocol', 'port': attr_key = "%s_%s" % (endpoint, attr) if attr == 'protocol': protocol = self.catalog_details[attr_key] else: port = self.catalog_details[attr_key] self.api_endpoints[key] = "%s://%s:%s/" % (protocol, address, port) def get_cinder_backends(self): """Discovers hosts for local and/or shared block storage backend pools. Queries the OpenStack Block Storage API to identify all backend pools. Using the volume backend name (everything after #), hosts are split into local and/or shared volume backends. This will provide dynamic cinder-volume hosts for any nomenclature. Returns: A dict mapping of volume hosts within local and/or shared backends. For example: { 'local': [ 'infra02@midtier', 'infra01@midtier', 'infra03@midtier', 'infra03@ceph', 'infra01@ceph', 'infra02@ceph' ], 'shared': [] } """ cinder = self.conn.block_storage backend_pools = [str(bp.name) for bp in cinder.backend_pools()] backend_names = [i.split('#')[-1] for i in backend_pools] unique_backend_names = set(backend_names) unique_backend_counts = dict() for name in unique_backend_names: unique_backend_counts[name] = backend_names.count(name) for pool in backend_pools: host, backend_name = pool.split('#') if unique_backend_counts[backend_name] == 1: self.cinder_backends['shared'].append(host) else: self.cinder_backends['local'].append(host) def main(): module = AnsibleModule( argument_spec=dict( raxdc=dict(required=True, type='bool'), internal_vip=dict(required=True), external_vip=dict(required=True), cinder_discovery=dict(required=True, type='bool') ), supports_check_mode=False ) discovery = ServiceDiscovery(module) discovery.parse_service_catalog() discovery.generate_facts() if module.params.get('cinder_discovery') is True: discovery.get_cinder_backends() module.exit_json( changed=False, ansible_facts={ 'cert_expiry': discovery.cert_expiry, 'cert_expiry_list': discovery.cert_expiry_list, 'pnm': discovery.pnm, 'api_endpoints': discovery.api_endpoints, 'maas_external_hostname': discovery.maas_external_hostname, 'maas_external_ip_address': discovery.maas_external_ip_address, 'maas_cinder_local_backends': discovery.cinder_backends['local'], 'maas_cinder_shared_backends': discovery.cinder_backends['shared'] } ) if __name__ == '__main__': main()
{ "content_hash": "d2ea79636cb8325d9583c3c0100f01bb", "timestamp": "", "source": "github", "line_count": 415, "max_line_length": 79, "avg_line_length": 38.08433734939759, "alnum_prop": 0.5253400822524518, "repo_name": "npawelek/rpc-maas", "id": "37af49f7d9bea4eec7fa4bb9f061eace07f4493a", "size": "16411", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "playbooks/library/service_discovery.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "3215" }, { "name": "Python", "bytes": "368284" }, { "name": "Shell", "bytes": "50085" } ], "symlink_target": "" }
import os import redis import urlparse from werkzeug.wrappers import Request, Response from werkzeug.routing import Map, Rule from werkzeug.exceptions import HTTPException, NotFound from werkzeug.wsgi import SharedDataMiddleware from werkzeug.utils import redirect from jinja2 import Environment, FileSystemLoader class Shortly(object): def __init__(self, config): self.redis = redis.Redis(config['redis_host'], config['redis_port']) template_path = os.path.join(os.path.dirname(__file__), 'templates') self.jinja_env = Environment(loader=FileSystemLoader(template_path), autoescape=True) def render_template(self, template_name, **context): t = self.jinja_env.get_template(template_name) return Response(t.render(context), mimetype='text/html') def dispatch_request(self, request): return Response('Hello World!') def wsgi_app(self, environ, start_response): request = Request(environ) response = self.dispatch_request(request) return response(environ, start_response) def __call__(self, environ, start_response): return self.wsgi_app(environ, start_response) def create_app(redis_host='localhost', redis_port=6379, with_static=True): app = Shortly({ 'redis_host': redis_host, 'redis_port': redis_port }) if with_static: app.wsgi_app = SharedDataMiddleware(app.wsgi_app, { '/static': os.path.join(os.path.dirname(__file__), 'static') }) return app if __name__ == '__main__': from werkzeug.serving import run_simple app = create_app() run_simple('127.0.0.1', 5000, app, use_debugger=True, use_reloader=True)
{ "content_hash": "93278d1075f824982154f67c3ccf2712", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 93, "avg_line_length": 34.734693877551024, "alnum_prop": 0.6750881316098707, "repo_name": "zzeleznick/zzeleznick.github.io", "id": "1ef9b1d328fec13af25e394d092d32c86a40acf1", "size": "1702", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "python-practice/werk/werk.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "783941" }, { "name": "HTML", "bytes": "431328" }, { "name": "JavaScript", "bytes": "1331629" }, { "name": "Python", "bytes": "12348" }, { "name": "Ruby", "bytes": "841" } ], "symlink_target": "" }
""" Django settings for mango project. For more information on this file, see https://docs.djangoproject.com/en/1.7/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.7/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(__file__) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'eawzo#-6!t2zj=ilru8#z*2%efkx$qzr807#+y&b53m+&ciw6i' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'djangojs', 'rest_framework', 'todo', ) TEMPLATE_DIRS = ( os.path.join(BASE_DIR, 'templates'), ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'urls' WSGI_APPLICATION = 'wsgi.application' # Database # https://docs.djangoproject.com/en/1.7/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Internationalization # https://docs.djangoproject.com/en/1.7/topics/i18n/ LANGUAGE_CODE = 'es-mx' TIME_ZONE = 'America/Mexico_City' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.7/howto/static-files/ STATIC_URL = '/static/' STATICFILES_DIRS = ( "static", ) REST_FRAMEWORK = { # Use Django's standard `django.contrib.auth` permissions, # or allow read-only access for unauthenticated users. 'DEFAULT_PERMISSION_CLASSES': [ 'rest_framework.permissions.AllowAny' ], 'FILTER_BACKEND': 'rest_framework.filters.DjangoFilterBackend', }
{ "content_hash": "991de6735f9c44ad6a3488181ba9183f", "timestamp": "", "source": "github", "line_count": 104, "max_line_length": 71, "avg_line_length": 23.78846153846154, "alnum_prop": 0.7134195634599838, "repo_name": "joyinsky/tododjango", "id": "5faad083a24a112a6b1175ec72354ac5eb48dd9e", "size": "2474", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "settings.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "7788" }, { "name": "JavaScript", "bytes": "4199597" }, { "name": "Python", "bytes": "4834" }, { "name": "Ruby", "bytes": "902" }, { "name": "Shell", "bytes": "1577" } ], "symlink_target": "" }
from __future__ import annotations import unittest from airflow.api_connexion.schemas.health_schema import health_schema class TestHealthSchema(unittest.TestCase): def setUp(self): self.default_datetime = "2020-06-10T12:02:44+00:00" def test_serialize(self): payload = { "metadatabase": {"status": "healthy"}, "scheduler": { "status": "healthy", "latest_scheduler_heartbeat": self.default_datetime, }, } serialized_data = health_schema.dump(payload) assert serialized_data == payload
{ "content_hash": "4347938f272a41c8cc3dbaa29b279c96", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 69, "avg_line_length": 28.80952380952381, "alnum_prop": 0.6099173553719008, "repo_name": "cfei18/incubator-airflow", "id": "f1920e95c51df64815d3fd357eedd04f2d11902d", "size": "1390", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/api_connexion/schemas/test_health_schema.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "25980" }, { "name": "Dockerfile", "bytes": "72003" }, { "name": "HCL", "bytes": "3786" }, { "name": "HTML", "bytes": "173434" }, { "name": "JavaScript", "bytes": "143068" }, { "name": "Jinja", "bytes": "38808" }, { "name": "Jupyter Notebook", "bytes": "5482" }, { "name": "Mako", "bytes": "1339" }, { "name": "Python", "bytes": "22660683" }, { "name": "R", "bytes": "313" }, { "name": "Shell", "bytes": "312715" }, { "name": "TypeScript", "bytes": "472379" } ], "symlink_target": "" }
from flask import Blueprint, abort from application.db import api user_page = Blueprint('user_page', __name__) @user_page.route('/user/add') def add(): api.add() return "Add user done" @user_page.route('/user/show/<user_id>') def show(user_id): user = api.get(user_id) return "show"+ user.name
{ "content_hash": "6161769837b3b53026c98dc4a31b5d1b", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 44, "avg_line_length": 21.933333333333334, "alnum_prop": 0.6291793313069909, "repo_name": "shunliz/test", "id": "ab450f6b72986ec38a3791d76f15a37406799371", "size": "329", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "python/flask/application/user_page.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "57158" }, { "name": "Go", "bytes": "1697" }, { "name": "HTML", "bytes": "604781" }, { "name": "Java", "bytes": "83183" }, { "name": "JavaScript", "bytes": "573" }, { "name": "M4", "bytes": "1133" }, { "name": "Makefile", "bytes": "55574" }, { "name": "Python", "bytes": "139607" }, { "name": "Shell", "bytes": "98080" }, { "name": "TypeScript", "bytes": "2922" } ], "symlink_target": "" }
""" <fn-group> <fn fn-type="other"> <label>Additions and Corrections</label> <p>On page 10, where it was read:</p> <p>“Joao da Silva”</p> <p>Now reads:</p> <p>“João da Silva Santos”</p> </fn> </fn-group> """ class ArticleWithErrataNotes: def __init__(self, xmltree): self.xmltree = xmltree def footnotes(self, fn_types=None): _errata = [] if not fn_types: xpath_pattern = ".//fn-group//fn[@fn-type='other']" else: xpath_pattern = "|".join([".//fn-group//fn[@fn-type='{0}']".format(i) for i in fn_types]) for node in self.xmltree.xpath(xpath_pattern): _errata.append(Footnote(node)) return _errata class Footnote: def __init__(self, node): self.node = node @property def label(self): _label = '' try: fc = self.node.getchildren()[0] if fc.tag == 'label': _label = fc.text except IndexError: ... finally: return _label @property def text(self): return '\n'.join([t for t in self.node.itertext()])
{ "content_hash": "ec8221d12bc00d9df52d35f02f639a04", "timestamp": "", "source": "github", "line_count": 50, "max_line_length": 101, "avg_line_length": 23.54, "alnum_prop": 0.5072217502124045, "repo_name": "scieloorg/packtools", "id": "03c36a6098e1adae1f71e76c830436572b441985", "size": "1186", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packtools/sps/models/article_errata.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "CSS", "bytes": "11822" }, { "name": "Dockerfile", "bytes": "982" }, { "name": "HTML", "bytes": "34245" }, { "name": "Makefile", "bytes": "2421" }, { "name": "Python", "bytes": "2928346" }, { "name": "Shell", "bytes": "1508" }, { "name": "XSLT", "bytes": "273710" } ], "symlink_target": "" }
import time, sys import mote from moteCache import MoteCache if __name__ == "__main__": cache = MoteCache() cache.read() # print "GetMotes:", cache.getMotes() allDevs = mote.detectAllPossible() selectedMoteId = None for d in allDevs: if d in cache.getMotes(): selectedMoteId = d break if selectedMoteId == None: print "No motes found. Device ids:", allDevs, "cacheMoteIds:", cache.getMotes().keys() sys.exit() # mote = cache.getMotes().values()[0] mote = cache.getMotes()[selectedMoteId] mote.connect() mote.setLeds(1,0,0,0) mote.startReadThread() mote.irBasicModeOn() printPoints = False if "print" in sys.argv: printPoints = True while(1): if printPoints: pts = mote.extractNormalizedPoints() if len(pts) > 0: print pts time.sleep(1) mote.disconnect()
{ "content_hash": "bc52af5f64d8ee9b77abcbbabe53eb9f", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 95, "avg_line_length": 21.545454545454547, "alnum_prop": 0.580168776371308, "repo_name": "rpwagner/tiled-display", "id": "4ca2378c48ffee693beae6fabe939bc23ac65df5", "size": "948", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "flWii/ir.py", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
import os import random import string import time from eventlet import queue from oslo_log import log from dragonflow.common import utils as df_utils from dragonflow import conf as cfg from dragonflow.controller.common import constants as ctrl_const from dragonflow.db import api_nb from dragonflow.db import db_common from dragonflow.switch.drivers.ovs import datapath from dragonflow.tests import base from dragonflow.tests.common import app_testing_objects as test_objects from dragonflow.tests.common import clients from dragonflow.tests.common import constants as const from dragonflow.tests.common import utils LOG = log.getLogger(__name__) _publisher = None class DFTestBase(base.BaseTestCase): def setUp(self): super(DFTestBase, self).setUp() if os.environ.get('DF_FULLSTACK_USE_ENV'): try: self.neutron = clients.get_neutron_client_from_env() except KeyError as e: message = ('Cannot find environment variable %s. ' 'Have you sourced openrc?') LOG.error(message, e.args[0]) self.fail(message % e.args[0]) else: self.neutron = clients.get_neutron_client_from_cloud_config() self.neutron.format = 'json' # NOTE: Each env can only have one default subnetpool for each # IP version. if not self.get_default_subnetpool(): self.create_default_subnetpool() df_utils.config_parse() self.conf = cfg.CONF.df self.integration_bridge = self.conf.integration_bridge self._queue = queue.PriorityQueue() self.nb_api = api_nb.NbApi.get_instance() # As we are running in the same process over and over, # do not perform redundant calls to the subscriber if not self.nb_api.subscriber.is_running: self.nb_api.set_db_change_callback(self._db_change_callback) self.mgt_ip = self.conf.management_ip self.vswitch_api = utils.OvsTestApi(self.mgt_ip) self.vswitch_api.initialize(self._db_change_callback) if cfg.CONF.df.enable_selective_topology_distribution: self.start_subscribing() if cfg.CONF.df.enable_df_pub_sub: self._publish_log_event('started') self.addCleanup(self._publish_log_event, 'finished') self.dfdp = datapath.load_datapath() def _db_change_callback(self, table, key, action, value, topic=None): update = db_common.DbUpdate(table, key, action, value, topic=topic) self._queue.put(update) time.sleep(0) def _publish_log_event(self, event): global _publisher if _publisher is None: _publisher = self.get_publisher(port=23456) test_name = self.id() _publisher.send_event( db_common.DbUpdate( action=ctrl_const.CONTROLLER_LOG, table='testing', key=test_name, value='\n{0} {1}\n'.format(test_name, event), ), ) def _get_publisher(self, pubsub_driver_name): pub_sub_driver = df_utils.load_driver( pubsub_driver_name, df_utils.DF_PUBSUB_DRIVER_NAMESPACE) publisher = pub_sub_driver.get_publisher() publisher.initialize() return publisher def get_publisher(self, port=None): pubsub_driver_name = cfg.CONF.df.pub_sub_driver if port is not None: cfg.CONF.set_override('publisher_port', port, group='df') return self._get_publisher(pubsub_driver_name) def check_app_loaded(self, app_name): apps_list = cfg.CONF.df.apps_list if app_name in apps_list: return True return False def get_default_subnetpool(self): default_subnetpool = None subnetpool_filter = {'is_default': True, 'ip_version': 4} subnetpools = self.neutron.list_subnetpools( **subnetpool_filter).get('subnetpools') if subnetpools: default_subnetpool = subnetpools[0] return default_subnetpool def create_default_subnetpool(self): default_subnetpool = {'prefixes': ['10.0.0.0/8'], 'name': 'default_subnetpool_v4', 'is_default': True, 'default_prefixlen': 24} self.neutron.create_subnetpool( body={'subnetpool': default_subnetpool}) def start_subscribing(self): self._topology = test_objects.Topology(self.neutron, self.nb_api) self.addCleanup(self._topology.close) subnet = self._topology.create_subnet(cidr="192.168.200.0/24") port = subnet.create_port() utils.wait_until_true( lambda: port.name is not None, timeout=const.DEFAULT_RESOURCE_READY_TIMEOUT, exception=Exception('Port was not created') ) def stop_subscribing(self): if hasattr(self, '_topology'): self._topology.close() def _create_random_string(self, length=16): alphabet = string.printable ret = ''.join([random.choice(alphabet) for _ in range(length)]) return ret.encode('utf-8', 'ignore')
{ "content_hash": "1c9fac064d82bfaf9622674c55228fda", "timestamp": "", "source": "github", "line_count": 149, "max_line_length": 75, "avg_line_length": 35.355704697986575, "alnum_prop": 0.6148443432042521, "repo_name": "openstack/dragonflow", "id": "9e20b027d04e1a40b29cfba6385deb85d4290248", "size": "5841", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "dragonflow/tests/fullstack/test_base.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Awk", "bytes": "2386" }, { "name": "Dockerfile", "bytes": "690" }, { "name": "Mako", "bytes": "1053" }, { "name": "Python", "bytes": "1740942" }, { "name": "Ruby", "bytes": "4449" }, { "name": "Shell", "bytes": "70410" } ], "symlink_target": "" }
from django.db import models ADDRESS_DISPLAY_CHARS = 25 class ZipCode(models.Model): zip_code = models.CharField(db_index=True, max_length=10) latitude = models.DecimalField(db_index=True, max_digits=10, decimal_places=6) longitude = models.DecimalField(db_index=True, max_digits=10, decimal_places=6) state = models.CharField(max_length=2) city = models.CharField(max_length=30) def __unicode__(self): return self.zip_code
{ "content_hash": "75fedab7d1137d3cde2eb9b2c173660a", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 83, "avg_line_length": 41.54545454545455, "alnum_prop": 0.7177242888402626, "repo_name": "philipkimmey/django-geo", "id": "76bd22bb633c5a2d6a7f4ee9b2203b1a8cc1db74", "size": "457", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "django_geo/models.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "18182" } ], "symlink_target": "" }
""" A Django staticfiles post-processor for optimizing with RequireJS. Developed by Dave Hall. <http://www.etianen.com/> """ __version__ = (1, 0, 8)
{ "content_hash": "f65b8e55320714065b24c03ca9ad17ec", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 66, "avg_line_length": 15.3, "alnum_prop": 0.673202614379085, "repo_name": "muhammad-ammar/django-require", "id": "3e124c3e049602897cddea5deedbf8e090a86af3", "size": "153", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "require/__init__.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "JavaScript", "bytes": "1219323" }, { "name": "Python", "bytes": "30108" } ], "symlink_target": "" }
""" If the user requests "Docs.Doxygen" then this will call this script as a tool In which case we include all the Doxygen related builders """ from . import Doxygen, DoxygenDefaultTemplate def generate(env): Doxygen.generate(env) DoxygenDefaultTemplate.generate(env) def exists(env): if (Doxygen.exists(env) == False): return False if (DoxygenDefaultTemplate.exists(env) == False): return False return True
{ "content_hash": "41c421a8d47b6c7d851905de962811b4", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 77, "avg_line_length": 29.733333333333334, "alnum_prop": 0.7130044843049327, "repo_name": "ASoftTech/Scons-Tools-Grbd", "id": "e09d5e026e62078fe5864d2e5a7ed6d555822d05", "size": "446", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "scons_tools_grbd/Tools/Docs/Doxygen/__init__.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "33" }, { "name": "CSS", "bytes": "3966" }, { "name": "HTML", "bytes": "11346" }, { "name": "Python", "bytes": "67078" } ], "symlink_target": "" }
"""Checker decorators""" from functools import wraps from flask import render_template from pysistem.checkers.model import Checker def yield_checker(field='checker_id', yield_field='checker'): """Decorator Get checker identified by 'field' keyword argument and save it to 'yield_field' keyword argument. If checker does not exist return 404 Not Found error """ def decorator(func): """Decorator of yield_checker""" @wraps(func) def decorated_function(*args, **kwargs): """Decorated of yield_checker""" checker = Checker.query.get(int(kwargs.get(field))) if checker is None: return render_template('errors/404.html'), 404 kwargs[yield_field] = checker return func(*args, **kwargs) return decorated_function return decorator
{ "content_hash": "7e94e4a2a34b2de2b7698f7070cb7d20", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 63, "avg_line_length": 33.19230769230769, "alnum_prop": 0.6442641946697567, "repo_name": "TsarN/pysistem", "id": "06bd5b68aa5b5c94d5e967a5d5901d3a95147a6a", "size": "888", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pysistem/checkers/decorators.py", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "112692" }, { "name": "CSS", "bytes": "7381" }, { "name": "HTML", "bytes": "90904" }, { "name": "JavaScript", "bytes": "904" }, { "name": "Mako", "bytes": "412" }, { "name": "Pascal", "bytes": "51054" }, { "name": "Python", "bytes": "247159" } ], "symlink_target": "" }
from novaclient import exceptions from novaclient.tests import utils from novaclient.tests.v1_1 import fakes from novaclient.v1_1 import flavors class FlavorsTest(utils.TestCase): def setUp(self): super(FlavorsTest, self).setUp() self.cs = self._get_fake_client() self.flavor_type = self._get_flavor_type() def _get_fake_client(self): return fakes.FakeClient() def _get_flavor_type(self): return flavors.Flavor def test_list_flavors(self): fl = self.cs.flavors.list() self.cs.assert_called('GET', '/flavors/detail') for flavor in fl: self.assertIsInstance(flavor, self.flavor_type) def test_list_flavors_undetailed(self): fl = self.cs.flavors.list(detailed=False) self.cs.assert_called('GET', '/flavors') for flavor in fl: self.assertIsInstance(flavor, self.flavor_type) def test_list_flavors_is_public_none(self): fl = self.cs.flavors.list(is_public=None) self.cs.assert_called('GET', '/flavors/detail?is_public=None') for flavor in fl: self.assertIsInstance(flavor, self.flavor_type) def test_list_flavors_is_public_false(self): fl = self.cs.flavors.list(is_public=False) self.cs.assert_called('GET', '/flavors/detail?is_public=False') for flavor in fl: self.assertIsInstance(flavor, self.flavor_type) def test_list_flavors_is_public_true(self): fl = self.cs.flavors.list(is_public=True) self.cs.assert_called('GET', '/flavors/detail') for flavor in fl: self.assertIsInstance(flavor, self.flavor_type) def test_get_flavor_details(self): f = self.cs.flavors.get(1) self.cs.assert_called('GET', '/flavors/1') self.assertIsInstance(f, self.flavor_type) self.assertEqual(f.ram, 256) self.assertEqual(f.disk, 10) self.assertEqual(f.ephemeral, 10) self.assertEqual(f.is_public, True) def test_get_flavor_details_alphanum_id(self): f = self.cs.flavors.get('aa1') self.cs.assert_called('GET', '/flavors/aa1') self.assertIsInstance(f, self.flavor_type) self.assertEqual(f.ram, 128) self.assertEqual(f.disk, 0) self.assertEqual(f.ephemeral, 0) self.assertEqual(f.is_public, True) def test_get_flavor_details_diablo(self): f = self.cs.flavors.get(3) self.cs.assert_called('GET', '/flavors/3') self.assertIsInstance(f, self.flavor_type) self.assertEqual(f.ram, 256) self.assertEqual(f.disk, 10) self.assertEqual(f.ephemeral, 'N/A') self.assertEqual(f.is_public, 'N/A') def test_find(self): f = self.cs.flavors.find(ram=256) self.cs.assert_called('GET', '/flavors/detail') self.assertEqual(f.name, '256 MB Server') f = self.cs.flavors.find(disk=0) self.assertEqual(f.name, '128 MB Server') self.assertRaises(exceptions.NotFound, self.cs.flavors.find, disk=12345) def _create_body(self, name, ram, vcpus, disk, ephemeral, id, swap, rxtx_factor, is_public): return { "flavor": { "name": name, "ram": ram, "vcpus": vcpus, "disk": disk, "OS-FLV-EXT-DATA:ephemeral": ephemeral, "id": id, "swap": swap, "rxtx_factor": rxtx_factor, "os-flavor-access:is_public": is_public, } } def test_create(self): f = self.cs.flavors.create("flavorcreate", 512, 1, 10, 1234, ephemeral=10, is_public=False) body = self._create_body("flavorcreate", 512, 1, 10, 10, 1234, 0, 1.0, False) self.cs.assert_called('POST', '/flavors', body) self.assertIsInstance(f, self.flavor_type) def test_create_with_id_as_string(self): flavor_id = 'foobar' f = self.cs.flavors.create("flavorcreate", 512, 1, 10, flavor_id, ephemeral=10, is_public=False) body = self._create_body("flavorcreate", 512, 1, 10, 10, flavor_id, 0, 1.0, False) self.cs.assert_called('POST', '/flavors', body) self.assertIsInstance(f, self.flavor_type) def test_create_ephemeral_ispublic_defaults(self): f = self.cs.flavors.create("flavorcreate", 512, 1, 10, 1234) body = self._create_body("flavorcreate", 512, 1, 10, 0, 1234, 0, 1.0, True) self.cs.assert_called('POST', '/flavors', body) self.assertIsInstance(f, self.flavor_type) def test_invalid_parameters_create(self): self.assertRaises(exceptions.CommandError, self.cs.flavors.create, "flavorcreate", "invalid", 1, 10, 1234, swap=0, ephemeral=0, rxtx_factor=1.0, is_public=True) self.assertRaises(exceptions.CommandError, self.cs.flavors.create, "flavorcreate", 512, "invalid", 10, 1234, swap=0, ephemeral=0, rxtx_factor=1.0, is_public=True) self.assertRaises(exceptions.CommandError, self.cs.flavors.create, "flavorcreate", 512, 1, "invalid", 1234, swap=0, ephemeral=0, rxtx_factor=1.0, is_public=True) self.assertRaises(exceptions.CommandError, self.cs.flavors.create, "flavorcreate", 512, 1, 10, 1234, swap="invalid", ephemeral=0, rxtx_factor=1.0, is_public=True) self.assertRaises(exceptions.CommandError, self.cs.flavors.create, "flavorcreate", 512, 1, 10, 1234, swap=0, ephemeral="invalid", rxtx_factor=1.0, is_public=True) self.assertRaises(exceptions.CommandError, self.cs.flavors.create, "flavorcreate", 512, 1, 10, 1234, swap=0, ephemeral=0, rxtx_factor="invalid", is_public=True) self.assertRaises(exceptions.CommandError, self.cs.flavors.create, "flavorcreate", 512, 1, 10, 1234, swap=0, ephemeral=0, rxtx_factor=1.0, is_public='invalid') def test_delete(self): self.cs.flavors.delete("flavordelete") self.cs.assert_called('DELETE', '/flavors/flavordelete') def test_delete_with_flavor_instance(self): f = self.cs.flavors.get(2) self.cs.flavors.delete(f) self.cs.assert_called('DELETE', '/flavors/2') def test_delete_with_flavor_instance_method(self): f = self.cs.flavors.get(2) f.delete() self.cs.assert_called('DELETE', '/flavors/2') def test_set_keys(self): f = self.cs.flavors.get(1) f.set_keys({'k1': 'v1'}) self.cs.assert_called('POST', '/flavors/1/os-extra_specs', {"extra_specs": {'k1': 'v1'}}) def test_set_with_valid_keys(self): valid_keys = ['key4', 'month.price', 'I-Am:AK-ey.44-', 'key with spaces and _'] f = self.cs.flavors.get(4) for key in valid_keys: f.set_keys({key: 'v4'}) self.cs.assert_called('POST', '/flavors/4/os-extra_specs', {"extra_specs": {key: 'v4'}}) def test_set_with_invalid_keys(self): invalid_keys = ['/1', '?1', '%1', '<', '>'] f = self.cs.flavors.get(1) for key in invalid_keys: self.assertRaises(exceptions.CommandError, f.set_keys, {key: 'v1'}) def test_unset_keys(self): f = self.cs.flavors.get(1) f.unset_keys(['k1']) self.cs.assert_called('DELETE', '/flavors/1/os-extra_specs/k1')
{ "content_hash": "ef63205bad33403e3d0c9d61c859a693", "timestamp": "", "source": "github", "line_count": 197, "max_line_length": 79, "avg_line_length": 40.13705583756345, "alnum_prop": 0.5662071582142405, "repo_name": "metacloud/python-novaclient", "id": "c18305401a56c1fb2bf707b352d7a8165346342f", "size": "8537", "binary": false, "copies": "1", "ref": "refs/heads/mc/2013.1.7", "path": "novaclient/tests/v1_1/test_flavors.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "1060810" }, { "name": "Shell", "bytes": "4466" } ], "symlink_target": "" }
""" iven two integers: L and R, find the maximal values of A xor B given, L <= A <= B <= R Input Format The input contains two lines, L is present in the first line. R in the second line. Constraints 1 <= L <= R <= 103 Output Format The maximal value as mentioned in the problem statement. """ __author__ = 'Danyang' class Solution(object): def solve(self, cipher): """ main solution function :param cipher: the cipher """ global_max = -1 A, B = cipher for i in xrange(A, B + 1): for j in xrange(i + 1, B + 1): global_max = max(global_max, i ^ j) return global_max if __name__ == "__main__": import sys # f = open("1.in", "r") f = sys.stdin A = int(f.readline().strip()) B = int(f.readline().strip()) cipher = (A, B) s = "%s\n" % (Solution().solve(cipher)) print s,
{ "content_hash": "659f608f10bddb841e8095303d0d225c", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 61, "avg_line_length": 21.5, "alnum_prop": 0.5459579180509413, "repo_name": "algorhythms/HackerRankAlgorithms", "id": "cb424ebc0e6990ea5a21e9c2c3da0c1a337728ae", "size": "903", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Maximizing XOR.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C++", "bytes": "5450" }, { "name": "Java", "bytes": "2566" }, { "name": "Python", "bytes": "208997" } ], "symlink_target": "" }
from __future__ import absolute_import from __future__ import print_function import os import tempfile import nose.tools as nt import nbformat import notedown simple_backtick = """ ``` code1 space_indent more code ``` text1 `` ``` code2 tab_indent ~~~ ``` text2""" simple_tilde = """ ~~~ code1 space_indent more code ~~~ text1 `` ~~~~ code2 tab_indent ~~~ ~~~~ text2""" simple_indented = """ code1 space_indent more code text1 `` code2 tab_indent ~~~ text2""" simple_code_cells = ['code1\n space_indent\n\n\nmore code', 'code2\n tab_indent\n~~~'] # note: ipython markdown cells do not end with a newline unless # explicitly present. simple_markdown_cells = ['text1\n``', 'text2'] alt_lang = """ This is how you write a code block in another language: ```bash echo "This is bash ${BASH_VERSION}!" ``` """ alt_lang_code = '%%bash\necho "This is bash ${BASH_VERSION}!"' sample_markdown = u"""### Create IPython Notebooks from markdown This is a simple tool to convert markdown with code into an IPython Notebook. Usage: ``` notedown input.md > output.ipynb ``` It is really simple and separates your markdown into code and not code. Code goes into code cells, not-code goes into markdown cells. Installation: pip install notedown """ # Generate the sample notebook from the markdown using # # import notedown # reader = notedown.MarkdownReader() # sample_notebook = reader.reads(sample_markdown) # print nbformat.writes(sample_notebook) # # which is defined in create_json_notebook() below sample_notebook = r"""{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "### Create IPython Notebooks from markdown\n", "\n", "This is a simple tool to convert markdown with code into an IPython\n", "Notebook.\n", "\n", "Usage:" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "notedown input.md > output.ipynb" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "It is really simple and separates your markdown into code and not\n", "code. Code goes into code cells, not-code goes into markdown cells.\n", "\n", "Installation:" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "pip install notedown" ] } ], "metadata": {}, "nbformat": 4, "nbformat_minor": 2 }""" roundtrip_markdown = u"""## A roundtrip test Here is a code cell: ```python a = 1 ``` and here is another one: ```python b = 2 ``` """ attribute_markdown = u"""Attribute test ```lang code1 ``` ```{.attr} code2 ``` ``` {.attr} code3 ``` """ ref_attributes = ['lang', r'{.attr}', r'{.attr}'] def create_json_notebook(markdown): reader = notedown.MarkdownReader() notebook = reader.reads(markdown) json_notebook = nbformat.writes(notebook) return json_notebook def test_notedown(): """Integration test the whole thing.""" from difflib import ndiff notebook = create_json_notebook(sample_markdown) diff = ndiff(sample_notebook.splitlines(1), notebook.splitlines(1)) print('\n'.join(diff)) nt.assert_multi_line_equal(create_json_notebook(sample_markdown), sample_notebook) def parse_cells(text, regex=None): reader = notedown.MarkdownReader(code_regex=regex) return reader.parse_blocks(text) def separate_code_cells(cells): codetype = notedown.MarkdownReader.code code_cells = [c['content'] for c in cells if c['type'] == codetype] return code_cells def separate_markdown_cells(cells): markdowntype = notedown.MarkdownReader.markdown markdown_cells = [c['content'] for c in cells if c['type'] == markdowntype] return markdown_cells def test_parse_gfm(): """Test with GFM code blocks.""" all_cells = parse_cells(simple_backtick, 'fenced') code_cells = separate_code_cells(all_cells) markdown_cells = separate_markdown_cells(all_cells) print("out: ", code_cells) print("ref: ", simple_code_cells) print("out: ", markdown_cells) print("ref: ", simple_markdown_cells) assert(code_cells == simple_code_cells) assert(markdown_cells == simple_markdown_cells) def test_parse_tilde(): """Test with ~~~ delimited code blocks.""" all_cells = parse_cells(simple_tilde, 'fenced') code_cells = separate_code_cells(all_cells) markdown_cells = separate_markdown_cells(all_cells) assert(code_cells == simple_code_cells) assert(markdown_cells == simple_markdown_cells) def test_parse_indented(): """Test with indented code blocks.""" all_cells = parse_cells(simple_indented, 'indented') code_cells = separate_code_cells(all_cells) markdown_cells = separate_markdown_cells(all_cells) print("out: ", code_cells) print("ref: ", simple_code_cells) print("out: ", markdown_cells) print("ref: ", simple_markdown_cells) assert(code_cells == simple_code_cells) assert(markdown_cells == simple_markdown_cells) def test_alt_lang(): """Specifying a language that isn't python should generate code blocks using %%language magic.""" reader = notedown.MarkdownReader(code_regex='fenced') all_blocks = reader.parse_blocks(alt_lang) code_blocks = [b for b in all_blocks if b['type'] == reader.code] magic_block = code_blocks[0] reader.process_code_block(magic_block) assert(magic_block['content'] == alt_lang_code) def test_format_agnostic(): """Test whether we can process markdown with either fenced or indented blocks.""" fenced_cells = parse_cells(simple_backtick, None) indented_cells = parse_cells(simple_indented, None) fenced_code_cells = separate_code_cells(fenced_cells) indented_code_cells = separate_code_cells(indented_cells) fenced_markdown_cells = separate_markdown_cells(fenced_cells) indented_markdown_cells = separate_markdown_cells(indented_cells) assert(fenced_code_cells == indented_code_cells) assert(fenced_markdown_cells == indented_markdown_cells) def test_attributes(): """Are code block attributes correctly parsed?""" cells = parse_cells(attribute_markdown) attributes = [cell['attributes'] for cell in cells if cell['type'] == 'code'] for attr, ref in zip(attributes, ref_attributes): assert attr == ref def test_pre_process_text(): """test the stripping of blank lines""" block = {} ref = "\t \n\n \t\n\ntext \t \n\n\n" block['content'] = ref notedown.MarkdownReader.pre_process_text_block(block) expected = "text" print("---") print("in: ") print(ref) print("---") print("out: ") print(block['content']) print("---") print("expected: ") print(expected) print("---") assert(block['content'] == expected) def test_roundtrip(): """Run nbconvert using our custom markdown template to recover original markdown from a notebook. """ # create a notebook from the markdown mr = notedown.MarkdownReader() roundtrip_notebook = mr.to_notebook(roundtrip_markdown) # write the notebook into json notebook_json = nbformat.writes(roundtrip_notebook) # write the json back into notebook notebook = nbformat.reads(notebook_json, as_version=4) # convert notebook to markdown mw = notedown.MarkdownWriter(template_file='notedown/templates/markdown.tpl', strip_outputs=True) markdown = mw.writes(notebook) nt.assert_multi_line_equal(roundtrip_markdown, markdown) def test_template_load_absolute(): """Load a template from an absolute path. IPython 3 requires a relative path in a child directory. """ template_abspath = os.path.abspath('notedown/templates/markdown.tpl') writer = notedown.MarkdownWriter(template_file=template_abspath) import jinja2 assert(isinstance(writer.exporter.template, jinja2.Template)) def test_template_load_nonchild(): """Load a template from a non-child directory. IPython 3 requires a relative path in a child directory. """ temp = tempfile.NamedTemporaryFile(delete=False, mode='w+t') template_path = 'notedown/templates/markdown.tpl' with open(template_path, 'rt') as source: temp.write(source.read()) temp.close() writer = notedown.MarkdownWriter(template_file=temp.name) import jinja2 assert(isinstance(writer.exporter.template, jinja2.Template)) os.remove(temp.name) def test_markdown_markdown(): mr = notedown.MarkdownReader() mw = notedown.MarkdownWriter(notedown.markdown_template) nb = mr.reads(roundtrip_markdown) markdown = mw.writes(nb) nt.assert_multi_line_equal(markdown, roundtrip_markdown) def test_R(): """Check that the R notebook generated from Rmd looks the same as the reference (without output cells). """ knitr = notedown.Knitr() with open('r-examples/r-example.Rmd') as rmd: knitted_markdown_file = knitr.knit(rmd) reader = notedown.MarkdownReader(precode=r"%load_ext rpy2.ipython", magic=True) notebook = reader.read(knitted_markdown_file) with open('r-examples/r-example.ipynb') as f: reference_notebook = nbformat.read(f, as_version=4) notedown.main.strip(notebook) notedown.main.strip(reference_notebook) writer = nbformat nbjson = writer.writes(notebook) reference_nbjson = writer.writes(reference_notebook) nt.assert_multi_line_equal(nbjson, reference_nbjson) def test_match_fenced(): mr = notedown.MarkdownReader(match='fenced') nb = mr.to_notebook(sample_markdown) assert(nb.cells[1]['cell_type'] == 'code') assert(nb.cells[3]['cell_type'] == 'markdown') def test_match_arbitrary(): mr = notedown.MarkdownReader(match='attr') nb = mr.to_notebook(attribute_markdown) assert(nb.cells[0]['cell_type'] == 'markdown') assert(nb.cells[2]['cell_type'] == 'code') assert(nb.cells[3]['cell_type'] == 'code') class TestCommandLine(object): @property def default_args(self): parser = notedown.main.command_line_parser() return parser.parse_args() def run(self, args): notedown.main.main(args) def test_basic(self): args = self.default_args args.input_file = 'example.md' self.run(args) def test_reverse(self): args = self.default_args args.input_file = 'example.ipynb' self.run(args) def test_markdown_to_notebook(self): args = self.default_args args.input_file = 'example.md' args.informat = 'markdown' args.outformat = 'notebook' self.run(args) def test_markdown_to_markdown(self): args = self.default_args args.input_file = 'example.md' args.informat = 'markdown' args.outformat = 'markdown' self.run(args) def test_notebook_to_markdown(self): args = self.default_args args.input_file = 'example.ipynb' args.informat = 'notebook' args.outformat = 'markdown' self.run(args) def test_notebook_to_notebook(self): args = self.default_args args.input_file = 'example.ipynb' args.informat = 'notebook' args.outformat = 'notebook' self.run(args)
{ "content_hash": "98916c6f5e0fee20a1f0c48118f98709", "timestamp": "", "source": "github", "line_count": 480, "max_line_length": 101, "avg_line_length": 23.764583333333334, "alnum_prop": 0.6527570789865872, "repo_name": "aaren/notedown", "id": "777a5af876c0f406acf0ad4875c1376ad5fe2803", "size": "11407", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Jupyter Notebook", "bytes": "100425" }, { "name": "Python", "bytes": "51586" }, { "name": "Shell", "bytes": "459" }, { "name": "Smarty", "bytes": "2313" } ], "symlink_target": "" }
"""The main module of the QA Dashboard.""" import json import time import datetime import os import sys import requests import csv import shutil import re from coreapi import * from jobsapi import * from configuration import * from results import * from html_generator import * from perf_tests import * from smoke_tests import * from sla import * from ci_jobs import * from cliargs import * from config import * from repositories import * from progress_bar import * from source_files import * from unit_tests import * def check_environment_variable(env_var_name): """Check if the given environment variable exists.""" print("Checking: {e} environment variable existence".format( e=env_var_name)) if env_var_name not in os.environ: print("Fatal: {e} environment variable has to be specified" .format(e=env_var_name)) sys.exit(1) else: print(" ok") def check_environment_variables(): """Check if all required environment variables exist.""" environment_variables = [ "F8A_API_URL_STAGE", "F8A_API_URL_PROD", "F8A_JOB_API_URL_STAGE", "F8A_JOB_API_URL_PROD", "RECOMMENDER_API_TOKEN_STAGE", "RECOMMENDER_API_TOKEN_PROD", "JOB_API_TOKEN_STAGE", "JOB_API_TOKEN_PROD", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "S3_REGION_NAME"] for environment_variable in environment_variables: check_environment_variable(environment_variable) def check_system(core_api, jobs_api): """Check if all system endpoints are available and that tokens are valid.""" # try to access system endpoints print("Checking: core API and JOBS API endpoints") core_api_available = core_api.is_api_running() jobs_api_available = jobs_api.is_api_running() if core_api_available and jobs_api_available: print(" ok") else: print(" Fatal: tested system is not available") # check the authorization token for the core API print("Checking: authorization token for the core API") core_api_auth_token = core_api.check_auth_token_validity() if core_api_auth_token: print(" ok") else: print(" error") # check the authorization token for the jobs API print("Checking: authorization token for the jobs API") jobs_api_auth_token = jobs_api.check_auth_token_validity() if jobs_api_auth_token: print(" ok") else: print(" error") return {"core_api_available": core_api_available, "jobs_api_available": jobs_api_available, "core_api_auth_token": core_api_auth_token, "jobs_api_auth_token": jobs_api_auth_token} # files that are to be ignored by Pylint ignored_files_for_pylint = { } # files that are to be ignored by Pydocchecker ignored_files_for_pydocstyle = { "fabric8-analytics-worker": ["tests/data/license/license.py"] } ci_job_types = [ "test_job", "build_job", "pylint_job", "pydoc_job" ] teams = [ "core", "integration" ] JENKINS_URL = "https://ci.centos.org" JOBS_STATUSES_FILENAME = "jobs.json" def is_repository_cloned(repository): """Check if the directory with cloned repository exist.""" return os.path.isdir(repository) def clone_repository(repository): """Clone the selected repository.""" print("Cloning the repository {repository}".format(repository=repository)) prefix = "https://github.com/fabric8-analytics" command = "git clone --single-branch --depth 1 {prefix}/{repo}.git".format(prefix=prefix, repo=repository) os.system(command) def fetch_repository(repository): """Fetch the selected repository.""" print("Fetching changes from the repository {repository}".format(repository=repository)) command = "pushd {repository}; git fetch; popd".format(repository=repository) os.system(command) def clone_or_fetch_repository(repository): """Clone or fetch the selected repository.""" if is_repository_cloned(repository): fetch_repository(repository) else: clone_repository(repository) def run_pylint(repository): """Run Pylint checker against the selected repository.""" command = "pushd {repo};./run-linter.sh > ../{repo}.linter.txt;popd".format(repo=repository) os.system(command) def run_docstyle_check(repository): """Run PyDocsStyle checker against the selected repository.""" command = "pushd {repo};./check-docstyle.sh > ../{repo}.pydocstyle.txt;popd".format( repo=repository) os.system(command) def percentage(part1, part2): """Compute percentage of failed tests.""" total = part1 + part2 if total == 0: return "0" perc = 100.0 * part2 / total return "{:.0f}".format(perc) def parse_linter_results(filename): """Parse results generated by Python linter or by PyDocStyle.""" source = None files = {} passed = 0 failed = 0 total = 0 with open(filename) as fin: for line in fin: line = line.rstrip() if line.endswith(".py"): source = line.strip() if line.endswith(" Pass"): if source: passed += 1 total += 1 files[source] = True if line.endswith(" Fail"): if source: failed += 1 total += 1 files[source] = False return {"files": files, "total": total, "passed": passed, "failed": failed, "passed%": percentage(failed, passed), "failed%": percentage(passed, failed), "progress_bar_class": progress_bar_class(percentage(failed, passed)), "progress_bar_width": progress_bar_width(percentage(failed, passed))} def parse_pylint_results(repository): """Parse results generated by Python linter.""" return parse_linter_results(repository + ".linter.txt") def parse_docstyle_results(repository): """Parse results generated by PyDocStyle.""" return parse_linter_results(repository + ".pydocstyle.txt") def update_overall_status(results, repository): """Update the overall status of all tested systems (stage, prod).""" remarks = "" source_files = results.source_files[repository]["count"] linter_checks = results.repo_linter_checks[repository] docstyle_checks = results.repo_docstyle_checks[repository] unit_test_coverage = results.unit_test_coverage[repository] linter_checks_total = linter_checks["total"] docstyle_checks_total = docstyle_checks["total"] ignored_pylint_files = len(ignored_files_for_pylint.get(repository, [])) ignored_pydocstyle_files = len(ignored_files_for_pydocstyle.get(repository, [])) status = source_files == (linter_checks_total + ignored_pylint_files) and \ source_files == (docstyle_checks_total + ignored_pydocstyle_files) and \ linter_checks["failed"] == 0 and docstyle_checks["failed"] == 0 and \ unit_test_coverage_ok(unit_test_coverage) if source_files != linter_checks_total + ignored_pylint_files: remarks += "not all source files are checked by linter<br>" if source_files != docstyle_checks_total + ignored_pydocstyle_files: remarks += "not all source files are checked by pydocstyle<br>" if linter_checks_total + ignored_pylint_files != \ docstyle_checks_total + ignored_pydocstyle_files: remarks += ", linter checked {n1} files, but pydocstyle checked {n2} files".format( n1=linter_checks_total, n2=docstyle_checks_total) if unit_test_coverage is not None: if not unit_test_coverage_ok(unit_test_coverage): remarks += "improve code coverage<br>" else: remarks += "unit tests has not been setup<br>" if linter_checks["failed"] != 0: remarks += "linter failed<br>" if docstyle_checks["failed"] != 0: remarks += "pydocstyle check failed<br>" if ignored_pylint_files: remarks += "{n} file{s} ignored by pylint<br>".format( n=ignored_pylint_files, s="s" if ignored_pylint_files > 1 else "") if ignored_pydocstyle_files: remarks += "{n} file{s} ignored by pydocstyle<br>".format( n=ignored_pydocstyle_files, s="s" if ignored_pydocstyle_files > 1 else "") results.overall_status[repository] = status results.remarks[repository] = remarks def delete_work_files(repository): """Cleanup the CWD from the work files used to analyze given repository.""" os.remove("{repo}.count".format(repo=repository)) os.remove("{repo}.linter.txt".format(repo=repository)) os.remove("{repo}.pydocstyle.txt".format(repo=repository)) def cleanup_repository(repository): """Cleanup the directory with the clone of specified repository.""" # let's do very basic check that the repository is really local dir if '/' not in repository: print("cleanup " + repository) shutil.rmtree(repository, ignore_errors=True) def export_into_csv(results): """Export the results into CSV file.""" record = [ datetime.date.today().strftime("%Y-%m-%d"), int(results.stage["core_api_available"]), int(results.stage["jobs_api_available"]), int(results.stage["core_api_auth_token"]), int(results.stage["jobs_api_auth_token"]), int(results.production["core_api_available"]), int(results.production["jobs_api_available"]), int(results.production["core_api_auth_token"]), int(results.production["jobs_api_auth_token"]) ] for repository in repositories: record.append(results.source_files[repository]["count"]) record.append(results.source_files[repository]["total_lines"]) record.append(results.repo_linter_checks[repository]["total"]) record.append(results.repo_linter_checks[repository]["passed"]) record.append(results.repo_linter_checks[repository]["failed"]) record.append(results.repo_docstyle_checks[repository]["total"]) record.append(results.repo_docstyle_checks[repository]["passed"]) record.append(results.repo_docstyle_checks[repository]["failed"]) with open('dashboard.csv', 'a') as fout: writer = csv.writer(fout) writer.writerow(record) def prepare_data_for_liveness_table(results, ci_jobs, job_statuses): """Prepare data for sevices liveness/readiness table on the dashboard.""" cfg = Configuration() core_api = CoreApi(cfg.stage.core_api_url, cfg.stage.core_api_token) jobs_api = JobsApi(cfg.stage.jobs_api_url, cfg.stage.jobs_api_token) results.stage = check_system(core_api, jobs_api) core_api = CoreApi(cfg.prod.core_api_url, cfg.prod.core_api_token) jobs_api = JobsApi(cfg.prod.jobs_api_url, cfg.prod.jobs_api_token) results.production = check_system(core_api, jobs_api) smoke_tests = SmokeTests(ci_jobs, job_statuses) results.smoke_tests_results = smoke_tests.results results.smoke_tests_links = smoke_tests.ci_jobs_links results.smoke_tests_statuses = smoke_tests.ci_jobs_statuses def prepare_data_for_sla_table(results): """Prepare data for SLA table on the dashboard.""" perf_tests = PerfTests() perf_tests.read_results() perf_tests.compute_statistic() results.perf_tests_results = perf_tests.results results.perf_tests_statistic = perf_tests.statistic results.sla_thresholds = SLA def prepare_data_for_repositories(repositories, results, ci_jobs, job_statuses, clone_repositories_enabled, cleanup_repositories_enabled, code_quality_table_enabled, ci_jobs_table_enabled): """Perform clone/fetch repositories + run pylint + run docstyle script + accumulate results.""" for repository in repositories: # clone or fetch the repository if the cloning/fetching is not disabled via CLI arguments if clone_repositories_enabled: clone_or_fetch_repository(repository) if code_quality_table_enabled: run_pylint(repository) run_docstyle_check(repository) results.source_files[repository] = get_source_files(repository) results.repo_linter_checks[repository] = parse_pylint_results(repository) results.repo_docstyle_checks[repository] = parse_docstyle_results(repository) # delete_work_files(repository) if cleanup_repositories_enabled: cleanup_repository(repository) if ci_jobs_table_enabled: for job_type in ci_job_types: url = ci_jobs.get_job_url(repository, job_type) name = ci_jobs.get_job_name(repository, job_type) job_status = job_statuses.get(name) results.ci_jobs_links[repository][job_type] = url results.ci_jobs_statuses[repository][job_type] = job_status results.unit_test_coverage[repository] = read_unit_test_coverage(ci_jobs, JENKINS_URL, repository) if code_quality_table_enabled: update_overall_status(results, repository) def read_jobs_statuses(filename): """Deserialize statuses for all jobs from the JSON file.""" with open(filename) as fin: return json.load(fin)["jobs"] def store_jobs_statuses(filename, data): """Serialize statuses of all jobs into the JSON file.""" with open(filename, "w") as fout: fout.write(data) def jenkins_api_query_job_statuses(jenkins_url): """Construct API query to Jenkins (CI).""" return "{url}/api/json?tree=jobs[name,color]".format(url=jenkins_url) def jenkins_api_query_build_statuses(jenkins_url): """Construct API query to Jenkins (CI).""" return "{url}/api/json?tree=builds[result]".format(url=jenkins_url) def jobs_as_dict(raw_jobs): """Construct a dictionary with job name as key and job status as value.""" return dict((job["name"], job["color"]) for job in raw_jobs if "color" in job) def read_ci_jobs_statuses(jenkins_url): """Read statuses of all jobs from the Jenkins (CI).""" api_query = jenkins_api_query_job_statuses(jenkins_url) response = requests.get(api_query) raw_jobs = response.json()["jobs"] # for debugging purposes only # store_jobs_statuses(JOBS_STATUSES_FILENAME, response.text) # raw_jobs = read_jobs_statuses(JOBS_STATUSES_FILENAME) return jobs_as_dict(raw_jobs) def read_job_statuses(ci_jobs, ci_jobs_table_enabled, liveness_table_enabled): """Read job statuses from the CI, but only if its necessary.""" if ci_jobs_table_enabled or liveness_table_enabled: return read_ci_jobs_statuses(JENKINS_URL) else: return None def production_smoketests_status(ci_jobs): """Read total number of remembered builds and succeeded builds as well.""" job_url = ci_jobs.get_job_url("production", "smoketests") api_query = jenkins_api_query_build_statuses(job_url) response = requests.get(api_query) builds = response.json()["builds"] total_builds = [b for b in builds if b["result"] is not None] success_builds = [b for b in builds if b["result"] == "SUCCESS"] return len(total_builds), len(success_builds) def main(): """Entry point to the QA Dashboard.""" config = Config() cli_arguments = cli_parser.parse_args() # some CLI arguments are used to DISABLE given feature of the dashboard, # but let's not use double negation everywhere :) ci_jobs_table_enabled = not cli_arguments.disable_ci_jobs code_quality_table_enabled = not cli_arguments.disable_code_quality liveness_table_enabled = not cli_arguments.disable_liveness sla_table_enabled = not cli_arguments.disable_sla clone_repositories_enabled = cli_arguments.clone_repositories cleanup_repositories_enabled = cli_arguments.cleanup_repositories check_environment_variables() results = Results() # list of repositories to check results.repositories = repositories # we need to know which tables are enabled or disabled to proper process the template results.sla_table_enabled = sla_table_enabled results.liveness_table_enabled = liveness_table_enabled results.code_quality_table_enabled = code_quality_table_enabled results.ci_jobs_table_enabled = ci_jobs_table_enabled results.teams = teams results.sprint = config.get_sprint() print("Sprint: " + results.sprint) ci_jobs = CIJobs() job_statuses = read_job_statuses(ci_jobs, ci_jobs_table_enabled, liveness_table_enabled) results.smoke_tests_total_builds, results.smoke_tests_success_builds = \ production_smoketests_status(ci_jobs) for team in teams: results.issues_list_url[team] = config.get_list_of_issues_url(team) if liveness_table_enabled: prepare_data_for_liveness_table(results, ci_jobs, job_statuses) prepare_data_for_repositories(repositories, results, ci_jobs, job_statuses, clone_repositories_enabled, cleanup_repositories_enabled, code_quality_table_enabled, ci_jobs_table_enabled) if sla_table_enabled: prepare_data_for_sla_table(results) if code_quality_table_enabled and liveness_table_enabled: export_into_csv(results) generate_dashboard(results, ignored_files_for_pylint, ignored_files_for_pydocstyle) if __name__ == "__main__": # execute only if run as a script main()
{ "content_hash": "f19059cc5dccfcee39a08c048aa68596", "timestamp": "", "source": "github", "line_count": 496, "max_line_length": 99, "avg_line_length": 35.72379032258065, "alnum_prop": 0.6569219481912072, "repo_name": "jpopelka/fabric8-analytics-common", "id": "eefd9ebdf2ebd1618692cee3a187c398434942d4", "size": "17719", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "dashboard/src/dashboard.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "998" }, { "name": "Gherkin", "bytes": "140658" }, { "name": "HTML", "bytes": "25307" }, { "name": "Python", "bytes": "354439" }, { "name": "Shell", "bytes": "9619" } ], "symlink_target": "" }
"""Implementation of in-memory backend.""" import copy import logging import threading from taskflow import decorators from taskflow import exceptions as exc from taskflow.openstack.common import timeutils from taskflow.persistence.backends import base from taskflow.utils import persistence_utils as p_utils LOG = logging.getLogger(__name__) # TODO(harlowja): we likely need to figure out a better place to put these # rather than globals. _LOG_BOOKS = {} _FLOW_DETAILS = {} _TASK_DETAILS = {} # For now this will be a pretty big lock, since it is not expected that saves # will be that frequent this seems ok for the time being. I imagine that this # can be done better but it will require much more careful usage of a dict as # a key/value map. Aka I wish python had a concurrent dict that was safe and # known good to use. _SAVE_LOCK = threading.RLock() _READ_LOCK = threading.RLock() _READ_SAVE_ORDER = (_READ_LOCK, _SAVE_LOCK) def _copy(obj): return copy.deepcopy(obj) class MemoryBackend(base.Backend): def get_connection(self): return Connection(self) def close(self): pass class Connection(base.Connection): def __init__(self, backend): self._read_lock = _READ_LOCK self._save_locks = _READ_SAVE_ORDER self._backend = backend def upgrade(self): pass @property def backend(self): return self._backend def close(self): pass @decorators.locked(lock="_save_locks") def clear_all(self): count = 0 for uuid in list(_LOG_BOOKS.iterkeys()): self.destroy_logbook(uuid) count += 1 return count @decorators.locked(lock="_save_locks") def destroy_logbook(self, book_uuid): try: # Do the same cascading delete that the sql layer does. lb = _LOG_BOOKS.pop(book_uuid) for fd in lb: _FLOW_DETAILS.pop(fd.uuid, None) for td in fd: _TASK_DETAILS.pop(td.uuid, None) except KeyError: raise exc.NotFound("No logbook found with id: %s" % book_uuid) @decorators.locked(lock="_save_locks") def update_task_details(self, task_detail): try: return p_utils.task_details_merge(_TASK_DETAILS[task_detail.uuid], task_detail) except KeyError: raise exc.NotFound("No task details found with id: %s" % task_detail.uuid) @decorators.locked(lock="_save_locks") def update_flow_details(self, flow_detail): try: e_fd = p_utils.flow_details_merge(_FLOW_DETAILS[flow_detail.uuid], flow_detail) for task_detail in flow_detail: if e_fd.find(task_detail.uuid) is None: _TASK_DETAILS[task_detail.uuid] = _copy(task_detail) e_fd.add(task_detail) if task_detail.uuid not in _TASK_DETAILS: _TASK_DETAILS[task_detail.uuid] = _copy(task_detail) task_detail.update(self.update_task_details(task_detail)) return e_fd except KeyError: raise exc.NotFound("No flow details found with id: %s" % flow_detail.uuid) @decorators.locked(lock="_save_locks") def save_logbook(self, book): # Get a existing logbook model (or create it if it isn't there). try: e_lb = p_utils.logbook_merge(_LOG_BOOKS[book.uuid], book) # Add anything in to the new logbook that isn't already # in the existing logbook. for flow_detail in book: if e_lb.find(flow_detail.uuid) is None: _FLOW_DETAILS[flow_detail.uuid] = _copy(flow_detail) e_lb.add(flow_detail) if flow_detail.uuid not in _FLOW_DETAILS: _FLOW_DETAILS[flow_detail.uuid] = _copy(flow_detail) flow_detail.update(self.update_flow_details(flow_detail)) # TODO(harlowja): figure out a better way to set this property # without actually setting a 'private' property. e_lb._updated_at = timeutils.utcnow() except KeyError: # Ok the one given is now the one we will save e_lb = _copy(book) # TODO(harlowja): figure out a better way to set this property # without actually setting a 'private' property. e_lb._created_at = timeutils.utcnow() # Record all the pieces as being saved. _LOG_BOOKS[e_lb.uuid] = e_lb for flow_detail in e_lb: _FLOW_DETAILS[flow_detail.uuid] = _copy(flow_detail) flow_detail.update(self.update_flow_details(flow_detail)) return e_lb @decorators.locked(lock='_read_lock') def get_logbook(self, book_uuid): try: return _LOG_BOOKS[book_uuid] except KeyError: raise exc.NotFound("No logbook found with id: %s" % book_uuid) def get_logbooks(self): # NOTE(harlowja): don't hold the lock while iterating with self._read_lock: books = list(_LOG_BOOKS.values()) for lb in books: yield lb
{ "content_hash": "e4c092be4f6f4f25ff0f074f9dcbe542", "timestamp": "", "source": "github", "line_count": 147, "max_line_length": 78, "avg_line_length": 36.23809523809524, "alnum_prop": 0.58832363431575, "repo_name": "jessicalucci/TaskManagement", "id": "6b937f4ac90fd8bf4dad9e8d392e194a358fd698", "size": "6092", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "taskflow/persistence/backends/impl_memory.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "363177" }, { "name": "Shell", "bytes": "3255" } ], "symlink_target": "" }
def split_string(string, chunk_size): """ Generator function that splits string into chunks. :param string: string to be split. :param chunk_size: size of a single chunk. :return: yields individual chunks. """ if not isinstance(string, str): raise TypeError("string parameter must be an instance of str") if not isinstance(chunk_size, int): raise TypeError("chunk_size parameter must be an instance of int") for index in range(0, len(string), chunk_size): yield string[index:index + chunk_size] def hex_string(string, byte_separator, chunk_separator, chunk_size): """ Returns HEXized string. :param string: string to be converted to hex representation. :param byte_separator: string to be inserted in-between bytes. :param chunk_separator: string to be inserted in-between chunks. :param chunk_size: integer specifying the size of a single chunk (in bytes). :return: hexadecimal representation of the string. """ if not isinstance(string, str): raise TypeError("string parameter must be an instance of str") if not isinstance(byte_separator, str): raise TypeError("byte_separator parameter must be an instance of str") if not isinstance(chunk_separator, str): raise TypeError("chunk_separator parameter must be an instance of str") if not isinstance(chunk_size, int): raise TypeError("chunk_size parameter must be an instance of int") if chunk_size < 2: raise ValueError("chunk_size must be greater than 1") return chunk_separator.join( [byte_separator.join( ["{0:02x}".format(ord(byte)) for byte in x] ) for x in split_string(string, chunk_size)] ) def safe_string(string, safe_char): """ Replaces all non-printable characters with safe_char. :param string: string to be scanned for unprintable characters. :param safe_char: a single character string to be used in place of unsafe characters. :return: safe to print string. """ if not isinstance(string, str): raise TypeError("string parameter must be an instance of str") if not isinstance(safe_char, str): raise TypeError("safe_char parameter must be an instance of str") if len(safe_char) != 1: raise ValueError("safe_char length must be 1") unsafe_char_codes = range(0, 32) + [127] return "".join([char if ord(char) not in unsafe_char_codes else safe_char for char in string])
{ "content_hash": "908ef0327c53f7b3f73d9c635fb81ab2", "timestamp": "", "source": "github", "line_count": 62, "max_line_length": 98, "avg_line_length": 40.29032258064516, "alnum_prop": 0.6809447558046438, "repo_name": "Paulius-Maruska/pyhex", "id": "6d495965d7957a754339315b14e9ce03c65727f6", "size": "2580", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pyhex/helper.py", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
"""The TensorBoard Text plugin.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import json import textwrap # pylint: disable=g-bad-import-order # Necessary for an internal test with special behavior for numpy. import numpy as np # pylint: enable=g-bad-import-order import tensorflow as tf from werkzeug import wrappers from tensorboard import plugin_util from tensorboard.backend import http_util from tensorboard.plugins import base_plugin # The prefix of routes provided by this plugin. _PLUGIN_PREFIX_ROUTE = 'text' # HTTP routes TAGS_ROUTE = '/tags' TEXT_ROUTE = '/text' WARNING_TEMPLATE = textwrap.dedent("""\ **Warning:** This text summary contained data of dimensionality %d, but only \ 2d tables are supported. Showing a 2d slice of the data instead.""") def make_table_row(contents, tag='td'): """Given an iterable of string contents, make a table row. Args: contents: An iterable yielding strings. tag: The tag to place contents in. Defaults to 'td', you might want 'th'. Returns: A string containing the content strings, organized into a table row. Example: make_table_row(['one', 'two', 'three']) == ''' <tr> <td>one</td> <td>two</td> <td>three</td> </tr>''' """ columns = ('<%s>%s</%s>\n' % (tag, s, tag) for s in contents) return '<tr>\n' + ''.join(columns) + '</tr>\n' def make_table(contents, headers=None): """Given a numpy ndarray of strings, concatenate them into a html table. Args: contents: A np.ndarray of strings. May be 1d or 2d. In the 1d case, the table is laid out vertically (i.e. row-major). headers: A np.ndarray or list of string header names for the table. Returns: A string containing all of the content strings, organized into a table. Raises: ValueError: If contents is not a np.ndarray. ValueError: If contents is not 1d or 2d. ValueError: If contents is empty. ValueError: If headers is present and not a list, tuple, or ndarray. ValueError: If headers is not 1d. ValueError: If number of elements in headers does not correspond to number of columns in contents. """ if not isinstance(contents, np.ndarray): raise ValueError('make_table contents must be a numpy ndarray') if contents.ndim not in [1, 2]: raise ValueError('make_table requires a 1d or 2d numpy array, was %dd' % contents.ndim) if headers: if isinstance(headers, (list, tuple)): headers = np.array(headers) if not isinstance(headers, np.ndarray): raise ValueError('Could not convert headers %s into np.ndarray' % headers) if headers.ndim != 1: raise ValueError('Headers must be 1d, is %dd' % headers.ndim) expected_n_columns = contents.shape[1] if contents.ndim == 2 else 1 if headers.shape[0] != expected_n_columns: raise ValueError('Number of headers %d must match number of columns %d' % (headers.shape[0], expected_n_columns)) header = '<thead>\n%s</thead>\n' % make_table_row(headers, tag='th') else: header = '' n_rows = contents.shape[0] if contents.ndim == 1: # If it's a vector, we need to wrap each element in a new list, otherwise # we would turn the string itself into a row (see test code) rows = (make_table_row([contents[i]]) for i in range(n_rows)) else: rows = (make_table_row(contents[i, :]) for i in range(n_rows)) return '<table>\n%s<tbody>\n%s</tbody>\n</table>' % (header, ''.join(rows)) def reduce_to_2d(arr): """Given a np.npdarray with nDims > 2, reduce it to 2d. It does this by selecting the zeroth coordinate for every dimension greater than two. Args: arr: a numpy ndarray of dimension at least 2. Returns: A two-dimensional subarray from the input array. Raises: ValueError: If the argument is not a numpy ndarray, or the dimensionality is too low. """ if not isinstance(arr, np.ndarray): raise ValueError('reduce_to_2d requires a numpy.ndarray') ndims = len(arr.shape) if ndims < 2: raise ValueError('reduce_to_2d requires an array of dimensionality >=2') # slice(None) is equivalent to `:`, so we take arr[0,0,...0,:,:] slices = ([0] * (ndims - 2)) + [slice(None), slice(None)] return arr[slices] def text_array_to_html(text_arr): """Take a numpy.ndarray containing strings, and convert it into html. If the ndarray contains a single scalar string, that string is converted to html via our sanitized markdown parser. If it contains an array of strings, the strings are individually converted to html and then composed into a table using make_table. If the array contains dimensionality greater than 2, all but two of the dimensions are removed, and a warning message is prefixed to the table. Args: text_arr: A numpy.ndarray containing strings. Returns: The array converted to html. """ if not text_arr.shape: # It is a scalar. No need to put it in a table, just apply markdown return plugin_util.markdown_to_safe_html( text_arr.astype(np.dtype(str)).tostring()) warning = '' if len(text_arr.shape) > 2: warning = plugin_util.markdown_to_safe_html(WARNING_TEMPLATE % len(text_arr.shape)) text_arr = reduce_to_2d(text_arr) html_arr = [plugin_util.markdown_to_safe_html(x) for x in text_arr.reshape(-1)] html_arr = np.array(html_arr).reshape(text_arr.shape) return warning + make_table(html_arr) def process_string_tensor_event(event): """Convert a TensorEvent into a JSON-compatible response.""" string_arr = tf.make_ndarray(event.tensor_proto) html = text_array_to_html(string_arr) return { 'wall_time': event.wall_time, 'step': event.step, 'text': html, } class TextPlugin(base_plugin.TBPlugin): """Text Plugin for TensorBoard.""" plugin_name = _PLUGIN_PREFIX_ROUTE def __init__(self, context): """Instantiates TextPlugin via TensorBoard core. Args: context: A base_plugin.TBContext instance. """ self._multiplexer = context.multiplexer def index_impl(self): # A previous system of collecting and serving text summaries involved # storing the tags of text summaries within tensors.json files. See if we # are currently using that system. We do not want to drop support for that # use case. run_to_series = collections.defaultdict(list) name = 'tensorboard_text' run_to_assets = self._multiplexer.PluginAssets(name) for run, assets in run_to_assets.items(): if 'tensors.json' in assets: tensors_json = self._multiplexer.RetrievePluginAsset( run, name, 'tensors.json') tensors = json.loads(tensors_json) run_to_series[run] = tensors else: run_to_series[run] = [] # TensorBoard is obtaining summaries related to the text plugin based on # SummaryMetadata stored within Value protos. mapping = self._multiplexer.PluginRunToTagToContent(_PLUGIN_PREFIX_ROUTE) # Augment the summaries created via the deprecated (plugin asset based) # method with these summaries created with the new method. When they # conflict, the summaries created via the new method overrides. for (run, tags) in mapping.items(): run_to_series[run] += tags.keys() return run_to_series @wrappers.Request.application def tags_route(self, request): # Map from run to a list of tags. response = { run: tag_listing for (run, tag_listing) in self.index_impl().items() } return http_util.Respond(request, response, 'application/json') def text_impl(self, run, tag): try: text_events = self._multiplexer.Tensors(run, tag) except KeyError: text_events = [] responses = [process_string_tensor_event(ev) for ev in text_events] return responses @wrappers.Request.application def text_route(self, request): run = request.args.get('run') tag = request.args.get('tag') response = self.text_impl(run, tag) return http_util.Respond(request, response, 'application/json') def get_plugin_apps(self): return { TAGS_ROUTE: self.tags_route, TEXT_ROUTE: self.text_route, } def is_active(self): """Determines whether this plugin is active. This plugin is only active if TensorBoard sampled any text summaries. Returns: Whether this plugin is active. """ return bool(self._multiplexer and any(self.index_impl().values()))
{ "content_hash": "213962501fac487f645ea99d095b3502", "timestamp": "", "source": "github", "line_count": 260, "max_line_length": 80, "avg_line_length": 32.965384615384615, "alnum_prop": 0.6757671216894178, "repo_name": "ioeric/tensorboard", "id": "a564cad432e29e55c167eda638deb4d62f02130f", "size": "9260", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tensorboard/plugins/text/text_plugin.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "727010" }, { "name": "Java", "bytes": "26959" }, { "name": "JavaScript", "bytes": "3438" }, { "name": "Protocol Buffer", "bytes": "9258" }, { "name": "Python", "bytes": "1079129" }, { "name": "Shell", "bytes": "7322" }, { "name": "TypeScript", "bytes": "834655" } ], "symlink_target": "" }
from django.core.management.base import BaseCommand from django.utils import timezone from probe.lib import pyspeedtest from probe.models import Speed, AverageQuality from datetime import datetime, timedelta from probe.lib.helpers import is_first_run def fill(): if is_first_run(): return speed_values = pyspeedtest.SpeedTest() ping = round(speed_values.ping()) download = round(speed_values.download()) upload = round(speed_values.upload()) entry = Speed(time=timezone.now(), server=speed_values.host, ping=ping, download=download, upload=upload) entry.save() def calc_qos(): if is_first_run(): return last_24 = Speed.objects.all().filter(time__gte=datetime.now() - timedelta(days=1)) avg_down = 0 avg_up = 0 for o in last_24: avg_down += o.download avg_up += o.upload avg_down /= len(last_24) avg_up /= len(last_24) qos = AverageQuality(time=timezone.now(), avg_download=avg_down, avg_upload=avg_up) qos.save() class Command(BaseCommand): def handle(self, *args, **options): fill() calc_qos()
{ "content_hash": "3e6db0024c23d1e56b972247801f71c8", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 109, "avg_line_length": 27.317073170731707, "alnum_prop": 0.6642857142857143, "repo_name": "krstnschwpwr/speedcontrol", "id": "83223eba9a1bd52b4a95c2b12cf4a2ae8dbfc15c", "size": "1120", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "probe/management/commands/fill_values.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "10566" }, { "name": "Python", "bytes": "39655" }, { "name": "Shell", "bytes": "5138" } ], "symlink_target": "" }
import datetime import slugify as unicodeslugify from django.conf import settings from django.contrib.auth import get_user_model from django.db import models, transaction from django.utils.translation import gettext_lazy as _ from asylum.models import AsylumModel def get_sentinel_user(): """Gets a "sentinel" user ("deleted") and for assigning as uploader""" return get_user_model().objects.get_or_create(username='deleted')[0] def datestamped_and_normalized(instance, filename): """Normalized filename and places in datestamped path""" file_parts = filename.split('.') if len(file_parts) > 1: name = '.'.join(file_parts[:-1]) ext = '.' + file_parts[-1] else: ext = '' name = filename filename_normalized = unicodeslugify.slugify( name, only_ascii=True, lower=True, spaces=False, space_replacement='_' ) + ext return datetime.datetime.now().strftime("ndaparser/%Y/%m/%d/{}").format(filename_normalized) class UploadedTransaction(AsylumModel): """Track uploaded transaction files""" user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET(get_sentinel_user)) file = models.FileField(upload_to=datestamped_and_normalized) stamp = models.DateTimeField(auto_now_add=True, editable=False) last_transaction = models.DateField() class Meta: verbose_name = _('Uploaded transaction') verbose_name_plural = _('Uploaded transaction') ordering = ['-stamp']
{ "content_hash": "81dcbb7a345dd105e698a2a9fe721d3b", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 96, "avg_line_length": 34.95348837209303, "alnum_prop": 0.6899534264803726, "repo_name": "HelsinkiHacklab/asylum", "id": "16ef60faa1357a31c66bddfd5c63737fee5f9cec", "size": "1527", "binary": false, "copies": "1", "ref": "refs/heads/hhl_changes", "path": "project/ndaparser/models.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "31215" }, { "name": "Dockerfile", "bytes": "3192" }, { "name": "HTML", "bytes": "9736" }, { "name": "JavaScript", "bytes": "2309" }, { "name": "Python", "bytes": "223215" }, { "name": "Shell", "bytes": "5899" } ], "symlink_target": "" }
''' Created on 2014-8-11 @author: wendal ''' from distutils.core import setup import py2exe setup(console=['main.py'], options={ # And now, configure py2exe by passing more options; 'py2exe': { # This is magic: if you don't add these, your .exe may # or may not work on older/newer versions of windows. "dll_excludes": [ "MSVCP90.dll", "MSWSOCK.dll", "mswsock.dll", "powrprof.dll", ], # Py2exe will not figure out that you need these on its own. # You may need one, the other, or both. 'includes': [ 'sip', 'PyQt4.QtNetwork', ], # Optional: make one big exe with everything in it, or # a folder with many things in it. Your choice # 'bundle_files': 1, } }, # Qt's dynamically loaded plugins and py2exe really don't # get along. data_files = [ ('phonon_backend', [ 'C:\Python27\Lib\site-packages\PyQt4\plugins\phonon_backend\phonon_ds94.dll' ]), ('imageplugins', [ 'c:\Python27\lib\site-packages\PyQt4\plugins\imageformats\qgif4.dll', 'c:\Python27\lib\site-packages\PyQt4\plugins\imageformats\qjpeg4.dll', 'c:\Python27\lib\site-packages\PyQt4\plugins\imageformats\qsvg4.dll', ]), ], # If you choose the bundle above, you may want to use this, too. # zipfile=None, )
{ "content_hash": "e3d567aa8d4fbc1551857ffa14eb7abb", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 92, "avg_line_length": 25.896551724137932, "alnum_prop": 0.5552596537949401, "repo_name": "wendal/yeelink_tester", "id": "b130c9b3ae4debb43a24b659c935ea4ba295c6c3", "size": "1502", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "package_py2exe.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "292344" } ], "symlink_target": "" }
from spcControl.monitor import main    main()   
{ "content_hash": "bfff833e7d7f81812dc3ab5b8029d53d", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 35, "avg_line_length": 12, "alnum_prop": 0.7291666666666666, "repo_name": "HPPTECH/hpp_IOSTressTest", "id": "095e04ee179ee642646b64df45489825a2e39f1d", "size": "52", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Refer/expect_01/monitor/__main__.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "5571" }, { "name": "C", "bytes": "5083" }, { "name": "CSS", "bytes": "53608" }, { "name": "HTML", "bytes": "2732176" }, { "name": "JavaScript", "bytes": "945408" }, { "name": "Makefile", "bytes": "5568" }, { "name": "Python", "bytes": "5810318" }, { "name": "Shell", "bytes": "21948" } ], "symlink_target": "" }
"""Utilities.""" import numpy as np import nibabel as nb from nipype import logging from nipype.utils.filemanip import fname_presuffix from nipype.interfaces.base import ( traits, isdefined, File, InputMultiPath, TraitedSpec, BaseInterfaceInputSpec, SimpleInterface, ) LOG = logging.getLogger("nipype.interface") class _TPM2ROIInputSpec(BaseInterfaceInputSpec): in_tpm = File( exists=True, mandatory=True, desc="Tissue probability map file in T1 space" ) in_mask = File( exists=True, mandatory=True, desc="Binary mask of skull-stripped T1w image" ) mask_erode_mm = traits.Float( xor=["mask_erode_prop"], desc="erode input mask (kernel width in mm)" ) erode_mm = traits.Float( xor=["erode_prop"], desc="erode output mask (kernel width in mm)" ) mask_erode_prop = traits.Float( xor=["mask_erode_mm"], desc="erode input mask (target volume ratio)" ) erode_prop = traits.Float( xor=["erode_mm"], desc="erode output mask (target volume ratio)" ) prob_thresh = traits.Float( 0.95, usedefault=True, desc="threshold for the tissue probability maps" ) class _TPM2ROIOutputSpec(TraitedSpec): roi_file = File(exists=True, desc="output ROI file") eroded_mask = File(exists=True, desc="resulting eroded mask") class TPM2ROI(SimpleInterface): """ Convert tissue probability maps (TPMs) into ROIs. This interface follows the following logic: #. Erode ``in_mask`` by ``mask_erode_mm`` and apply to ``in_tpm`` #. Threshold masked TPM at ``prob_thresh`` #. Erode resulting mask by ``erode_mm`` """ input_spec = _TPM2ROIInputSpec output_spec = _TPM2ROIOutputSpec def _run_interface(self, runtime): mask_erode_mm = self.inputs.mask_erode_mm if not isdefined(mask_erode_mm): mask_erode_mm = None erode_mm = self.inputs.erode_mm if not isdefined(erode_mm): erode_mm = None mask_erode_prop = self.inputs.mask_erode_prop if not isdefined(mask_erode_prop): mask_erode_prop = None erode_prop = self.inputs.erode_prop if not isdefined(erode_prop): erode_prop = None roi_file, eroded_mask = _tpm2roi( self.inputs.in_tpm, self.inputs.in_mask, mask_erode_mm, erode_mm, mask_erode_prop, erode_prop, self.inputs.prob_thresh, newpath=runtime.cwd, ) self._results["roi_file"] = roi_file self._results["eroded_mask"] = eroded_mask return runtime class _AddTPMsInputSpec(BaseInterfaceInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, desc="input list of ROIs" ) indices = traits.List(traits.Int, desc="select specific maps") class _AddTPMsOutputSpec(TraitedSpec): out_file = File(exists=True, desc="union of binarized input files") class AddTPMs(SimpleInterface): """Calculate the union of several :abbr:`TPMs (tissue-probability maps)`.""" input_spec = _AddTPMsInputSpec output_spec = _AddTPMsOutputSpec def _run_interface(self, runtime): in_files = self.inputs.in_files indices = list(range(len(in_files))) if isdefined(self.inputs.indices): indices = self.inputs.indices if len(self.inputs.in_files) < 2: self._results["out_file"] = in_files[0] return runtime first_fname = in_files[indices[0]] if len(indices) == 1: self._results["out_file"] = first_fname return runtime im = nb.concat_images([in_files[i] for i in indices]) data = im.get_fdata().sum(axis=3) data = np.clip(data, a_min=0.0, a_max=1.0) out_file = fname_presuffix(first_fname, suffix="_tpmsum", newpath=runtime.cwd) newnii = im.__class__(data, im.affine, im.header) newnii.set_data_dtype(np.float32) # Set visualization thresholds newnii.header["cal_max"] = 1.0 newnii.header["cal_min"] = 0.0 newnii.to_filename(out_file) self._results["out_file"] = out_file return runtime def _tpm2roi( in_tpm, in_mask, mask_erosion_mm=None, erosion_mm=None, mask_erosion_prop=None, erosion_prop=None, pthres=0.95, newpath=None, ): """ Generate a mask from a tissue probability map """ import scipy.ndimage as nd tpm_img = nb.load(in_tpm) roi_mask = (tpm_img.get_fdata() >= pthres).astype(np.uint8) eroded_mask_file = None erode_in = (mask_erosion_mm is not None and mask_erosion_mm > 0) or ( mask_erosion_prop is not None and mask_erosion_prop < 1 ) if erode_in: eroded_mask_file = fname_presuffix(in_mask, suffix="_eroded", newpath=newpath) mask_img = nb.load(in_mask) mask_data = np.asanyarray(mask_img.dataobj).astype(np.uint8) if mask_erosion_mm: iter_n = max(int(mask_erosion_mm / max(mask_img.header.get_zooms())), 1) mask_data = nd.binary_erosion(mask_data, iterations=iter_n) else: orig_vol = np.sum(mask_data > 0) while np.sum(mask_data > 0) / orig_vol > mask_erosion_prop: mask_data = nd.binary_erosion(mask_data, iterations=1) # Store mask eroded = nb.Nifti1Image(mask_data, mask_img.affine, mask_img.header) eroded.set_data_dtype(np.uint8) eroded.to_filename(eroded_mask_file) # Mask TPM data (no effect if not eroded) roi_mask[~mask_data] = 0 # shrinking erode_out = (erosion_mm is not None and erosion_mm > 0) or ( erosion_prop is not None and erosion_prop < 1 ) if erode_out: if erosion_mm: iter_n = max(int(erosion_mm / max(tpm_img.header.get_zooms())), 1) iter_n = int(erosion_mm / max(tpm_img.header.get_zooms())) roi_mask = nd.binary_erosion(roi_mask, iterations=iter_n) else: orig_vol = np.sum(roi_mask > 0) while np.sum(roi_mask > 0) / orig_vol > erosion_prop: roi_mask = nd.binary_erosion(roi_mask, iterations=1) # Create image to resample roi_fname = fname_presuffix(in_tpm, suffix="_roi", newpath=newpath) roi_img = nb.Nifti1Image(roi_mask, tpm_img.affine, tpm_img.header) roi_img.set_data_dtype(np.uint8) roi_img.to_filename(roi_fname) return roi_fname, eroded_mask_file or in_mask
{ "content_hash": "d3787e0ca5bdb46fdef92a2ec34356e3", "timestamp": "", "source": "github", "line_count": 204, "max_line_length": 86, "avg_line_length": 32.05392156862745, "alnum_prop": 0.6147729010552072, "repo_name": "oesteban/niworkflows", "id": "e3ad2a71aadeaf7c47204fd5970cacf68e9978d8", "size": "7418", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "niworkflows/interfaces/probmaps.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "11035" }, { "name": "HTML", "bytes": "500" }, { "name": "Makefile", "bytes": "413" }, { "name": "Python", "bytes": "791805" }, { "name": "Shell", "bytes": "1717" }, { "name": "Smarty", "bytes": "5875" } ], "symlink_target": "" }
from django.db import models class Member(models.Model): id = models.IntegerField(primary_key=True) name = models.CharField(max_length=100, null=True) logoff_ts = models.DateTimeField(null=True) join_ts = models.DateTimeField(null=True) ship = models.ForeignKey('main.Ship', null=True) location = models.ForeignKey('main.Location', null=True) def __unicode__(self): return self.name class Ship(models.Model): id = models.IntegerField(primary_key=True) name = models.CharField(max_length=100, null=True) def __unicode__(self): return self.name class Location(models.Model): id = models.IntegerField(primary_key=True) name = models.CharField(max_length=100, null=True) def __unicode__(self): return self.name
{ "content_hash": "73fa4f09ff73d5b1a0cbe211362d6d7f", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 60, "avg_line_length": 27.310344827586206, "alnum_prop": 0.6830808080808081, "repo_name": "all-out/lightswitch", "id": "8223ca4884f338f9d42656a1a7c8c0b3981e5d51", "size": "792", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lightswitch/main/models.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "2680" }, { "name": "Python", "bytes": "41255" } ], "symlink_target": "" }
"""Compute-related Utilities and helpers.""" import contextlib import functools import inspect import itertools import math import traceback import netifaces from oslo_log import log from oslo_serialization import jsonutils from oslo_utils import excutils from nova.accelerator import cyborg from nova import block_device from nova.compute import power_state from nova.compute import task_states from nova.compute import vm_states import nova.conf from nova import exception from nova import notifications from nova.notifications.objects import aggregate as aggregate_notification from nova.notifications.objects import base as notification_base from nova.notifications.objects import compute_task as task_notification from nova.notifications.objects import exception as notification_exception from nova.notifications.objects import flavor as flavor_notification from nova.notifications.objects import instance as instance_notification from nova.notifications.objects import keypair as keypair_notification from nova.notifications.objects import libvirt as libvirt_notification from nova.notifications.objects import metrics as metrics_notification from nova.notifications.objects import request_spec as reqspec_notification from nova.notifications.objects import scheduler as scheduler_notification from nova.notifications.objects import server_group as sg_notification from nova.notifications.objects import volume as volume_notification from nova import objects from nova.objects import fields from nova import rpc from nova import safe_utils from nova import utils CONF = nova.conf.CONF LOG = log.getLogger(__name__) # These properties are specific to a particular image by design. It # does not make sense for them to be inherited by server snapshots. # This list is distinct from the configuration option of the same # (lowercase) name. NON_INHERITABLE_IMAGE_PROPERTIES = frozenset([ 'cinder_encryption_key_id', 'cinder_encryption_key_deletion_policy', 'img_signature', 'img_signature_hash_method', 'img_signature_key_type', 'img_signature_certificate_uuid']) def exception_to_dict(fault, message=None): """Converts exceptions to a dict for use in notifications. :param fault: Exception that occurred :param message: Optional fault message, otherwise the message is derived from the fault itself. :returns: dict with the following items: - exception: the fault itself - message: one of (in priority order): - the provided message to this method - a formatted NovaException message - the fault class name - code: integer code for the fault (defaults to 500) """ # TODO(johngarbutt) move to nova/exception.py to share with wrap_exception code = 500 if hasattr(fault, "kwargs"): code = fault.kwargs.get('code', 500) # get the message from the exception that was thrown # if that does not exist, use the name of the exception class itself try: if not message: message = fault.format_message() # These exception handlers are broad so we don't fail to log the fault # just because there is an unexpected error retrieving the message except Exception: # In this case either we have a NovaException which failed to format # the message or we have a non-nova exception which could contain # sensitive details. Since we're not sure, be safe and set the message # to the exception class name. Note that we don't guard on # context.is_admin here because the message is always shown in the API, # even to non-admin users (e.g. NoValidHost) but only the traceback # details are shown to users with the admin role. Checking for admin # context here is also not helpful because admins can perform # operations on a tenant user's server (migrations, reboot, etc) and # service startup and periodic tasks could take actions on a server # and those use an admin context. message = fault.__class__.__name__ # NOTE(dripton) The message field in the database is limited to 255 chars. # MySQL silently truncates overly long messages, but PostgreSQL throws an # error if we don't truncate it. u_message = utils.safe_truncate(message, 255) fault_dict = dict(exception=fault) fault_dict["message"] = u_message fault_dict["code"] = code return fault_dict def _get_fault_details(exc_info, error_code): details = '' # TODO(mriedem): Why do we only include the details if the code is 500? # Though for non-nova exceptions the code will probably be 500. if exc_info and error_code == 500: # We get the full exception details including the value since # the fault message may not contain that information for non-nova # exceptions (see exception_to_dict). details = ''.join(traceback.format_exception( exc_info[0], exc_info[1], exc_info[2])) return str(details) def add_instance_fault_from_exc(context, instance, fault, exc_info=None, fault_message=None): """Adds the specified fault to the database.""" fault_obj = objects.InstanceFault(context=context) fault_obj.host = CONF.host fault_obj.instance_uuid = instance.uuid fault_obj.update(exception_to_dict(fault, message=fault_message)) code = fault_obj.code fault_obj.details = _get_fault_details(exc_info, code) fault_obj.create() def get_device_name_for_instance(instance, bdms, device): """Validates (or generates) a device name for instance. This method is a wrapper for get_next_device_name that gets the list of used devices and the root device from a block device mapping. :raises TooManyDiskDevices: if the maxmimum allowed devices to attach to a single instance is exceeded. """ mappings = block_device.instance_block_mapping(instance, bdms) return get_next_device_name(instance, mappings.values(), mappings['root'], device) def default_device_names_for_instance(instance, root_device_name, *block_device_lists): """Generate missing device names for an instance. :raises TooManyDiskDevices: if the maxmimum allowed devices to attach to a single instance is exceeded. """ dev_list = [bdm.device_name for bdm in itertools.chain(*block_device_lists) if bdm.device_name] if root_device_name not in dev_list: dev_list.append(root_device_name) for bdm in itertools.chain(*block_device_lists): dev = bdm.device_name if not dev: dev = get_next_device_name(instance, dev_list, root_device_name) bdm.device_name = dev bdm.save() dev_list.append(dev) def check_max_disk_devices_to_attach(num_devices): maximum = CONF.compute.max_disk_devices_to_attach if maximum < 0: return if num_devices > maximum: raise exception.TooManyDiskDevices(maximum=maximum) def get_next_device_name(instance, device_name_list, root_device_name=None, device=None): """Validates (or generates) a device name for instance. If device is not set, it will generate a unique device appropriate for the instance. It uses the root_device_name (if provided) and the list of used devices to find valid device names. If the device name is valid but applicable to a different backend (for example /dev/vdc is specified but the backend uses /dev/xvdc), the device name will be converted to the appropriate format. :raises TooManyDiskDevices: if the maxmimum allowed devices to attach to a single instance is exceeded. """ req_prefix = None req_letter = None if device: try: req_prefix, req_letter = block_device.match_device(device) except (TypeError, AttributeError, ValueError): raise exception.InvalidDevicePath(path=device) if not root_device_name: root_device_name = block_device.DEFAULT_ROOT_DEV_NAME try: prefix = block_device.match_device( block_device.prepend_dev(root_device_name))[0] except (TypeError, AttributeError, ValueError): raise exception.InvalidDevicePath(path=root_device_name) if req_prefix != prefix: LOG.debug("Using %(prefix)s instead of %(req_prefix)s", {'prefix': prefix, 'req_prefix': req_prefix}) used_letters = set() for device_path in device_name_list: letter = block_device.get_device_letter(device_path) used_letters.add(letter) check_max_disk_devices_to_attach(len(used_letters) + 1) if not req_letter: req_letter = _get_unused_letter(used_letters) if req_letter in used_letters: raise exception.DevicePathInUse(path=device) return prefix + req_letter def get_root_bdm(context, instance, bdms=None): if bdms is None: if isinstance(instance, objects.Instance): uuid = instance.uuid else: uuid = instance['uuid'] bdms = objects.BlockDeviceMappingList.get_by_instance_uuid( context, uuid) return bdms.root_bdm() def is_volume_backed_instance(context, instance, bdms=None): root_bdm = get_root_bdm(context, instance, bdms) if root_bdm is not None: return root_bdm.is_volume # in case we hit a very old instance without root bdm, we _assume_ that # instance is backed by a volume, if and only if image_ref is not set if isinstance(instance, objects.Instance): return not instance.image_ref return not instance['image_ref'] def heal_reqspec_is_bfv(ctxt, request_spec, instance): """Calculates the is_bfv flag for a RequestSpec created before Rocky. Starting in Rocky, new instances have their RequestSpec created with the "is_bfv" flag to indicate if they are volume-backed which is used by the scheduler when determining root disk resource allocations. RequestSpecs created before Rocky will not have the is_bfv flag set so we need to calculate it here and update the RequestSpec. :param ctxt: nova.context.RequestContext auth context :param request_spec: nova.objects.RequestSpec used for scheduling :param instance: nova.objects.Instance being scheduled """ if 'is_bfv' in request_spec: return # Determine if this is a volume-backed instance and set the field # in the request spec accordingly. request_spec.is_bfv = is_volume_backed_instance(ctxt, instance) request_spec.save() def convert_mb_to_ceil_gb(mb_value): gb_int = 0 if mb_value: gb_float = mb_value / 1024.0 # ensure we reserve/allocate enough space by rounding up to nearest GB gb_int = int(math.ceil(gb_float)) return gb_int def _get_unused_letter(used_letters): # Return the first unused device letter index = 0 while True: letter = block_device.generate_device_letter(index) if letter not in used_letters: return letter index += 1 def get_value_from_system_metadata(instance, key, type, default): """Get a value of a specified type from image metadata. @param instance: The instance object @param key: The name of the property to get @param type: The python type the value is be returned as @param default: The value to return if key is not set or not the right type """ value = instance.system_metadata.get(key, default) try: return type(value) except ValueError: LOG.warning("Metadata value %(value)s for %(key)s is not of " "type %(type)s. Using default value %(default)s.", {'value': value, 'key': key, 'type': type, 'default': default}, instance=instance) return default def notify_usage_exists(notifier, context, instance_ref, host, current_period=False, ignore_missing_network_data=True, system_metadata=None, extra_usage_info=None): """Generates 'exists' unversioned legacy and transformed notification for an instance for usage auditing purposes. :param notifier: a messaging.Notifier :param context: request context for the current operation :param instance_ref: nova.objects.Instance object from which to report usage :param host: the host emitting the notification :param current_period: if True, this will generate a usage for the current usage period; if False, this will generate a usage for the previous audit period. :param ignore_missing_network_data: if True, log any exceptions generated while getting network info; if False, raise the exception. :param system_metadata: system_metadata override for the instance. If None, the instance_ref.system_metadata will be used. :param extra_usage_info: Dictionary containing extra values to add or override in the notification if not None. """ audit_start, audit_end = notifications.audit_period_bounds(current_period) if system_metadata is None: system_metadata = utils.instance_sys_meta(instance_ref) # add image metadata to the notification: image_meta = notifications.image_meta(system_metadata) extra_info = dict(audit_period_beginning=str(audit_start), audit_period_ending=str(audit_end), bandwidth={}, image_meta=image_meta) if extra_usage_info: extra_info.update(extra_usage_info) notify_about_instance_usage(notifier, context, instance_ref, 'exists', extra_usage_info=extra_info) audit_period = instance_notification.AuditPeriodPayload( audit_period_beginning=audit_start, audit_period_ending=audit_end) payload = instance_notification.InstanceExistsPayload( context=context, instance=instance_ref, audit_period=audit_period, bandwidth=[]) notification = instance_notification.InstanceExistsNotification( context=context, priority=fields.NotificationPriority.INFO, publisher=notification_base.NotificationPublisher( host=host, source=fields.NotificationSource.COMPUTE), event_type=notification_base.EventType( object='instance', action=fields.NotificationAction.EXISTS), payload=payload) notification.emit(context) def notify_about_instance_usage(notifier, context, instance, event_suffix, network_info=None, extra_usage_info=None, fault=None): """Send an unversioned legacy notification about an instance. All new notifications should use notify_about_instance_action which sends a versioned notification. :param notifier: a messaging.Notifier :param event_suffix: Event type like "delete.start" or "exists" :param network_info: Networking information, if provided. :param extra_usage_info: Dictionary containing extra values to add or override in the notification. """ if not extra_usage_info: extra_usage_info = {} usage_info = notifications.info_from_instance(context, instance, network_info, populate_image_ref_url=True, **extra_usage_info) if fault: # NOTE(johngarbutt) mirrors the format in wrap_exception fault_payload = exception_to_dict(fault) LOG.debug(fault_payload["message"], instance=instance) usage_info.update(fault_payload) if event_suffix.endswith("error"): method = notifier.error else: method = notifier.info method(context, 'compute.instance.%s' % event_suffix, usage_info) def _get_fault_and_priority_from_exception(exception: Exception): fault = None priority = fields.NotificationPriority.INFO if not exception: return fault, priority fault = notification_exception.ExceptionPayload.from_exception(exception) priority = fields.NotificationPriority.ERROR return fault, priority @rpc.if_notifications_enabled def notify_about_instance_action(context, instance, host, action, phase=None, source=fields.NotificationSource.COMPUTE, exception=None, bdms=None): """Send versioned notification about the action made on the instance :param instance: the instance which the action performed on :param host: the host emitting the notification :param action: the name of the action :param phase: the phase of the action :param source: the source of the notification :param exception: the thrown exception (used in error notifications) :param bdms: BlockDeviceMappingList object for the instance. If it is not provided then we will load it from the db if so configured """ fault, priority = _get_fault_and_priority_from_exception(exception) payload = instance_notification.InstanceActionPayload( context=context, instance=instance, fault=fault, bdms=bdms) notification = instance_notification.InstanceActionNotification( context=context, priority=priority, publisher=notification_base.NotificationPublisher( host=host, source=source), event_type=notification_base.EventType( object='instance', action=action, phase=phase), payload=payload) notification.emit(context) @rpc.if_notifications_enabled def notify_about_instance_create(context, instance, host, phase=None, exception=None, bdms=None): """Send versioned notification about instance creation :param context: the request context :param instance: the instance being created :param host: the host emitting the notification :param phase: the phase of the creation :param exception: the thrown exception (used in error notifications) :param bdms: BlockDeviceMappingList object for the instance. If it is not provided then we will load it from the db if so configured """ fault, priority = _get_fault_and_priority_from_exception(exception) payload = instance_notification.InstanceCreatePayload( context=context, instance=instance, fault=fault, bdms=bdms) notification = instance_notification.InstanceCreateNotification( context=context, priority=priority, publisher=notification_base.NotificationPublisher( host=host, source=fields.NotificationSource.COMPUTE), event_type=notification_base.EventType( object='instance', action=fields.NotificationAction.CREATE, phase=phase), payload=payload) notification.emit(context) @rpc.if_notifications_enabled def notify_about_scheduler_action(context, request_spec, action, phase=None, source=fields.NotificationSource.SCHEDULER): """Send versioned notification about the action made by the scheduler :param context: the RequestContext object :param request_spec: the RequestSpec object :param action: the name of the action :param phase: the phase of the action :param source: the source of the notification """ payload = reqspec_notification.RequestSpecPayload( request_spec=request_spec) notification = scheduler_notification.SelectDestinationsNotification( context=context, priority=fields.NotificationPriority.INFO, publisher=notification_base.NotificationPublisher( host=CONF.host, source=source), event_type=notification_base.EventType( object='scheduler', action=action, phase=phase), payload=payload) notification.emit(context) @rpc.if_notifications_enabled def notify_about_volume_attach_detach(context, instance, host, action, phase, volume_id=None, exception=None): """Send versioned notification about the action made on the instance :param instance: the instance which the action performed on :param host: the host emitting the notification :param action: the name of the action :param phase: the phase of the action :param volume_id: id of the volume will be attached :param exception: the thrown exception (used in error notifications) """ fault, priority = _get_fault_and_priority_from_exception(exception) payload = instance_notification.InstanceActionVolumePayload( context=context, instance=instance, fault=fault, volume_id=volume_id) notification = instance_notification.InstanceActionVolumeNotification( context=context, priority=priority, publisher=notification_base.NotificationPublisher( host=host, source=fields.NotificationSource.COMPUTE), event_type=notification_base.EventType( object='instance', action=action, phase=phase), payload=payload) notification.emit(context) @rpc.if_notifications_enabled def notify_about_instance_rescue_action(context, instance, host, rescue_image_ref, phase=None, exception=None): """Send versioned notification about the action made on the instance :param instance: the instance which the action performed on :param host: the host emitting the notification :param rescue_image_ref: the rescue image ref :param phase: the phase of the action :param exception: the thrown exception (used in error notifications) """ fault, priority = _get_fault_and_priority_from_exception(exception) payload = instance_notification.InstanceActionRescuePayload( context=context, instance=instance, fault=fault, rescue_image_ref=rescue_image_ref) notification = instance_notification.InstanceActionRescueNotification( context=context, priority=priority, publisher=notification_base.NotificationPublisher( host=host, source=fields.NotificationSource.COMPUTE), event_type=notification_base.EventType( object='instance', action=fields.NotificationAction.RESCUE, phase=phase), payload=payload) notification.emit(context) @rpc.if_notifications_enabled def notify_about_keypair_action(context, keypair, action, phase): """Send versioned notification about the keypair action on the instance :param context: the request context :param keypair: the keypair which the action performed on :param action: the name of the action :param phase: the phase of the action """ payload = keypair_notification.KeypairPayload(keypair=keypair) notification = keypair_notification.KeypairNotification( priority=fields.NotificationPriority.INFO, publisher=notification_base.NotificationPublisher( host=CONF.host, source=fields.NotificationSource.API), event_type=notification_base.EventType( object='keypair', action=action, phase=phase), payload=payload) notification.emit(context) @rpc.if_notifications_enabled def notify_about_volume_swap(context, instance, host, phase, old_volume_id, new_volume_id, exception=None): """Send versioned notification about the volume swap action on the instance :param context: the request context :param instance: the instance which the action performed on :param host: the host emitting the notification :param phase: the phase of the action :param old_volume_id: the ID of the volume that is copied from and detached :param new_volume_id: the ID of the volume that is copied to and attached :param exception: an exception """ fault, priority = _get_fault_and_priority_from_exception(exception) payload = instance_notification.InstanceActionVolumeSwapPayload( context=context, instance=instance, fault=fault, old_volume_id=old_volume_id, new_volume_id=new_volume_id) instance_notification.InstanceActionVolumeSwapNotification( context=context, priority=priority, publisher=notification_base.NotificationPublisher( host=host, source=fields.NotificationSource.COMPUTE), event_type=notification_base.EventType( object='instance', action=fields.NotificationAction.VOLUME_SWAP, phase=phase), payload=payload).emit(context) @rpc.if_notifications_enabled def notify_about_instance_snapshot(context, instance, host, phase, snapshot_image_id): """Send versioned notification about the snapshot action executed on the instance :param context: the request context :param instance: the instance from which a snapshot image is being created :param host: the host emitting the notification :param phase: the phase of the action :param snapshot_image_id: the ID of the snapshot """ payload = instance_notification.InstanceActionSnapshotPayload( context=context, instance=instance, fault=None, snapshot_image_id=snapshot_image_id) instance_notification.InstanceActionSnapshotNotification( context=context, priority=fields.NotificationPriority.INFO, publisher=notification_base.NotificationPublisher( host=host, source=fields.NotificationSource.COMPUTE), event_type=notification_base.EventType( object='instance', action=fields.NotificationAction.SNAPSHOT, phase=phase), payload=payload).emit(context) @rpc.if_notifications_enabled def notify_about_resize_prep_instance(context, instance, host, phase, new_flavor): """Send versioned notification about the instance resize action on the instance :param context: the request context :param instance: the instance which the resize action performed on :param host: the host emitting the notification :param phase: the phase of the action :param new_flavor: new flavor """ payload = instance_notification.InstanceActionResizePrepPayload( context=context, instance=instance, fault=None, new_flavor=flavor_notification.FlavorPayload(flavor=new_flavor)) instance_notification.InstanceActionResizePrepNotification( context=context, priority=fields.NotificationPriority.INFO, publisher=notification_base.NotificationPublisher( host=host, source=fields.NotificationSource.COMPUTE), event_type=notification_base.EventType( object='instance', action=fields.NotificationAction.RESIZE_PREP, phase=phase), payload=payload).emit(context) def notify_about_server_group_update(context, event_suffix, sg_payload): """Send a notification about server group update. :param event_suffix: Event type like "create.start" or "create.end" :param sg_payload: payload for server group update """ notifier = rpc.get_notifier(service='servergroup') notifier.info(context, 'servergroup.%s' % event_suffix, sg_payload) def notify_about_aggregate_update(context, event_suffix, aggregate_payload): """Send a notification about aggregate update. :param event_suffix: Event type like "create.start" or "create.end" :param aggregate_payload: payload for aggregate update """ aggregate_identifier = aggregate_payload.get('aggregate_id', None) if not aggregate_identifier: aggregate_identifier = aggregate_payload.get('name', None) if not aggregate_identifier: LOG.debug("No aggregate id or name specified for this " "notification and it will be ignored") return notifier = rpc.get_notifier(service='aggregate', host=aggregate_identifier) notifier.info(context, 'aggregate.%s' % event_suffix, aggregate_payload) @rpc.if_notifications_enabled def notify_about_aggregate_action(context, aggregate, action, phase): payload = aggregate_notification.AggregatePayload(aggregate) notification = aggregate_notification.AggregateNotification( priority=fields.NotificationPriority.INFO, publisher=notification_base.NotificationPublisher( host=CONF.host, source=fields.NotificationSource.API), event_type=notification_base.EventType( object='aggregate', action=action, phase=phase), payload=payload) notification.emit(context) @rpc.if_notifications_enabled def notify_about_aggregate_cache(context, aggregate, host, image_status, index, total): """Send a notification about aggregate cache_images progress. :param context: The RequestContext :param aggregate: The target aggregate :param host: The host within the aggregate for which to report status :param image_status: The result from the compute host, which is a dict of {image_id: status} :param index: An integer indicating progress toward completion, between 1 and $total :param total: The total number of hosts being processed in this operation, to bound $index """ success_statuses = ('cached', 'existing') payload = aggregate_notification.AggregateCachePayload(aggregate, host, index, total) payload.images_cached = [] payload.images_failed = [] for img, status in image_status.items(): if status in success_statuses: payload.images_cached.append(img) else: payload.images_failed.append(img) notification = aggregate_notification.AggregateCacheNotification( priority=fields.NotificationPriority.INFO, publisher=notification_base.NotificationPublisher( host=CONF.host, source=fields.NotificationSource.CONDUCTOR), event_type=notification_base.EventType( object='aggregate', action=fields.NotificationAction.IMAGE_CACHE, phase=fields.NotificationPhase.PROGRESS), payload=payload) notification.emit(context) def notify_about_host_update(context, event_suffix, host_payload): """Send a notification about host update. :param event_suffix: Event type like "create.start" or "create.end" :param host_payload: payload for host update. It is a dict and there should be at least the 'host_name' key in this dict. """ host_identifier = host_payload.get('host_name') if not host_identifier: LOG.warning("No host name specified for the notification of " "HostAPI.%s and it will be ignored", event_suffix) return notifier = rpc.get_notifier(service='api', host=host_identifier) notifier.info(context, 'HostAPI.%s' % event_suffix, host_payload) @rpc.if_notifications_enabled def notify_about_server_group_action(context, group, action): payload = sg_notification.ServerGroupPayload(group) notification = sg_notification.ServerGroupNotification( priority=fields.NotificationPriority.INFO, publisher=notification_base.NotificationPublisher( host=CONF.host, source=fields.NotificationSource.API), event_type=notification_base.EventType( object='server_group', action=action), payload=payload) notification.emit(context) @rpc.if_notifications_enabled def notify_about_server_group_add_member(context, group_id): group = objects.InstanceGroup.get_by_uuid(context, group_id) payload = sg_notification.ServerGroupPayload(group) notification = sg_notification.ServerGroupNotification( priority=fields.NotificationPriority.INFO, publisher=notification_base.NotificationPublisher( host=CONF.host, source=fields.NotificationSource.API), event_type=notification_base.EventType( object='server_group', action=fields.NotificationAction.ADD_MEMBER), payload=payload) notification.emit(context) @rpc.if_notifications_enabled def notify_about_instance_rebuild(context, instance, host, action=fields.NotificationAction.REBUILD, phase=None, source=fields.NotificationSource.COMPUTE, exception=None, bdms=None): """Send versioned notification about instance rebuild :param instance: the instance which the action performed on :param host: the host emitting the notification :param action: the name of the action :param phase: the phase of the action :param source: the source of the notification :param exception: the thrown exception (used in error notifications) :param bdms: BlockDeviceMappingList object for the instance. If it is not provided then we will load it from the db if so configured """ fault, priority = _get_fault_and_priority_from_exception(exception) payload = instance_notification.InstanceActionRebuildPayload( context=context, instance=instance, fault=fault, bdms=bdms) notification = instance_notification.InstanceActionRebuildNotification( context=context, priority=priority, publisher=notification_base.NotificationPublisher( host=host, source=source), event_type=notification_base.EventType( object='instance', action=action, phase=phase), payload=payload) notification.emit(context) @rpc.if_notifications_enabled def notify_about_metrics_update(context, host, host_ip, nodename, monitor_metric_list): """Send versioned notification about updating metrics :param context: the request context :param host: the host emitting the notification :param host_ip: the IP address of the host :param nodename: the node name :param monitor_metric_list: the MonitorMetricList object """ payload = metrics_notification.MetricsPayload( host=host, host_ip=host_ip, nodename=nodename, monitor_metric_list=monitor_metric_list) notification = metrics_notification.MetricsNotification( context=context, priority=fields.NotificationPriority.INFO, publisher=notification_base.NotificationPublisher( host=host, source=fields.NotificationSource.COMPUTE), event_type=notification_base.EventType( object='metrics', action=fields.NotificationAction.UPDATE), payload=payload) notification.emit(context) @rpc.if_notifications_enabled def notify_about_libvirt_connect_error(context, ip, exception): """Send a versioned notification about libvirt connect error. :param context: the request context :param ip: the IP address of the host :param exception: the thrown exception """ fault, _ = _get_fault_and_priority_from_exception(exception) payload = libvirt_notification.LibvirtErrorPayload(ip=ip, reason=fault) notification = libvirt_notification.LibvirtErrorNotification( priority=fields.NotificationPriority.ERROR, publisher=notification_base.NotificationPublisher( host=CONF.host, source=fields.NotificationSource.COMPUTE), event_type=notification_base.EventType( object='libvirt', action=fields.NotificationAction.CONNECT, phase=fields.NotificationPhase.ERROR), payload=payload) notification.emit(context) @rpc.if_notifications_enabled def notify_about_volume_usage(context, vol_usage, host): """Send versioned notification about the volume usage :param context: the request context :param vol_usage: the volume usage object :param host: the host emitting the notification """ payload = volume_notification.VolumeUsagePayload( vol_usage=vol_usage) notification = volume_notification.VolumeUsageNotification( context=context, priority=fields.NotificationPriority.INFO, publisher=notification_base.NotificationPublisher( host=host, source=fields.NotificationSource.COMPUTE), event_type=notification_base.EventType( object='volume', action=fields.NotificationAction.USAGE), payload=payload) notification.emit(context) @rpc.if_notifications_enabled def notify_about_compute_task_error(context, action, instance_uuid, request_spec, state, exception): """Send a versioned notification about compute task error. :param context: the request context :param action: the name of the action :param instance_uuid: the UUID of the instance :param request_spec: the request spec object or the dict includes request spec information :param state: the vm state of the instance :param exception: the thrown exception :param tb: the traceback """ if (request_spec is not None and not isinstance(request_spec, objects.RequestSpec)): request_spec = objects.RequestSpec.from_primitives( context, request_spec, {}) fault, _ = _get_fault_and_priority_from_exception(exception) payload = task_notification.ComputeTaskPayload( instance_uuid=instance_uuid, request_spec=request_spec, state=state, reason=fault) notification = task_notification.ComputeTaskNotification( priority=fields.NotificationPriority.ERROR, publisher=notification_base.NotificationPublisher( host=CONF.host, source=fields.NotificationSource.CONDUCTOR), event_type=notification_base.EventType( object='compute_task', action=action, phase=fields.NotificationPhase.ERROR), payload=payload) notification.emit(context) def refresh_info_cache_for_instance(context, instance): """Refresh the info cache for an instance. :param instance: The instance object. """ if instance.info_cache is not None and not instance.deleted: # Catch the exception in case the instance got deleted after the check # instance.deleted was executed try: instance.info_cache.refresh() except exception.InstanceInfoCacheNotFound: LOG.debug("Can not refresh info_cache because instance " "was not found", instance=instance) def get_reboot_type(task_state, current_power_state): """Checks if the current instance state requires a HARD reboot.""" if current_power_state != power_state.RUNNING: return 'HARD' if task_state in task_states.soft_reboot_states: return 'SOFT' return 'HARD' def get_machine_ips(): """Get the machine's ip addresses :returns: list of Strings of ip addresses """ addresses = [] for interface in netifaces.interfaces(): try: iface_data = netifaces.ifaddresses(interface) for family in iface_data: if family not in (netifaces.AF_INET, netifaces.AF_INET6): continue for address in iface_data[family]: addr = address['addr'] # If we have an ipv6 address remove the # %ether_interface at the end if family == netifaces.AF_INET6: addr = addr.split('%')[0] addresses.append(addr) except ValueError: pass return addresses def upsize_quota_delta(new_flavor, old_flavor): """Calculate deltas required to adjust quota for an instance upsize. :param new_flavor: the target instance type :param old_flavor: the original instance type """ def _quota_delta(resource): return (new_flavor[resource] - old_flavor[resource]) deltas = {} if _quota_delta('vcpus') > 0: deltas['cores'] = _quota_delta('vcpus') if _quota_delta('memory_mb') > 0: deltas['ram'] = _quota_delta('memory_mb') return deltas def get_headroom(quotas, usages, deltas): headroom = {res: quotas[res] - usages[res] for res in quotas.keys()} # If quota_cores is unlimited [-1]: # - set cores headroom based on instances headroom: if quotas.get('cores') == -1: if deltas.get('cores'): hc = headroom.get('instances', 1) * deltas['cores'] headroom['cores'] = hc / deltas.get('instances', 1) else: headroom['cores'] = headroom.get('instances', 1) # If quota_ram is unlimited [-1]: # - set ram headroom based on instances headroom: if quotas.get('ram') == -1: if deltas.get('ram'): hr = headroom.get('instances', 1) * deltas['ram'] headroom['ram'] = hr / deltas.get('instances', 1) else: headroom['ram'] = headroom.get('instances', 1) return headroom def check_num_instances_quota(context, instance_type, min_count, max_count, project_id=None, user_id=None, orig_num_req=None): """Enforce quota limits on number of instances created.""" # project_id is also used for the TooManyInstances error message if project_id is None: project_id = context.project_id if user_id is None: user_id = context.user_id # Check whether we need to count resources per-user and check a per-user # quota limit. If we have no per-user quota limit defined for a # project/user, we can avoid wasteful resource counting. user_quotas = objects.Quotas.get_all_by_project_and_user( context, project_id, user_id) if not any(r in user_quotas for r in ['instances', 'cores', 'ram']): user_id = None # Determine requested cores and ram req_cores = max_count * instance_type.vcpus req_ram = max_count * instance_type.memory_mb deltas = {'instances': max_count, 'cores': req_cores, 'ram': req_ram} try: objects.Quotas.check_deltas(context, deltas, project_id, user_id=user_id, check_project_id=project_id, check_user_id=user_id) except exception.OverQuota as exc: quotas = exc.kwargs['quotas'] overs = exc.kwargs['overs'] usages = exc.kwargs['usages'] # This is for the recheck quota case where we used a delta of zero. if min_count == max_count == 0: # orig_num_req is the original number of instances requested in the # case of a recheck quota, for use in the over quota exception. req_cores = orig_num_req * instance_type.vcpus req_ram = orig_num_req * instance_type.memory_mb requested = {'instances': orig_num_req, 'cores': req_cores, 'ram': req_ram} (overs, reqs, total_alloweds, useds) = get_over_quota_detail( deltas, overs, quotas, requested) msg = "Cannot run any more instances of this type." params = {'overs': overs, 'pid': project_id, 'msg': msg} LOG.debug("%(overs)s quota exceeded for %(pid)s. %(msg)s", params) raise exception.TooManyInstances(overs=overs, req=reqs, used=useds, allowed=total_alloweds) # OK, we exceeded quota; let's figure out why... headroom = get_headroom(quotas, usages, deltas) allowed = headroom.get('instances', 1) # Reduce 'allowed' instances in line with the cores & ram headroom if instance_type.vcpus: allowed = min(allowed, headroom['cores'] // instance_type.vcpus) if instance_type.memory_mb: allowed = min(allowed, headroom['ram'] // instance_type.memory_mb) # Convert to the appropriate exception message if allowed <= 0: msg = "Cannot run any more instances of this type." elif min_count <= allowed <= max_count: # We're actually OK, but still need to check against allowed return check_num_instances_quota(context, instance_type, min_count, allowed, project_id=project_id, user_id=user_id) else: msg = "Can only run %s more instances of this type." % allowed num_instances = (str(min_count) if min_count == max_count else "%s-%s" % (min_count, max_count)) requested = dict(instances=num_instances, cores=req_cores, ram=req_ram) (overs, reqs, total_alloweds, useds) = get_over_quota_detail( headroom, overs, quotas, requested) params = {'overs': overs, 'pid': project_id, 'min_count': min_count, 'max_count': max_count, 'msg': msg} if min_count == max_count: LOG.debug("%(overs)s quota exceeded for %(pid)s," " tried to run %(min_count)d instances. " "%(msg)s", params) else: LOG.debug("%(overs)s quota exceeded for %(pid)s," " tried to run between %(min_count)d and" " %(max_count)d instances. %(msg)s", params) raise exception.TooManyInstances(overs=overs, req=reqs, used=useds, allowed=total_alloweds) return max_count def get_over_quota_detail(headroom, overs, quotas, requested): reqs = [] useds = [] total_alloweds = [] for resource in overs: reqs.append(str(requested[resource])) useds.append(str(quotas[resource] - headroom[resource])) total_alloweds.append(str(quotas[resource])) (overs, reqs, useds, total_alloweds) = map(', '.join, ( overs, reqs, useds, total_alloweds)) return overs, reqs, total_alloweds, useds def remove_shelved_keys_from_system_metadata(instance): # Delete system_metadata for a shelved instance for key in ['shelved_at', 'shelved_image_id', 'shelved_host']: if key in instance.system_metadata: del (instance.system_metadata[key]) def create_image(context, instance, name, image_type, image_api, extra_properties=None): """Create new image entry in the image service. This new image will be reserved for the compute manager to upload a snapshot or backup. :param context: security context :param instance: nova.objects.instance.Instance object :param name: string for name of the snapshot :param image_type: snapshot | backup :param image_api: instance of nova.image.glance.API :param extra_properties: dict of extra image properties to include """ properties = { 'instance_uuid': instance.uuid, 'user_id': str(context.user_id), 'image_type': image_type, } properties.update(extra_properties or {}) image_meta = initialize_instance_snapshot_metadata( context, instance, name, properties) # if we're making a snapshot, omit the disk and container formats, # since the image may have been converted to another format, and the # original values won't be accurate. The driver will populate these # with the correct values later, on image upload. if image_type == 'snapshot': image_meta.pop('disk_format', None) image_meta.pop('container_format', None) return image_api.create(context, image_meta) def initialize_instance_snapshot_metadata(context, instance, name, extra_properties=None): """Initialize new metadata for a snapshot of the given instance. :param context: authenticated RequestContext; note that this may not be the owner of the instance itself, e.g. an admin creates a snapshot image of some user instance :param instance: nova.objects.instance.Instance object :param name: string for name of the snapshot :param extra_properties: dict of extra metadata properties to include :returns: the new instance snapshot metadata """ image_meta = utils.get_image_from_system_metadata( instance.system_metadata) image_meta['name'] = name # If the user creating the snapshot is not in the same project as # the owner of the instance, then the image visibility should be # "shared" so the owner of the instance has access to the image, like # in the case of an admin creating a snapshot of another user's # server, either directly via the createImage API or via shelve. extra_properties = extra_properties or {} if context.project_id != instance.project_id: # The glance API client-side code will use this to add the # instance project as a member of the image for access. image_meta['visibility'] = 'shared' extra_properties['instance_owner'] = instance.project_id # TODO(mriedem): Should owner_project_name and owner_user_name # be removed from image_meta['properties'] here, or added to # [DEFAULT]/non_inheritable_image_properties? It is confusing # otherwise to see the owner project not match those values. else: # The request comes from the owner of the instance so make the # image private. image_meta['visibility'] = 'private' # Delete properties that are non-inheritable properties = image_meta['properties'] keys_to_pop = set(CONF.non_inheritable_image_properties).union( NON_INHERITABLE_IMAGE_PROPERTIES) for key in keys_to_pop: properties.pop(key, None) # The properties in extra_properties have precedence properties.update(extra_properties) return image_meta def delete_image(context, instance, image_api, image_id, log_exc_info=False): """Deletes the image if it still exists. Ignores ImageNotFound if the image is already gone. :param context: the nova auth request context where the context.project_id matches the owner of the image :param instance: the instance for which the snapshot image was created :param image_api: the image API used to delete the image :param image_id: the ID of the image to delete :param log_exc_info: True if this is being called from an exception handler block and traceback should be logged at DEBUG level, False otherwise. """ LOG.debug("Cleaning up image %s", image_id, instance=instance, log_exc_info=log_exc_info) try: image_api.delete(context, image_id) except exception.ImageNotFound: # Since we're trying to cleanup an image, we don't care if # if it's already gone. pass except Exception: LOG.exception("Error while trying to clean up image %s", image_id, instance=instance) def may_have_ports_or_volumes(instance): """Checks to see if an instance may have ports or volumes based on vm_state This is primarily only useful when instance.host is None. :param instance: The nova.objects.Instance in question. :returns: True if the instance may have ports of volumes, False otherwise """ # NOTE(melwitt): When an instance build fails in the compute manager, # the instance host and node are set to None and the vm_state is set # to ERROR. In the case, the instance with host = None has actually # been scheduled and may have ports and/or volumes allocated on the # compute node. if instance.vm_state in (vm_states.SHELVED_OFFLOADED, vm_states.ERROR): return True return False def get_stashed_volume_connector(bdm, instance): """Lookup a connector dict from the bdm.connection_info if set Gets the stashed connector dict out of the bdm.connection_info if set and the connector host matches the instance host. :param bdm: nova.objects.block_device.BlockDeviceMapping :param instance: nova.objects.instance.Instance :returns: volume connector dict or None """ if 'connection_info' in bdm and bdm.connection_info is not None: # NOTE(mriedem): We didn't start stashing the connector in the # bdm.connection_info until Mitaka so it might not be there on old # attachments. Also, if the volume was attached when the instance # was in shelved_offloaded state and it hasn't been unshelved yet # we don't have the attachment/connection information either. connector = jsonutils.loads(bdm.connection_info).get('connector') if connector: if connector.get('host') == instance.host: return connector LOG.debug('Found stashed volume connector for instance but ' 'connector host %(connector_host)s does not match ' 'the instance host %(instance_host)s.', {'connector_host': connector.get('host'), 'instance_host': instance.host}, instance=instance) if (instance.host is None and may_have_ports_or_volumes(instance)): LOG.debug('Allowing use of stashed volume connector with ' 'instance host None because instance with ' 'vm_state %(vm_state)s has been scheduled in ' 'the past.', {'vm_state': instance.vm_state}, instance=instance) return connector class EventReporter(object): """Context manager to report instance action events. If constructed with ``graceful_exit=True`` the __exit__ function will handle and not re-raise on InstanceActionNotFound. """ def __init__(self, context, event_name, host, *instance_uuids, graceful_exit=False): self.context = context self.event_name = event_name self.instance_uuids = instance_uuids self.host = host self.graceful_exit = graceful_exit def __enter__(self): for uuid in self.instance_uuids: objects.InstanceActionEvent.event_start( self.context, uuid, self.event_name, want_result=False, host=self.host) return self def __exit__(self, exc_type, exc_val, exc_tb): for uuid in self.instance_uuids: try: objects.InstanceActionEvent.event_finish_with_failure( self.context, uuid, self.event_name, exc_val=exc_val, exc_tb=exc_tb, want_result=False) except exception.InstanceActionNotFound: # If the instance action was not found then determine if we # should re-raise based on the graceful_exit attribute. with excutils.save_and_reraise_exception( reraise=not self.graceful_exit): if self.graceful_exit: return True return False def wrap_instance_event(prefix, graceful_exit=False): """Wraps a method to log the event taken on the instance, and result. This decorator wraps a method to log the start and result of an event, as part of an action taken on an instance. :param prefix: prefix for the event name, usually a service binary like "compute" or "conductor" to indicate the origin of the event. :param graceful_exit: True if the decorator should gracefully handle InstanceActionNotFound errors, False otherwise. This should rarely be True. """ @utils.expects_func_args('instance') def helper(function): @functools.wraps(function) def decorated_function(self, context, *args, **kwargs): wrapped_func = safe_utils.get_wrapped_function(function) keyed_args = inspect.getcallargs(wrapped_func, self, context, *args, **kwargs) instance_uuid = keyed_args['instance']['uuid'] event_name = '{0}_{1}'.format(prefix, function.__name__) host = self.host if hasattr(self, 'host') else None with EventReporter(context, event_name, host, instance_uuid, graceful_exit=graceful_exit): return function(self, context, *args, **kwargs) return decorated_function return helper class UnlimitedSemaphore(object): def __enter__(self): pass def __exit__(self, exc_type, exc_val, exc_tb): pass @property def balance(self): return 0 # This semaphore is used to enforce a limit on disk-IO-intensive operations # (image downloads, image conversions) at any given time. # It is initialized at ComputeManager.init_host() disk_ops_semaphore = UnlimitedSemaphore() @contextlib.contextmanager def notify_about_instance_delete(notifier, context, instance, delete_type='delete', source=fields.NotificationSource.API): try: notify_about_instance_usage(notifier, context, instance, "%s.start" % delete_type) # Note(gibi): force_delete types will be handled in a # subsequent patch if delete_type in ['delete', 'soft_delete']: notify_about_instance_action( context, instance, host=CONF.host, source=source, action=delete_type, phase=fields.NotificationPhase.START) yield finally: notify_about_instance_usage(notifier, context, instance, "%s.end" % delete_type) if delete_type in ['delete', 'soft_delete']: notify_about_instance_action( context, instance, host=CONF.host, source=source, action=delete_type, phase=fields.NotificationPhase.END) def update_pci_request_spec_with_allocated_interface_name( context, report_client, instance, provider_mapping): """Update the instance's PCI request based on the request group - resource provider mapping and the device RP name from placement. :param context: the request context :param report_client: a SchedulerReportClient instance :param instance: an Instance object to be updated :param provider_mapping: the request group - resource provider mapping in the form returned by the RequestSpec.get_request_group_mapping() call. :raises AmbigousResourceProviderForPCIRequest: if more than one resource provider provides resource for the given PCI request. :raises UnexpectResourceProviderNameForPCIRequest: if the resource provider, which provides resource for the pci request, does not have a well formatted name so we cannot parse the parent interface name out of it. """ if not instance.pci_requests: return def needs_update(pci_request, mapping): return (pci_request.requester_id and pci_request.requester_id in mapping) for pci_request in instance.pci_requests.requests: if needs_update(pci_request, provider_mapping): provider_uuids = provider_mapping[pci_request.requester_id] if len(provider_uuids) != 1: raise exception.AmbiguousResourceProviderForPCIRequest( providers=provider_uuids, requester=pci_request.requester_id) dev_rp_name = report_client.get_resource_provider_name( context, provider_uuids[0]) # NOTE(gibi): the device RP name reported by neutron is # structured like <hostname>:<agentname>:<interfacename> rp_name_pieces = dev_rp_name.split(':') if len(rp_name_pieces) != 3: ex = exception.UnexpectedResourceProviderNameForPCIRequest raise ex( provider=provider_uuids[0], requester=pci_request.requester_id, provider_name=dev_rp_name) for spec in pci_request.spec: spec['parent_ifname'] = rp_name_pieces[2] def delete_arqs_if_needed(context, instance): """Delete Cyborg ARQs for the instance.""" dp_name = instance.flavor.extra_specs.get('accel:device_profile') if dp_name is None: return cyclient = cyborg.get_client(context) LOG.debug('Calling Cyborg to delete ARQs for instance %(instance)s', {'instance': instance.uuid}) try: cyclient.delete_arqs_for_instance(instance.uuid) except exception.AcceleratorRequestOpFailed as e: LOG.exception('Failed to delete accelerator requests for ' 'instance %s. Exception: %s', instance.uuid, e)
{ "content_hash": "5990bc105eb8ea2c0717cb3f162d030a", "timestamp": "", "source": "github", "line_count": 1529, "max_line_length": 79, "avg_line_length": 40.774362328319164, "alnum_prop": 0.6495733350442705, "repo_name": "klmitch/nova", "id": "1617f1dbe34bd305292ab9bc02200e9960d96e26", "size": "62961", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "nova/compute/utils.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "851" }, { "name": "HTML", "bytes": "1386" }, { "name": "PHP", "bytes": "44222" }, { "name": "Python", "bytes": "22328409" }, { "name": "Shell", "bytes": "29138" }, { "name": "Smarty", "bytes": "405441" } ], "symlink_target": "" }
from django.conf.urls import url from .views import * urlpatterns = [ url(r'^local/$', 'main.views.local_item', name='main_local'), url(r'^$', 'main.views.global_item', name='main_global'), ]
{ "content_hash": "84a48ec4887512dcaacf086a2335cc58", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 65, "avg_line_length": 28.714285714285715, "alnum_prop": 0.6467661691542289, "repo_name": "yerohin/context_processors_test", "id": "c4fc2f7266dd048e761a1ee75281f687beb13df7", "size": "201", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "main/urls.py", "mode": "33261", "license": "mit", "language": [ { "name": "HTML", "bytes": "657" }, { "name": "Python", "bytes": "5326" } ], "symlink_target": "" }
from model.contact import Contact def test_delete_first_contact(app): if app.contact.count() == 0: app.contact.create_contact(Contact(firstname="test")) old_contacts = app.contact.get_contact_list() app.contact.delete_first_contact() new_contacts = app.contact.get_contact_list() # print(old_contacts) # print(new_contacts) assert (len(old_contacts) - 1) == len(new_contacts) old_contacts[0:1] = [] assert old_contacts == new_contacts
{ "content_hash": "71329d4ea4fadfcae0517a6f48172a88", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 61, "avg_line_length": 36.84615384615385, "alnum_prop": 0.6764091858037579, "repo_name": "beorlow/python_training", "id": "429e51887c0f1eab41a9771d2af81d11c237e1e7", "size": "479", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/test_del_contact.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "14832" } ], "symlink_target": "" }
import re import collections from enum import Enum from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION, ANYXML_CLASS from ydk.errors import YPYError, YPYModelError from ydk.providers._importer import _yang_ns _meta_table = { 'SessionClassEnum' : _MetaInfoEnum('SessionClassEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', { 'ethernet-class':'ethernet_class', 'ipv4-class':'ipv4_class', 'ipv6-class':'ipv6_class', 'invalid-class':'invalid_class', }, 'Cisco-IOS-XR-Ethernet-SPAN-oper', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper']), 'ImStateEnumEnum' : _MetaInfoEnum('ImStateEnumEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', { 'im-state-not-ready':'im_state_not_ready', 'im-state-admin-down':'im_state_admin_down', 'im-state-down':'im_state_down', 'im-state-up':'im_state_up', 'im-state-shutdown':'im_state_shutdown', 'im-state-err-disable':'im_state_err_disable', 'im-state-down-immediate':'im_state_down_immediate', 'im-state-down-immediate-admin':'im_state_down_immediate_admin', 'im-state-down-graceful':'im_state_down_graceful', 'im-state-begin-shutdown':'im_state_begin_shutdown', 'im-state-end-shutdown':'im_state_end_shutdown', 'im-state-begin-error-disable':'im_state_begin_error_disable', 'im-state-end-error-disable':'im_state_end_error_disable', 'im-state-begin-down-graceful':'im_state_begin_down_graceful', 'im-state-reset':'im_state_reset', 'im-state-operational':'im_state_operational', 'im-state-not-operational':'im_state_not_operational', 'im-state-unknown':'im_state_unknown', 'im-state-last':'im_state_last', }, 'Cisco-IOS-XR-Ethernet-SPAN-oper', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper']), 'DestinationClassEnum' : _MetaInfoEnum('DestinationClassEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', { 'interface-class':'interface_class', 'pseudowire-class':'pseudowire_class', 'next-hop-ipv4-class':'next_hop_ipv4_class', 'next-hop-ipv6-class':'next_hop_ipv6_class', 'invalid-class':'invalid_class', }, 'Cisco-IOS-XR-Ethernet-SPAN-oper', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper']), 'TrafficDirectionEnum' : _MetaInfoEnum('TrafficDirectionEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', { 'invalid':'invalid', 'rx-only':'rx_only', 'tx-only':'tx_only', 'both':'both', }, 'Cisco-IOS-XR-Ethernet-SPAN-oper', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper']), 'MirrorIntervalEnum' : _MetaInfoEnum('MirrorIntervalEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', { 'mirror-interval-all':'mirror_interval_all', 'mirror-interval512':'mirror_interval512', 'mirror-interval1k':'mirror_interval1k', 'mirror-interval2k':'mirror_interval2k', 'mirror-interval4k':'mirror_interval4k', 'mirror-interval8k':'mirror_interval8k', 'mirror-interval16k':'mirror_interval16k', }, 'Cisco-IOS-XR-Ethernet-SPAN-oper', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper']), 'SpanMonitorSession.Global_.Statistics.Statistic' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Global_.Statistics.Statistic', False, [ _MetaInfoClassMember('session', ATTRIBUTE, 'str' , None, None, [(1, 79)], [], ''' Session Name ''', 'session', 'Cisco-IOS-XR-Ethernet-SPAN-oper', True), _MetaInfoClassMember('interface', ATTRIBUTE, 'str' , None, None, [], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'], ''' Interface ''', 'interface', 'Cisco-IOS-XR-Ethernet-SPAN-oper', True), _MetaInfoClassMember('octets-not-mirrored', ATTRIBUTE, 'int' , None, None, [('0', '18446744073709551615')], [], ''' Octets Not Mirrored ''', 'octets_not_mirrored', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('packets-not-mirrored', ATTRIBUTE, 'int' , None, None, [('0', '18446744073709551615')], [], ''' Packets Not Mirrored ''', 'packets_not_mirrored', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('rx-octets-mirrored', ATTRIBUTE, 'int' , None, None, [('0', '18446744073709551615')], [], ''' RX Octets Mirrored ''', 'rx_octets_mirrored', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('rx-packets-mirrored', ATTRIBUTE, 'int' , None, None, [('0', '18446744073709551615')], [], ''' RX Packets Mirrored ''', 'rx_packets_mirrored', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('tx-octets-mirrored', ATTRIBUTE, 'int' , None, None, [('0', '18446744073709551615')], [], ''' TX Octets Mirrored ''', 'tx_octets_mirrored', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('tx-packets-mirrored', ATTRIBUTE, 'int' , None, None, [('0', '18446744073709551615')], [], ''' TX Packets Mirrored ''', 'tx_packets_mirrored', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'statistic', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Global_.Statistics' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Global_.Statistics', False, [ _MetaInfoClassMember('statistic', REFERENCE_LIST, 'Statistic' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Global_.Statistics.Statistic', [], [], ''' Statistics for a particular source interface ''', 'statistic', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'statistics', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.InterfaceData' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.InterfaceData', False, [ _MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None, [], [], ''' Interface Name ''', 'interface_name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('interface-state', REFERENCE_ENUM_CLASS, 'ImStateEnumEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'ImStateEnumEnum', [], [], ''' Interface State ''', 'interface_state', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'interface-data', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.PseudowireData' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.PseudowireData', False, [ _MetaInfoClassMember('pseudowire-is-up', ATTRIBUTE, 'bool' , None, None, [], [], ''' Pseudowire State ''', 'pseudowire_is_up', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('pseudowire-name', ATTRIBUTE, 'str' , None, None, [], [], ''' Pseudowire Name ''', 'pseudowire_name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'pseudowire-data', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.NextHopIpv4Data' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.NextHopIpv4Data', False, [ _MetaInfoClassMember('address-is-reachable', ATTRIBUTE, 'bool' , None, None, [], [], ''' Address is reachable ''', 'address_is_reachable', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('ipv4-address', ATTRIBUTE, 'str' , None, None, [], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'], ''' IPv4 address ''', 'ipv4_address', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None, [], [], ''' VRF name ''', 'vrf_name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'next-hop-ipv4-data', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.NextHopIpv6Data' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.NextHopIpv6Data', False, [ _MetaInfoClassMember('address-is-reachable', ATTRIBUTE, 'bool' , None, None, [], [], ''' Address is reachable ''', 'address_is_reachable', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('ipv6-address', ATTRIBUTE, 'str' , None, None, [], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'], ''' IPv6 address ''', 'ipv6_address', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None, [], [], ''' VRF name ''', 'vrf_name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'next-hop-ipv6-data', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData', False, [ _MetaInfoClassMember('destination-class', REFERENCE_ENUM_CLASS, 'DestinationClassEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'DestinationClassEnum', [], [], ''' DestinationClass ''', 'destination_class', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('interface-data', REFERENCE_CLASS, 'InterfaceData' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.InterfaceData', [], [], ''' Interface data ''', 'interface_data', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('invalid-value', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Invalid Parameter ''', 'invalid_value', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('next-hop-ipv4-data', REFERENCE_CLASS, 'NextHopIpv4Data' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.NextHopIpv4Data', [], [], ''' Next-hop IPv4 data ''', 'next_hop_ipv4_data', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('next-hop-ipv6-data', REFERENCE_CLASS, 'NextHopIpv6Data' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.NextHopIpv6Data', [], [], ''' Next-hop IPv6 data ''', 'next_hop_ipv6_data', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('pseudowire-data', REFERENCE_CLASS, 'PseudowireData' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.PseudowireData', [], [], ''' Pseudowire data ''', 'pseudowire_data', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'destination-data', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationId.Ipv4AddressAndVrf' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationId.Ipv4AddressAndVrf', False, [ _MetaInfoClassMember('ipv4-address', ATTRIBUTE, 'str' , None, None, [], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'], ''' IPv4 address ''', 'ipv4_address', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None, [], [], ''' VRF ''', 'vrf_name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'ipv4-address-and-vrf', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationId.Ipv6AddressAndVrf' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationId.Ipv6AddressAndVrf', False, [ _MetaInfoClassMember('ipv6-address', ATTRIBUTE, 'str' , None, None, [], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'], ''' IPv6 address ''', 'ipv6_address', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None, [], [], ''' VRF ''', 'vrf_name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'ipv6-address-and-vrf', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationId' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationId', False, [ _MetaInfoClassMember('destination-class', REFERENCE_ENUM_CLASS, 'DestinationClassEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'DestinationClassEnum', [], [], ''' DestinationClass ''', 'destination_class', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('interface', ATTRIBUTE, 'str' , None, None, [], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'], ''' Interface Handle ''', 'interface', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('invalid-value', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Invalid Parameter ''', 'invalid_value', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('ipv4-address-and-vrf', REFERENCE_CLASS, 'Ipv4AddressAndVrf' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationId.Ipv4AddressAndVrf', [], [], ''' IPv4 address ''', 'ipv4_address_and_vrf', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('ipv6-address-and-vrf', REFERENCE_CLASS, 'Ipv6AddressAndVrf' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationId.Ipv6AddressAndVrf', [], [], ''' IPv6 address ''', 'ipv6_address_and_vrf', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Pseudowire XCID ''', 'pseudowire_id', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'destination-id', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Global_.GlobalSessions.GlobalSession', False, [ _MetaInfoClassMember('session', ATTRIBUTE, 'str' , None, None, [(1, 79)], [], ''' Session Name ''', 'session', 'Cisco-IOS-XR-Ethernet-SPAN-oper', True), _MetaInfoClassMember('destination-data', REFERENCE_CLASS, 'DestinationData' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData', [], [], ''' Destination data ''', 'destination_data', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('destination-error', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Last error observed for the destination ''', 'destination_error', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('destination-id', REFERENCE_CLASS, 'DestinationId' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationId', [], [], ''' Destination ID ''', 'destination_id', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('destination-interface-handle', ATTRIBUTE, 'str' , None, None, [], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'], ''' Destination interface handle (deprecated by DestinationID, invalid for pseudowires) ''', 'destination_interface_handle', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('destination-interface-name', ATTRIBUTE, 'str' , None, None, [], [], ''' Destination interface name (deprecated by DestinationData, invalid for pseudowires) ''', 'destination_interface_name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('id', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Numerical ID assigned to session ''', 'id', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('interface-error', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Last error observed for the destination interface (deprecated by DestinationError) ''', 'interface_error', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None, [], [], ''' Session Name ''', 'name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('session-class', REFERENCE_ENUM_CLASS, 'SessionClassEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SessionClassEnum', [], [], ''' Session class ''', 'session_class', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'global-session', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Global_.GlobalSessions' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Global_.GlobalSessions', False, [ _MetaInfoClassMember('global-session', REFERENCE_LIST, 'GlobalSession' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Global_.GlobalSessions.GlobalSession', [], [], ''' Information about a globally-configured monitor session ''', 'global_session', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'global-sessions', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Global_' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Global_', False, [ _MetaInfoClassMember('global-sessions', REFERENCE_CLASS, 'GlobalSessions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Global_.GlobalSessions', [], [], ''' Global Monitor Sessions table ''', 'global_sessions', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('statistics', REFERENCE_CLASS, 'Statistics' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Global_.Statistics', [], [], ''' Table of statistics for source interfaces ''', 'statistics', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'global', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Attachments.Attachment.TrafficParameters' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Attachments.Attachment.TrafficParameters', False, [ _MetaInfoClassMember('is-acl-enabled', ATTRIBUTE, 'bool' , None, None, [], [], ''' ACL enabled ''', 'is_acl_enabled', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('mirror-bytes', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Number of bytes to mirror ''', 'mirror_bytes', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('mirror-interval', REFERENCE_ENUM_CLASS, 'MirrorIntervalEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'MirrorIntervalEnum', [], [], ''' Interval between mirrored packets ''', 'mirror_interval', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('port-level', ATTRIBUTE, 'bool' , None, None, [], [], ''' Port level mirroring ''', 'port_level', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('traffic-direction', REFERENCE_ENUM_CLASS, 'TrafficDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'TrafficDirectionEnum', [], [], ''' Direction ''', 'traffic_direction', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'traffic-parameters', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Attachments.Attachment.DestinationId.Ipv4AddressAndVrf' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Attachments.Attachment.DestinationId.Ipv4AddressAndVrf', False, [ _MetaInfoClassMember('ipv4-address', ATTRIBUTE, 'str' , None, None, [], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'], ''' IPv4 address ''', 'ipv4_address', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None, [], [], ''' VRF ''', 'vrf_name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'ipv4-address-and-vrf', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Attachments.Attachment.DestinationId.Ipv6AddressAndVrf' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Attachments.Attachment.DestinationId.Ipv6AddressAndVrf', False, [ _MetaInfoClassMember('ipv6-address', ATTRIBUTE, 'str' , None, None, [], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'], ''' IPv6 address ''', 'ipv6_address', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None, [], [], ''' VRF ''', 'vrf_name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'ipv6-address-and-vrf', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Attachments.Attachment.DestinationId' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Attachments.Attachment.DestinationId', False, [ _MetaInfoClassMember('destination-class', REFERENCE_ENUM_CLASS, 'DestinationClassEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'DestinationClassEnum', [], [], ''' DestinationClass ''', 'destination_class', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('interface', ATTRIBUTE, 'str' , None, None, [], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'], ''' Interface Handle ''', 'interface', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('invalid-value', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Invalid Parameter ''', 'invalid_value', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('ipv4-address-and-vrf', REFERENCE_CLASS, 'Ipv4AddressAndVrf' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Attachments.Attachment.DestinationId.Ipv4AddressAndVrf', [], [], ''' IPv4 address ''', 'ipv4_address_and_vrf', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('ipv6-address-and-vrf', REFERENCE_CLASS, 'Ipv6AddressAndVrf' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Attachments.Attachment.DestinationId.Ipv6AddressAndVrf', [], [], ''' IPv6 address ''', 'ipv6_address_and_vrf', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Pseudowire XCID ''', 'pseudowire_id', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'destination-id', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Attachments.Attachment' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Attachments.Attachment', False, [ _MetaInfoClassMember('session', ATTRIBUTE, 'str' , None, None, [(1, 79)], [], ''' Session Name ''', 'session', 'Cisco-IOS-XR-Ethernet-SPAN-oper', True), _MetaInfoClassMember('interface', ATTRIBUTE, 'str' , None, None, [], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'], ''' Interface ''', 'interface', 'Cisco-IOS-XR-Ethernet-SPAN-oper', True), _MetaInfoClassMember('dest-pw-type-not-supported', ATTRIBUTE, 'bool' , None, None, [], [], ''' The destination PW type is not supported ''', 'dest_pw_type_not_supported', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('destination-id', REFERENCE_CLASS, 'DestinationId' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Attachments.Attachment.DestinationId', [], [], ''' Destination ID ''', 'destination_id', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('destination-interface', ATTRIBUTE, 'str' , None, None, [], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'], ''' Destination interface (deprecated by DestinationID, invalid for pseudowires) ''', 'destination_interface', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('global-class', REFERENCE_ENUM_CLASS, 'SessionClassEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SessionClassEnum', [], [], ''' Global session class ''', 'global_class', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('id', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Numerical ID assigned to session ''', 'id', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('local-class', REFERENCE_ENUM_CLASS, 'SessionClassEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SessionClassEnum', [], [], ''' Local attachment class ''', 'local_class', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None, [], [], ''' Session Name ''', 'name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('pfi-error', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Last error returned from PFI for this interface ''', 'pfi_error', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('session-is-configured', ATTRIBUTE, 'bool' , None, None, [], [], ''' The Session is configured globally ''', 'session_is_configured', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('source-interface', ATTRIBUTE, 'str' , None, None, [], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'], ''' Source interface ''', 'source_interface', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('source-interface-is-a-destination', ATTRIBUTE, 'bool' , None, None, [], [], ''' This source interface is a destination for another monitor-session ''', 'source_interface_is_a_destination', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('source-interface-state', REFERENCE_ENUM_CLASS, 'ImStateEnumEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'ImStateEnumEnum', [], [], ''' Source interface state ''', 'source_interface_state', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('traffic-direction', REFERENCE_ENUM_CLASS, 'TrafficDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'TrafficDirectionEnum', [], [], ''' Traffic mirroring direction (deprecated by TrafficParameters) ''', 'traffic_direction', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('traffic-parameters', REFERENCE_CLASS, 'TrafficParameters' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Attachments.Attachment.TrafficParameters', [], [], ''' Traffic mirroring parameters ''', 'traffic_parameters', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'attachment', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Attachments' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Attachments', False, [ _MetaInfoClassMember('attachment', REFERENCE_LIST, 'Attachment' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Attachments.Attachment', [], [], ''' Information about a particular source interface configured as attached to monitor session ''', 'attachment', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'attachments', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession.DestinationId.Ipv4AddressAndVrf' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession.DestinationId.Ipv4AddressAndVrf', False, [ _MetaInfoClassMember('ipv4-address', ATTRIBUTE, 'str' , None, None, [], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'], ''' IPv4 address ''', 'ipv4_address', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None, [], [], ''' VRF ''', 'vrf_name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'ipv4-address-and-vrf', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession.DestinationId.Ipv6AddressAndVrf' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession.DestinationId.Ipv6AddressAndVrf', False, [ _MetaInfoClassMember('ipv6-address', ATTRIBUTE, 'str' , None, None, [], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'], ''' IPv6 address ''', 'ipv6_address', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None, [], [], ''' VRF ''', 'vrf_name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'ipv6-address-and-vrf', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession.DestinationId' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession.DestinationId', False, [ _MetaInfoClassMember('destination-class', REFERENCE_ENUM_CLASS, 'DestinationClassEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'DestinationClassEnum', [], [], ''' DestinationClass ''', 'destination_class', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('interface', ATTRIBUTE, 'str' , None, None, [], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'], ''' Interface Handle ''', 'interface', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('invalid-value', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Invalid Parameter ''', 'invalid_value', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('ipv4-address-and-vrf', REFERENCE_CLASS, 'Ipv4AddressAndVrf' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession.DestinationId.Ipv4AddressAndVrf', [], [], ''' IPv4 address ''', 'ipv4_address_and_vrf', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('ipv6-address-and-vrf', REFERENCE_CLASS, 'Ipv6AddressAndVrf' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession.DestinationId.Ipv6AddressAndVrf', [], [], ''' IPv6 address ''', 'ipv6_address_and_vrf', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Pseudowire XCID ''', 'pseudowire_id', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'destination-id', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession', False, [ _MetaInfoClassMember('destination-id', REFERENCE_CLASS, 'DestinationId' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession.DestinationId', [], [], ''' Destination ID ''', 'destination_id', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('destination-interface', ATTRIBUTE, 'str' , None, None, [], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'], ''' Destination interface (deprecated by DestinationID, invalid for pseudowires) ''', 'destination_interface', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('id', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Assigned numerical ID for this session ''', 'id', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None, [], [], ''' Configured Session Name ''', 'name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('platform-error', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Last error observed for this session while programming the hardware ''', 'platform_error', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('session-class', REFERENCE_ENUM_CLASS, 'SpanSessionClassEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_datatypes', 'SpanSessionClassEnum', [], [], ''' Sesssion class ''', 'session_class', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('session-class-xr', REFERENCE_ENUM_CLASS, 'SessionClassEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SessionClassEnum', [], [], ''' Session class ''', 'session_class_xr', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('session-id', ATTRIBUTE, 'int' , None, None, [('-2147483648', '2147483647')], [], ''' Session ID ''', 'session_id', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'hardware-session', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.HardwareSessions' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.HardwareSessions', False, [ _MetaInfoClassMember('hardware-session', REFERENCE_LIST, 'HardwareSession' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession', [], [], ''' Information about a particular session that is set up in the hardware ''', 'hardware_session', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'hardware-sessions', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.DestinationId.Ipv4AddressAndVrf' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Interfaces.Interface.DestinationId.Ipv4AddressAndVrf', False, [ _MetaInfoClassMember('ipv4-address', ATTRIBUTE, 'str' , None, None, [], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'], ''' IPv4 address ''', 'ipv4_address', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None, [], [], ''' VRF ''', 'vrf_name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'ipv4-address-and-vrf', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.DestinationId.Ipv6AddressAndVrf' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Interfaces.Interface.DestinationId.Ipv6AddressAndVrf', False, [ _MetaInfoClassMember('ipv6-address', ATTRIBUTE, 'str' , None, None, [], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'], ''' IPv6 address ''', 'ipv6_address', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None, [], [], ''' VRF ''', 'vrf_name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'ipv6-address-and-vrf', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.DestinationId' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Interfaces.Interface.DestinationId', False, [ _MetaInfoClassMember('destination-class', REFERENCE_ENUM_CLASS, 'DestinationClassEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'DestinationClassEnum', [], [], ''' DestinationClass ''', 'destination_class', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('interface', ATTRIBUTE, 'str' , None, None, [], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'], ''' Interface Handle ''', 'interface', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('invalid-value', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Invalid Parameter ''', 'invalid_value', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('ipv4-address-and-vrf', REFERENCE_CLASS, 'Ipv4AddressAndVrf' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.DestinationId.Ipv4AddressAndVrf', [], [], ''' IPv4 address ''', 'ipv4_address_and_vrf', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('ipv6-address-and-vrf', REFERENCE_CLASS, 'Ipv6AddressAndVrf' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.DestinationId.Ipv6AddressAndVrf', [], [], ''' IPv6 address ''', 'ipv6_address_and_vrf', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Pseudowire XCID ''', 'pseudowire_id', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'destination-id', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.TrafficMirroringParameters' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Interfaces.Interface.TrafficMirroringParameters', False, [ _MetaInfoClassMember('is-acl-enabled', ATTRIBUTE, 'bool' , None, None, [], [], ''' ACL enabled ''', 'is_acl_enabled', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('mirror-bytes', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Number of bytes to mirror ''', 'mirror_bytes', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('mirror-interval', REFERENCE_ENUM_CLASS, 'MirrorIntervalEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'MirrorIntervalEnum', [], [], ''' Interval between mirrored packets ''', 'mirror_interval', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('port-level', ATTRIBUTE, 'bool' , None, None, [], [], ''' Port level mirroring ''', 'port_level', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('traffic-direction', REFERENCE_ENUM_CLASS, 'TrafficDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'TrafficDirectionEnum', [], [], ''' Direction ''', 'traffic_direction', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'traffic-mirroring-parameters', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.DestinationId.Ipv4AddressAndVrf' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.DestinationId.Ipv4AddressAndVrf', False, [ _MetaInfoClassMember('ipv4-address', ATTRIBUTE, 'str' , None, None, [], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'], ''' IPv4 address ''', 'ipv4_address', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None, [], [], ''' VRF ''', 'vrf_name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'ipv4-address-and-vrf', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.DestinationId.Ipv6AddressAndVrf' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.DestinationId.Ipv6AddressAndVrf', False, [ _MetaInfoClassMember('ipv6-address', ATTRIBUTE, 'str' , None, None, [], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'], ''' IPv6 address ''', 'ipv6_address', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None, [], [], ''' VRF ''', 'vrf_name', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'ipv6-address-and-vrf', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.DestinationId' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.DestinationId', False, [ _MetaInfoClassMember('destination-class', REFERENCE_ENUM_CLASS, 'DestinationClassEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'DestinationClassEnum', [], [], ''' DestinationClass ''', 'destination_class', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('interface', ATTRIBUTE, 'str' , None, None, [], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'], ''' Interface Handle ''', 'interface', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('invalid-value', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Invalid Parameter ''', 'invalid_value', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('ipv4-address-and-vrf', REFERENCE_CLASS, 'Ipv4AddressAndVrf' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.DestinationId.Ipv4AddressAndVrf', [], [], ''' IPv4 address ''', 'ipv4_address_and_vrf', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('ipv6-address-and-vrf', REFERENCE_CLASS, 'Ipv6AddressAndVrf' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.DestinationId.Ipv6AddressAndVrf', [], [], ''' IPv6 address ''', 'ipv6_address_and_vrf', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Pseudowire XCID ''', 'pseudowire_id', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'destination-id', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.TrafficMirroringParameters' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.TrafficMirroringParameters', False, [ _MetaInfoClassMember('is-acl-enabled', ATTRIBUTE, 'bool' , None, None, [], [], ''' ACL enabled ''', 'is_acl_enabled', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('mirror-bytes', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Number of bytes to mirror ''', 'mirror_bytes', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('mirror-interval', REFERENCE_ENUM_CLASS, 'MirrorIntervalEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'MirrorIntervalEnum', [], [], ''' Interval between mirrored packets ''', 'mirror_interval', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('port-level', ATTRIBUTE, 'bool' , None, None, [], [], ''' Port level mirroring ''', 'port_level', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('traffic-direction', REFERENCE_ENUM_CLASS, 'TrafficDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'TrafficDirectionEnum', [], [], ''' Direction ''', 'traffic_direction', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'traffic-mirroring-parameters', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment', False, [ _MetaInfoClassMember('class', REFERENCE_ENUM_CLASS, 'SessionClassEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SessionClassEnum', [], [], ''' Attachment class ''', 'class_', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('destination-id', REFERENCE_CLASS, 'DestinationId' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.DestinationId', [], [], ''' Destination ID ''', 'destination_id', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('traffic-mirroring-parameters', REFERENCE_CLASS, 'TrafficMirroringParameters' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.TrafficMirroringParameters', [], [], ''' Traffic mirroring parameters ''', 'traffic_mirroring_parameters', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'attachment', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Interfaces.Interface' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Interfaces.Interface', False, [ _MetaInfoClassMember('interface', ATTRIBUTE, 'str' , None, None, [], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'], ''' Interface ''', 'interface', 'Cisco-IOS-XR-Ethernet-SPAN-oper', True), _MetaInfoClassMember('attachment', REFERENCE_LIST, 'Attachment' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment', [], [], ''' Attachment information ''', 'attachment', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('destination-id', REFERENCE_CLASS, 'DestinationId' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.DestinationId', [], [], ''' Destination ID (deprecated by Attachment) ''', 'destination_id', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('destination-interface', ATTRIBUTE, 'str' , None, None, [], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'], ''' Destination interface (deprecated by Attachment) ''', 'destination_interface', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('platform-error', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Last error observed for this interface while programming the hardware ''', 'platform_error', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('source-interface', ATTRIBUTE, 'str' , None, None, [], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'], ''' Source interface ''', 'source_interface', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('traffic-direction', REFERENCE_ENUM_CLASS, 'TrafficDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'TrafficDirectionEnum', [], [], ''' Traffic mirroring direction (deprecated by Attachment) ''', 'traffic_direction', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('traffic-mirroring-parameters', REFERENCE_CLASS, 'TrafficMirroringParameters' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Interfaces.Interface.TrafficMirroringParameters', [], [], ''' Traffic mirroring parameters (deprecated by Attachment) ''', 'traffic_mirroring_parameters', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'interface', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node.Interfaces' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node.Interfaces', False, [ _MetaInfoClassMember('interface', REFERENCE_LIST, 'Interface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Interfaces.Interface', [], [], ''' Information about a particular interface that is set up in the hardware ''', 'interface', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'interfaces', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes.Node' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes.Node', False, [ _MetaInfoClassMember('node', ATTRIBUTE, 'str' , None, None, [], [b'([a-zA-Z0-9_]*\\d+/){1,2}([a-zA-Z0-9_]*\\d+)'], ''' Node ''', 'node', 'Cisco-IOS-XR-Ethernet-SPAN-oper', True), _MetaInfoClassMember('attachments', REFERENCE_CLASS, 'Attachments' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Attachments', [], [], ''' Table of source interfaces configured as attached to a session ''', 'attachments', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('hardware-sessions', REFERENCE_CLASS, 'HardwareSessions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.HardwareSessions', [], [], ''' Table of sessions set up in the hardware. When all sessions are operating correctly the entries in this table should match those entries in GlobalSessionTable that have a destination configured ''', 'hardware_sessions', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('interfaces', REFERENCE_CLASS, 'Interfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node.Interfaces', [], [], ''' Table of source interfaces set up in the hardware. The entries in this table should match the entries in AttachmentTable when all sessions are operating correctly ''', 'interfaces', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'node', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession.Nodes' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession.Nodes', False, [ _MetaInfoClassMember('node', REFERENCE_LIST, 'Node' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes.Node', [], [], ''' Node-specific data for a particular node ''', 'node', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'nodes', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, 'SpanMonitorSession' : { 'meta_info' : _MetaInfoClass('SpanMonitorSession', False, [ _MetaInfoClassMember('global', REFERENCE_CLASS, 'Global_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Global_', [], [], ''' Global operational data ''', 'global_', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), _MetaInfoClassMember('nodes', REFERENCE_CLASS, 'Nodes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper', 'SpanMonitorSession.Nodes', [], [], ''' Node table for node-specific operational data ''', 'nodes', 'Cisco-IOS-XR-Ethernet-SPAN-oper', False), ], 'Cisco-IOS-XR-Ethernet-SPAN-oper', 'span-monitor-session', _yang_ns._namespaces['Cisco-IOS-XR-Ethernet-SPAN-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_oper' ), }, } _meta_table['SpanMonitorSession.Global_.Statistics.Statistic']['meta_info'].parent =_meta_table['SpanMonitorSession.Global_.Statistics']['meta_info'] _meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.InterfaceData']['meta_info'].parent =_meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData']['meta_info'] _meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.PseudowireData']['meta_info'].parent =_meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData']['meta_info'] _meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.NextHopIpv4Data']['meta_info'].parent =_meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData']['meta_info'] _meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData.NextHopIpv6Data']['meta_info'].parent =_meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData']['meta_info'] _meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationId.Ipv4AddressAndVrf']['meta_info'].parent =_meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationId']['meta_info'] _meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationId.Ipv6AddressAndVrf']['meta_info'].parent =_meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationId']['meta_info'] _meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationData']['meta_info'].parent =_meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession']['meta_info'] _meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession.DestinationId']['meta_info'].parent =_meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession']['meta_info'] _meta_table['SpanMonitorSession.Global_.GlobalSessions.GlobalSession']['meta_info'].parent =_meta_table['SpanMonitorSession.Global_.GlobalSessions']['meta_info'] _meta_table['SpanMonitorSession.Global_.Statistics']['meta_info'].parent =_meta_table['SpanMonitorSession.Global_']['meta_info'] _meta_table['SpanMonitorSession.Global_.GlobalSessions']['meta_info'].parent =_meta_table['SpanMonitorSession.Global_']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Attachments.Attachment.DestinationId.Ipv4AddressAndVrf']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.Attachments.Attachment.DestinationId']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Attachments.Attachment.DestinationId.Ipv6AddressAndVrf']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.Attachments.Attachment.DestinationId']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Attachments.Attachment.TrafficParameters']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.Attachments.Attachment']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Attachments.Attachment.DestinationId']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.Attachments.Attachment']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Attachments.Attachment']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.Attachments']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession.DestinationId.Ipv4AddressAndVrf']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession.DestinationId']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession.DestinationId.Ipv6AddressAndVrf']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession.DestinationId']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession.DestinationId']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.HardwareSessions.HardwareSession']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.HardwareSessions']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface.DestinationId.Ipv4AddressAndVrf']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface.DestinationId']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface.DestinationId.Ipv6AddressAndVrf']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface.DestinationId']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.DestinationId.Ipv4AddressAndVrf']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.DestinationId']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.DestinationId.Ipv6AddressAndVrf']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.DestinationId']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.DestinationId']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment.TrafficMirroringParameters']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface.DestinationId']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface.TrafficMirroringParameters']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface.Attachment']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Interfaces.Interface']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node.Interfaces']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Attachments']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.HardwareSessions']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node.Interfaces']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes.Node']['meta_info'] _meta_table['SpanMonitorSession.Nodes.Node']['meta_info'].parent =_meta_table['SpanMonitorSession.Nodes']['meta_info'] _meta_table['SpanMonitorSession.Global_']['meta_info'].parent =_meta_table['SpanMonitorSession']['meta_info'] _meta_table['SpanMonitorSession.Nodes']['meta_info'].parent =_meta_table['SpanMonitorSession']['meta_info']
{ "content_hash": "848c73fa02e397c2464aa5e99d5f556b", "timestamp": "", "source": "github", "line_count": 1434, "max_line_length": 264, "avg_line_length": 58.141562064156204, "alnum_prop": 0.5278920539730135, "repo_name": "111pontes/ydk-py", "id": "465c95fa5a5052746da15b5ea18c6f9dc3cb9570", "size": "83378", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_Ethernet_SPAN_oper.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C++", "bytes": "7226" }, { "name": "Python", "bytes": "446117948" } ], "symlink_target": "" }
import boto3 import sure # noqa # pylint: disable=unused-import from moto import mock_quicksight from moto.core import DEFAULT_ACCOUNT_ID as ACCOUNT_ID # See our Development Tips on writing tests for hints on how to write good tests: # http://docs.getmoto.org/en/latest/docs/contributing/development_tips/tests.html @mock_quicksight def test_create_data_set(): client = boto3.client("quicksight", region_name="eu-west-1") resp = client.create_data_set( AwsAccountId=ACCOUNT_ID, DataSetId="myset", Name="My Data Set", ImportMode="SPICE", PhysicalTableMap={ "table1": { "RelationalTable": { "DataSourceArn": "d:s:arn", "Catalog": "cat", "Name": "dog", "InputColumns": [{"Name": "c1", "Type": "string"}], } } }, ) resp.should.have.key("Arn").equals( f"arn:aws:quicksight:eu-west-1:{ACCOUNT_ID}:data-set/myset" ) resp.should.have.key("DataSetId").equals("myset") resp.should.have.key("IngestionArn").equals( f"arn:aws:quicksight:eu-west-1:{ACCOUNT_ID}:ingestion/tbd" ) @mock_quicksight def test_create_ingestion(): client = boto3.client("quicksight", region_name="eu-west-1") client.create_data_set( AwsAccountId=ACCOUNT_ID, DataSetId="myset", Name="My Data Set", ImportMode="SPICE", PhysicalTableMap={ "table1": { "RelationalTable": { "DataSourceArn": "d:s:arn", "Catalog": "cat", "Name": "dog", "InputColumns": [{"Name": "c1", "Type": "string"}], } } }, ) resp = client.create_ingestion( AwsAccountId=ACCOUNT_ID, DataSetId="n_a", IngestionId="n_a2", IngestionType="FULL_REFRESH", ) resp.should.have.key("Arn").equals( f"arn:aws:quicksight:eu-west-1:{ACCOUNT_ID}:data-set/n_a/ingestions/n_a2" ) resp.should.have.key("IngestionId").equals("n_a2") resp.should.have.key("IngestionStatus").equals("INITIALIZED")
{ "content_hash": "b0c6593b215d2892508f86cbc72b363c", "timestamp": "", "source": "github", "line_count": 73, "max_line_length": 81, "avg_line_length": 30.26027397260274, "alnum_prop": 0.5563603440470801, "repo_name": "spulec/moto", "id": "4fc878276597982f2eb51abb3bbbf80dee2c51f0", "size": "2209", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/test_quicksight/test_quicksight_datasets.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "255" }, { "name": "HTML", "bytes": "5983" }, { "name": "Java", "bytes": "1688" }, { "name": "JavaScript", "bytes": "1424" }, { "name": "Jinja", "bytes": "2502" }, { "name": "Makefile", "bytes": "2284" }, { "name": "Python", "bytes": "14737868" }, { "name": "Ruby", "bytes": "188" }, { "name": "Scala", "bytes": "782" }, { "name": "Shell", "bytes": "5515" } ], "symlink_target": "" }
from twisted.internet.protocol import Factory, Protocol from twisted.internet import reactor from math import * from datetime import * from copy import copy import sys import argparse # Commands CMD_DID_DRAW = "D" CMD_SYNC_CLIENT_CONNECT = "S" CMD_ASYNC_CLIENT_CONNECT = "A" CMD_BROADCAST = "T" CMD_PAUSE = "P" CMD_RESET = "R" CMD_GO = "G" # Parse the command line arguments parser = argparse.ArgumentParser(description='Most Pixels Ever Server, conforms to protocol version 2.0') parser.add_argument('--screens', dest='screens', default=-1, help='The number of clients. The server won\'t start the draw loop until all of the clients are connected.') parser.add_argument('--port', dest='port_num', default=9002, help='The port number that the clients connect to.') parser.add_argument('--framerate', dest='framerate', default=60, help='The target framerate.') args = parser.parse_args() portnum = int(args.port_num) screens_required = int(args.screens) framerate = int(args.framerate) microseconds_per_frame = (1.0 / framerate) * 1000000 framecount = 0 screens_drawn = 0 is_paused = False last_frame_time = datetime.now() class BroadcastMessage: def __init__(self, body, from_client_id, to_client_ids = []): self.body = body self.from_client_id = from_client_id self.to_client_ids = to_client_ids class MPEServer(Protocol): client_id = -1 client_name = "" def connectionMade(self): print("Client connected. Total Clients: %i" % (len(MPEServer.clients) + 1)) def connectionLost(self, reason): print("Client disconnected") if MPEServer.clients[self.client_id]: del MPEServer.clients[self.client_id] if self.client_id in MPEServer.rendering_client_ids: MPEServer.rendering_client_ids.remove(self.client_id) if self.client_id in MPEServer.receiving_client_ids: MPEServer.receiving_client_ids.remove(self.client_id) # It's possible that isNextFrameReady is true after the client disconnects # if they were the last client to render and hadn't informed the server. if MPEServer.isNextFrameReady(): MPEServer.sendNextFrame() def dataReceived(self, data): global screens_drawn global framecount # Parse data as utf-8, not byte string data = data.decode("utf_8") # There may be more than 1 message in the mix messages = data.split("\n") for message in messages: if len(message) < 1: return tokens = message.split("|") token_count = len(tokens) cmd = tokens[0] if cmd == CMD_DID_DRAW: # Format # D|client_id|last_frame_rendered if token_count != 3: print("ERROR: Incorrect param count for CMD %s. " % cmd, data, tokens) client = int(tokens[1]) frame_id = int(tokens[2]) if frame_id >= framecount: screens_drawn += 1 if MPEServer.isNextFrameReady(): # all of the frames are drawn, send out the next frames MPEServer.sendNextFrame() elif (cmd == CMD_SYNC_CLIENT_CONNECT) or (cmd == CMD_ASYNC_CLIENT_CONNECT): # Formats # "S|client_id|client_name" # "A|client_id|client_name|should_receive_broadcasts" if token_count < 3 or token_count > 4: print("ERROR: Incorrect param count for CMD %s. " % cmd, data, tokens) self.client_id = int(tokens[1]) self.client_name = tokens[2] MPEServer.clients[self.client_id] = self client_receives_messages = True if cmd == CMD_SYNC_CLIENT_CONNECT: MPEServer.rendering_client_ids.append(self.client_id) elif cmd == CMD_ASYNC_CLIENT_CONNECT: client_receives_messages = tokens[3].lower() == 'true' if client_receives_messages: print("New client will receive data") MPEServer.receiving_client_ids.append(self.client_id) MPEServer.handleClientAdd(self.client_id) elif cmd == CMD_BROADCAST: # Formats: # "T|message message message" # "T|message message message|toID_1,toID_2,toID_3" if token_count < 2 or token_count > 3: print("ERROR: Incorrect param count for CMD %s. " % cmd, data, tokens) to_client_ids = [] if token_count == 2: to_client_ids = MPEServer.receiving_client_ids elif token_count == 3: to_client_ids = tokens[2].split(",") to_client_ids = [int(client_id) for client_id in to_client_ids] MPEServer.broadcastMessage(tokens[1], self.client_id, to_client_ids) elif cmd == CMD_PAUSE: # Format: # P if token_count > 1: print("ERROR: Incorrect param count for CMD %s. " % cmd, data, tokens) MPEServer.togglePause() elif cmd == CMD_RESET: # Format: # R if token_count > 1: print("ERROR: Incorrect param count for CMD %s. " % cmd, data, tokens) MPEServer.reset() else: print("Unknown message: " + message) # print("Received message: ", data, "FROM", self.client_id) def sendMessage(self, message): # Must use byte string, not unicode string message = message + "\n" self.transport.write(message.encode('utf_8')) @staticmethod def reset(): global framecount global is_paused framecount = 0 screens_drawn = 0 MPEServer.message_queue = [] MPEServer.sendReset() if is_paused: print("INFO: Reset was called when server is paused.") MPEServer.sendNextFrame() @staticmethod def sendReset(): for n in MPEServer.receiving_client_ids: MPEServer.clients[n].sendMessage(CMD_RESET) @staticmethod def togglePause(): global is_paused is_paused = not is_paused if MPEServer.isNextFrameReady(): MPEServer.sendNextFrame() @staticmethod def handleClientAdd(client_id): global framecount global screens_required print("Added client %i (%s)" % (client_id, MPEServer.clients[client_id].client_name)) num_sync_clients = len(MPEServer.rendering_client_ids) if screens_required == -1 or num_sync_clients == screens_required: # NOTE: We don't reset when an async client connects if client_id in MPEServer.rendering_client_ids: MPEServer.reset() elif num_sync_clients < screens_required: print("Waiting for %i more clients." % (screens_required - num_sync_clients)) elif num_sync_clients > screens_required: print("ERROR: More than MAX clients have connected.") @staticmethod def isNextFrameReady(): global screens_drawn global screens_required global is_paused num_sync_clients = len(MPEServer.rendering_client_ids) return screens_drawn >= num_sync_clients and not is_paused and num_sync_clients >= screens_required @staticmethod def sendNextFrame(): global last_frame_time global screens_drawn global framecount global is_paused global framerate global microseconds_per_frame if is_paused: return # Slow down if we've exceeded the target FPS delta = datetime.now() - last_frame_time while delta.seconds < 1 and delta.microseconds < microseconds_per_frame: delta = datetime.now() - last_frame_time screens_drawn = 0 framecount += 1 send_message = CMD_GO + "|%i" % framecount # Copy the clients so in case one disconnects during the loop clients = copy(MPEServer.clients) for client_id in clients: c = clients[client_id] if client_id in MPEServer.receiving_client_ids: client_messages = [] for m in MPEServer.message_queue: if len(m.to_client_ids) == 0 or client_id in m.to_client_ids: client_messages.append(str(m.from_client_id) + "," + m.body) if len(client_messages) > 0: c.sendMessage(send_message + "|" + "|".join(client_messages)) else: c.sendMessage(send_message) MPEServer.message_queue = [] last_frame_time = datetime.now() @staticmethod def broadcastMessage(message, from_client_id, to_client_ids): #print("Broadcasting message: " + message + " to client IDs: ", to_client_ids) m = BroadcastMessage(message, from_client_id, to_client_ids) MPEServer.message_queue.append(m) # NOTE: If only async clients are connected, send this message now. # Otherwise the message wont be sent until the next render frame # comes across. if len(MPEServer.rendering_client_ids) == 0: MPEServer.sendNextFrame() # Start the server factory = Factory() factory.protocol = MPEServer MPEServer.clients = {} MPEServer.rendering_client_ids = [] MPEServer.receiving_client_ids = [] MPEServer.message_queue = [] reactor.listenTCP(portnum, factory) print("MPE Server started on port %i" % portnum) print("Running at max %i FPS" % framerate) if screens_required > 0: print("Waiting for %i clients." % screens_required) reactor.run()
{ "content_hash": "f987e41dc9a010273154699968c058d5", "timestamp": "", "source": "github", "line_count": 261, "max_line_length": 169, "avg_line_length": 37.87356321839081, "alnum_prop": 0.5915022761760242, "repo_name": "wdlindmeier/mpe-python-server", "id": "a7a52b5e033d17d561c4bd25ae2b3859afad08c1", "size": "10110", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "mpe_server.py", "mode": "33261", "license": "mit", "language": [ { "name": "Python", "bytes": "10110" } ], "symlink_target": "" }
from __future__ import print_function, division, absolute_import import itertools from numba import types, intrinsics from numba.utils import PYVERSION, RANGE_ITER_OBJECTS, operator_map from numba.typing.templates import (AttributeTemplate, ConcreteTemplate, AbstractTemplate, builtin_global, builtin, builtin_attr, signature, bound_function, make_callable_template) for obj in RANGE_ITER_OBJECTS: builtin_global(obj, types.range_type) builtin_global(len, types.len_type) builtin_global(slice, types.slice_type) builtin_global(abs, types.abs_type) builtin_global(print, types.print_type) @builtin class Print(ConcreteTemplate): key = types.print_type intcases = [signature(types.none, ty) for ty in types.integer_domain] realcases = [signature(types.none, ty) for ty in types.real_domain] cases = intcases + realcases @builtin class PrintOthers(AbstractTemplate): key = types.print_type def accepted_types(self, ty): if ty in types.integer_domain or ty in types.real_domain: return True if isinstance(ty, types.CharSeq): return True def generic(self, args, kws): assert not kws, "kwargs to print is not supported." for a in args: if not self.accepted_types(a): raise TypeError("Type %s is not printable." % a) return signature(types.none, *args) @builtin class Abs(ConcreteTemplate): key = types.abs_type int_cases = [signature(ty, ty) for ty in types.signed_domain] real_cases = [signature(ty, ty) for ty in types.real_domain] complex_cases = [signature(ty.underlying_float, ty) for ty in types.complex_domain] cases = int_cases + real_cases + complex_cases @builtin class Slice(ConcreteTemplate): key = types.slice_type cases = [ signature(types.slice3_type), signature(types.slice3_type, types.none, types.none), signature(types.slice3_type, types.none, types.intp), signature(types.slice3_type, types.intp, types.none), signature(types.slice3_type, types.intp, types.intp), signature(types.slice3_type, types.intp, types.intp, types.intp), ] @builtin class Range(ConcreteTemplate): key = types.range_type cases = [ signature(types.range_state32_type, types.int32), signature(types.range_state32_type, types.int32, types.int32), signature(types.range_state32_type, types.int32, types.int32, types.int32), signature(types.range_state64_type, types.int64), signature(types.range_state64_type, types.int64, types.int64), signature(types.range_state64_type, types.int64, types.int64, types.int64), signature(types.unsigned_range_state64_type, types.uint64), signature(types.unsigned_range_state64_type, types.uint64, types.uint64), signature(types.unsigned_range_state64_type, types.uint64, types.uint64, types.uint64), ] @builtin class GetIter(AbstractTemplate): key = "getiter" def generic(self, args, kws): assert not kws [obj] = args if isinstance(obj, types.IterableType): return signature(obj.iterator_type, obj) @builtin class IterNext(AbstractTemplate): key = "iternext" def generic(self, args, kws): assert not kws [it] = args if isinstance(it, types.IteratorType): return signature(types.Pair(it.yield_type, types.boolean), it) @builtin class PairFirst(AbstractTemplate): """ Given a heterogenous pair, return the first element. """ key = "pair_first" def generic(self, args, kws): assert not kws [pair] = args if isinstance(pair, types.Pair): return signature(pair.first_type, pair) @builtin class PairSecond(AbstractTemplate): """ Given a heterogenous pair, return the second element. """ key = "pair_second" def generic(self, args, kws): assert not kws [pair] = args if isinstance(pair, types.Pair): return signature(pair.second_type, pair) def choose_result_bitwidth(*inputs): return max(types.intp.bitwidth, *(tp.bitwidth for tp in inputs)) def choose_result_int(*inputs): """ Choose the integer result type for an operation on integer inputs, according to the integer typing NBEP. """ bitwidth = choose_result_bitwidth(*inputs) signed = any(tp.signed for tp in inputs) return types.Integer.from_bitwidth(bitwidth, signed) # The "machine" integer types to take into consideration for operator typing # (according to the integer typing NBEP) machine_ints = ( sorted(set((types.intp, types.int64))) + sorted(set((types.uintp, types.uint64))) ) # Explicit integer rules for binary operators; smaller ints will be # automatically upcast. integer_binop_cases = tuple( signature(choose_result_int(op1, op2), op1, op2) for op1, op2 in itertools.product(machine_ints, machine_ints) ) class BinOp(ConcreteTemplate): cases = list(integer_binop_cases) cases += [signature(op, op, op) for op in sorted(types.real_domain)] cases += [signature(op, op, op) for op in sorted(types.complex_domain)] @builtin class BinOpAdd(BinOp): key = "+" @builtin class BinOpSub(BinOp): key = "-" @builtin class BinOpMul(BinOp): key = "*" @builtin class BinOpDiv(BinOp): key = "/?" @builtin class BinOpMod(ConcreteTemplate): key = "%" cases = list(integer_binop_cases) cases += [signature(op, op, op) for op in sorted(types.real_domain)] @builtin class BinOpTrueDiv(ConcreteTemplate): key = "/" cases = [signature(types.float64, op1, op2) for op1, op2 in itertools.product(machine_ints, machine_ints)] cases += [signature(op, op, op) for op in sorted(types.real_domain)] cases += [signature(op, op, op) for op in sorted(types.complex_domain)] @builtin class BinOpFloorDiv(ConcreteTemplate): key = "//" cases = list(integer_binop_cases) cases += [signature(op, op, op) for op in sorted(types.real_domain)] @builtin class BinOpPower(ConcreteTemplate): key = "**" cases = list(integer_binop_cases) cases += [signature(types.float64, types.float64, op) for op in sorted(types.signed_domain)] cases += [signature(types.float64, types.float64, op) for op in sorted(types.unsigned_domain)] cases += [signature(op, op, op) for op in sorted(types.real_domain)] cases += [signature(op, op, op) for op in sorted(types.complex_domain)] class PowerBuiltin(BinOpPower): key = pow # TODO add 3 operand version builtin_global(pow, types.Function(PowerBuiltin)) class BitwiseShiftOperation(ConcreteTemplate): cases = list(integer_binop_cases) @builtin class BitwiseLeftShift(BitwiseShiftOperation): key = "<<" @builtin class BitwiseRightShift(BitwiseShiftOperation): key = ">>" class BitwiseLogicOperation(BinOp): cases = list(integer_binop_cases) @builtin class BitwiseAnd(BitwiseLogicOperation): key = "&" @builtin class BitwiseOr(BitwiseLogicOperation): key = "|" @builtin class BitwiseXor(BitwiseLogicOperation): key = "^" # Bitwise invert and negate are special: we must not upcast the operand # for unsigned numbers, as that would change the result. # (i.e. ~np.int8(0) == 255 but ~np.int32(0) == 4294967295). @builtin class BitwiseInvert(ConcreteTemplate): key = "~" cases = [signature(types.int8, types.boolean)] cases += [signature(choose_result_int(op), op) for op in types.unsigned_domain] cases += [signature(choose_result_int(op), op) for op in types.signed_domain] class UnaryOp(ConcreteTemplate): cases = [signature(choose_result_int(op), op) for op in types.unsigned_domain] cases += [signature(choose_result_int(op), op) for op in types.signed_domain] cases += [signature(op, op) for op in sorted(types.real_domain)] cases += [signature(op, op) for op in sorted(types.complex_domain)] @builtin class UnaryNegate(UnaryOp): key = "-" @builtin class UnaryPositive(UnaryOp): key = "+" @builtin class UnaryNot(ConcreteTemplate): key = "not" cases = [signature(types.boolean, types.boolean)] cases += [signature(types.boolean, op) for op in sorted(types.signed_domain)] cases += [signature(types.boolean, op) for op in sorted(types.unsigned_domain)] cases += [signature(types.boolean, op) for op in sorted(types.real_domain)] cases += [signature(types.boolean, op) for op in sorted(types.complex_domain)] class OrderedCmpOp(ConcreteTemplate): cases = [signature(types.boolean, types.boolean, types.boolean)] cases += [signature(types.boolean, op, op) for op in sorted(types.signed_domain)] cases += [signature(types.boolean, op, op) for op in sorted(types.unsigned_domain)] cases += [signature(types.boolean, op, op) for op in sorted(types.real_domain)] class UnorderedCmpOp(ConcreteTemplate): cases = OrderedCmpOp.cases + [ signature(types.boolean, op, op) for op in sorted(types.complex_domain)] @builtin class CmpOpLt(OrderedCmpOp): key = '<' @builtin class CmpOpLe(OrderedCmpOp): key = '<=' @builtin class CmpOpGt(OrderedCmpOp): key = '>' @builtin class CmpOpGe(OrderedCmpOp): key = '>=' @builtin class CmpOpEq(UnorderedCmpOp): key = '==' @builtin class CmpOpNe(UnorderedCmpOp): key = '!=' class TupleCompare(AbstractTemplate): def generic(self, args, kws): [lhs, rhs] = args if isinstance(lhs, types.BaseTuple) and isinstance(rhs, types.BaseTuple): for u, v in zip(lhs, rhs): # Check element-wise comparability res = self.context.resolve_function_type(self.key, (u, v), {}) if res is None: break else: return signature(types.boolean, lhs, rhs) @builtin class TupleEq(TupleCompare): key = '==' @builtin class TupleNe(TupleCompare): key = '!=' @builtin class TupleGe(TupleCompare): key = '>=' @builtin class TupleGt(TupleCompare): key = '>' @builtin class TupleLe(TupleCompare): key = '<=' @builtin class TupleLt(TupleCompare): key = '<' # Register default implementations of binary inplace operators for # immutable types. class InplaceImmutable(AbstractTemplate): def generic(self, args, kws): lhs, rhs = args if not lhs.mutable: return self.context.resolve_function_type(self.key[:-1], args, kws) # Inplace ops on mutable arguments must be typed explicitly for _binop, _inp, op in operator_map: if _inp: template = type('InplaceImmutable_%s' % _binop, (InplaceImmutable,), dict(key=op + '=')) builtin(template) class CmpOpIdentity(AbstractTemplate): def generic(self, args, kws): [lhs, rhs] = args return signature(types.boolean, lhs, rhs) @builtin class CmpOpIs(CmpOpIdentity): key = 'is' @builtin class CmpOpIsNot(CmpOpIdentity): key = 'is not' def normalize_1d_index(index): """ Normalize the *index* type (an integer or slice) for indexing a 1D sequence. """ if index == types.slice3_type: return types.slice3_type elif isinstance(index, types.Integer): return types.intp if index.signed else types.uintp def normalize_nd_index(index): """ Normalize the *index* type (an integer, slice or tuple thereof) for indexing a N-D sequence. """ if isinstance(index, types.UniTuple): if index.dtype in types.integer_domain: idxtype = types.intp if index.dtype.signed else types.uintp return types.UniTuple(idxtype, len(index)) elif index.dtype == types.slice3_type: return index elif isinstance(index, types.Tuple): for ty in index: if (ty not in types.integer_domain and ty != types.slice3_type): raise TypeError('Type %s of index %s is unsupported for indexing' % (ty, index)) return index return normalize_1d_index(index) @builtin class GetItemCPointer(AbstractTemplate): key = "getitem" def generic(self, args, kws): assert not kws ptr, idx = args if isinstance(ptr, types.CPointer) and isinstance(idx, types.Integer): return signature(ptr.dtype, ptr, normalize_1d_index(idx)) @builtin class SetItemCPointer(AbstractTemplate): key = "setitem" def generic(self, args, kws): assert not kws ptr, idx, val = args if isinstance(ptr, types.CPointer) and isinstance(idx, types.Integer): return signature(types.none, ptr, normalize_1d_index(idx), ptr.dtype) @builtin class Len(AbstractTemplate): key = types.len_type def generic(self, args, kws): assert not kws (val,) = args if isinstance(val, (types.Buffer, types.BaseTuple)): return signature(types.intp, val) @builtin class TupleBool(AbstractTemplate): key = "is_true" def generic(self, args, kws): assert not kws (val,) = args if isinstance(val, (types.BaseTuple)): return signature(types.boolean, val) #------------------------------------------------------------------------------- @builtin_attr class MemoryViewAttribute(AttributeTemplate): key = types.MemoryView if PYVERSION >= (3,): def resolve_contiguous(self, buf): return types.boolean def resolve_c_contiguous(self, buf): return types.boolean def resolve_f_contiguous(self, buf): return types.boolean def resolve_itemsize(self, buf): return types.intp def resolve_nbytes(self, buf): return types.intp def resolve_readonly(self, buf): return types.boolean def resolve_shape(self, buf): return types.UniTuple(types.intp, buf.ndim) def resolve_strides(self, buf): return types.UniTuple(types.intp, buf.ndim) def resolve_ndim(self, buf): return types.intp #------------------------------------------------------------------------------- @builtin_attr class BooleanAttribute(AttributeTemplate): key = types.Boolean def resolve___class__(self, ty): return types.NumberClass(ty) @builtin_attr class NumberAttribute(AttributeTemplate): key = types.Number def resolve___class__(self, ty): return types.NumberClass(ty) def resolve_real(self, ty): return getattr(ty, "underlying_float", ty) def resolve_imag(self, ty): return getattr(ty, "underlying_float", ty) @bound_function("complex.conjugate") def resolve_conjugate(self, ty, args, kws): assert not args assert not kws return signature(ty) #------------------------------------------------------------------------------- @builtin_attr class NumberClassAttribute(AttributeTemplate): key = types.NumberClass def resolve___call__(self, classty): """ Resolve a number class's constructor (e.g. calling int(...)) """ ty = classty.instance_type def typer(val): return ty return types.Function(make_callable_template(key=ty, typer=typer)) def register_number_classes(register_global): nb_types = set(types.number_domain) nb_types.add(types.bool_) for ty in nb_types: register_global(ty, types.NumberClass(ty)) register_number_classes(builtin_global) #------------------------------------------------------------------------------ class Max(AbstractTemplate): key = max def generic(self, args, kws): assert not kws # max(a, b, ...) if len(args) < 2: return for a in args: if a not in types.number_domain: return retty = self.context.unify_types(*args) if retty is not None: return signature(retty, *args) class Min(AbstractTemplate): key = min def generic(self, args, kws): assert not kws # min(a, b, ...) if len(args) < 2: return for a in args: if a not in types.number_domain: return retty = self.context.unify_types(*args) if retty is not None: return signature(retty, *args) class Round(ConcreteTemplate): key = round if PYVERSION < (3, 0): cases = [ signature(types.float32, types.float32), signature(types.float64, types.float64), ] else: cases = [ signature(types.intp, types.float32), signature(types.int64, types.float64), ] cases += [ signature(types.float32, types.float32, types.intp), signature(types.float64, types.float64, types.intp), ] builtin_global(max, types.Function(Max)) builtin_global(min, types.Function(Min)) builtin_global(round, types.Function(Round)) #------------------------------------------------------------------------------ class Bool(AbstractTemplate): key = bool def generic(self, args, kws): assert not kws [arg] = args if arg in types.number_domain: return signature(types.boolean, arg) # XXX typing for bool cannot be polymorphic because of the # types.Function thing, so we redirect to the "is_true" # intrinsic. return self.context.resolve_function_type("is_true", args, kws) class Int(AbstractTemplate): key = int def generic(self, args, kws): assert not kws [arg] = args if arg not in types.number_domain: raise TypeError("int() only support for numbers") if arg in types.complex_domain: raise TypeError("int() does not support complex") if arg in types.integer_domain: return signature(arg, arg) if arg in types.real_domain: return signature(types.intp, arg) class Float(AbstractTemplate): key = float def generic(self, args, kws): assert not kws [arg] = args if arg not in types.number_domain: raise TypeError("float() only support for numbers") if arg in types.complex_domain: raise TypeError("float() does not support complex") if arg in types.integer_domain: return signature(types.float64, arg) elif arg in types.real_domain: return signature(arg, arg) class Complex(AbstractTemplate): key = complex def generic(self, args, kws): assert not kws if len(args) == 1: [arg] = args if arg not in types.number_domain: raise TypeError("complex() only support for numbers") if arg == types.float32: return signature(types.complex64, arg) else: return signature(types.complex128, arg) elif len(args) == 2: [real, imag] = args if (real not in types.number_domain or imag not in types.number_domain): raise TypeError("complex() only support for numbers") if real == imag == types.float32: return signature(types.complex64, real, imag) else: return signature(types.complex128, real, imag) builtin_global(bool, types.Function(Bool)) builtin_global(int, types.Function(Int)) builtin_global(float, types.Function(Float)) builtin_global(complex, types.Function(Complex)) #------------------------------------------------------------------------------ @builtin class Enumerate(AbstractTemplate): key = enumerate def generic(self, args, kws): assert not kws it = args[0] if len(args) > 1 and not args[1] in types.integer_domain: raise TypeError("Only integers supported as start value in " "enumerate") elif len(args) > 2: #let python raise its own error enumerate(*args) if isinstance(it, types.IterableType): enumerate_type = types.EnumerateType(it) return signature(enumerate_type, *args) builtin_global(enumerate, types.Function(Enumerate)) @builtin class Zip(AbstractTemplate): key = zip def generic(self, args, kws): assert not kws if all(isinstance(it, types.IterableType) for it in args): zip_type = types.ZipType(args) return signature(zip_type, *args) builtin_global(zip, types.Function(Zip)) @builtin class Intrinsic_array_ravel(AbstractTemplate): key = intrinsics.array_ravel def generic(self, args, kws): assert not kws [arr] = args if arr.layout in 'CF' and arr.ndim >= 1: return signature(arr.copy(ndim=1), arr) builtin_global(intrinsics.array_ravel, types.Function(Intrinsic_array_ravel)) #------------------------------------------------------------------------------ @builtin class TypeBuiltin(AbstractTemplate): key = type def generic(self, args, kws): assert not kws if len(args) == 1: # One-argument type() -> return the __class__ try: classty = self.context.resolve_getattr(args[0], "__class__") except KeyError: return else: return signature(classty, *args) builtin_global(type, types.Function(TypeBuiltin))
{ "content_hash": "428026da302d50d8ea645436e683cc2d", "timestamp": "", "source": "github", "line_count": 812, "max_line_length": 87, "avg_line_length": 26.77093596059113, "alnum_prop": 0.6162020425062104, "repo_name": "GaZ3ll3/numba", "id": "837096fb48bfdd92d7b9a1543f273a7f62f05d5b", "size": "21738", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "numba/typing/builtins.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Batchfile", "bytes": "2212" }, { "name": "C", "bytes": "228078" }, { "name": "C++", "bytes": "18847" }, { "name": "Cuda", "bytes": "214" }, { "name": "HTML", "bytes": "98846" }, { "name": "PowerShell", "bytes": "3153" }, { "name": "Python", "bytes": "3075839" }, { "name": "Shell", "bytes": "120" } ], "symlink_target": "" }
from django.db import models from django.contrib.auth.models import User from django.utils import timezone from s3direct.fields import S3DirectField def get_first_name(self): return self.first_name User.add_to_class("__str__", get_first_name) RATING_CHOICES = ( (1, '1'), (2, '2'), (3, 'e'), (4, '4'), (5, '5'), (6, '6'), (7, '7'), (8, '8'), (9, '9'), (10, '10') ) class Game(models.Model): """ Represents a Community game """ name = models.CharField(max_length=250) description = models.TextField(blank=True, default="No description") estimated_length = models.PositiveIntegerField(help_text="Minutes", default=10) number_of_participants = models.PositiveIntegerField(default=1) def __str__(self): return "{}".format(self.name) # Community class CommunityInst(models.Model): """ Represents a single community, links to :model:'Community.Game' links to :model:'Community.CommunityGames' """ date = models.DateField(default=timezone.now) spectrum_id = models.PositiveIntegerField(default=0) occuring_games = models.ManyToManyField(Game, through='CommunityGames') minutes_ended_early = models.PositiveIntegerField(default=5) photo = S3DirectField(dest='community', default=None) def __str__(self): return"{}".format(self.date) class Meta: verbose_name = 'Community Instance' verbose_name_plural = 'Community Instances' ordering = ["-date"] class CommunityGames(models.Model): """ Links a community to a game, links to :model:'Community.CommunityInst' links to :model:'Community.Game' """ communityinst = models.ForeignKey(CommunityInst, on_delete=models.CASCADE) game = models.ForeignKey(Game, on_delete=models.CASCADE) class Meta: verbose_name_plural = 'Community Games' def __str__(self): return "{}".format(self.game) class CommunityGameRatings(models.Model): """ Attaches a community's game to a user rating, links to :model:'auth.User' links to :model:'Community.CommunityInst' links to :model:'Community.Game' links to :model:'Community.CommunityGames' """ user = models.ForeignKey(User) games = models.ForeignKey(CommunityGames) game_rating = models.PositiveIntegerField(choices=RATING_CHOICES, default=5) LIKE_TO_SEE_AGAIN = ( ('y', 'Yes'), ('n', 'No') ) like_to_see_again = models.CharField(max_length=1, choices=LIKE_TO_SEE_AGAIN, default='n') game_comments = models.TextField(blank=True, null=True,) class Meta: verbose_name = 'Community Game Rating' verbose_name_plural = 'Community Game Ratings' def __str__(self): return "{}-{}".format(self.user.first_name, self.games) class CommunityExtraRatings(models.Model): """ Attaches a community's extras to a user rating, links to :model:'auth.User' links to :model:'Community.CommunityInst' """ user = models.ForeignKey(User) community = models.ForeignKey(CommunityInst) overall_rating = models.PositiveIntegerField(default=5, choices=RATING_CHOICES) extra_comments = models.TextField(blank=True) how_can_we_improve_survey = models.TextField(blank=True) COMMUNITY_PACING_RATINGS = ( ('v', 'Very Good'), ('g', 'Good'), ('d', 'Decent'), ('b', 'Bad'), ('h', 'Very Bad') # h for horrible ) pacing_rating = models.CharField(max_length=20, choices=COMMUNITY_PACING_RATINGS, default='d') class Meta: verbose_name='Community Extra Ratings' verbose_name_plural='Community Extra Ratings' def __str__(self): return "{}-{}".format(self.user.first_name, self.community) class SongSuggestions(models.Model): """ Represents a single song suggestion for a single community links to :model:'auth.User' links to :model:'Community.CommunityInst' """ user = models.ForeignKey(User) community = models.ForeignKey(CommunityInst) suggestions = models.TextField(help_text='Please list links to songs, we can\'t play it with just a name') def __str__(self): return "{}={}".format(self.user.first_name, self.community)
{ "content_hash": "d4a25aafc02555e93eaed8c6b3b66be5", "timestamp": "", "source": "github", "line_count": 146, "max_line_length": 107, "avg_line_length": 26.78082191780822, "alnum_prop": 0.710230179028133, "repo_name": "ByrdOfAFeather/AlphaTrion", "id": "97ecfdec4fb41ff90712cb31182d7d66b23f24d4", "size": "3910", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Community/models.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "7527" }, { "name": "HTML", "bytes": "29482" }, { "name": "JavaScript", "bytes": "1773" }, { "name": "Python", "bytes": "106358" } ], "symlink_target": "" }
from django.forms.fields import * # noqa from django_extras.forms import widgets from django_extras.core import validators class ColorField(CharField): """ Form field that provides input for color picker """ widget = widgets.JQueryColorPicker def __init__(self, allow_alpha=False, *args, **kwargs): super(ColorField, self).__init__(*args, **kwargs) self.allow_alpha = allow_alpha if allow_alpha: self.validators.append(validators.validate_alpha_color) else: self.validators.append(validators.validate_color) class JsonField(CharField): """ Form field that validates that valid JSON is supplied. """ widget = widgets.Textarea default_validators = [validators.validate_json]
{ "content_hash": "5b99f8cae4b0a9c84bb21d21ce45314c", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 67, "avg_line_length": 29.807692307692307, "alnum_prop": 0.6748387096774193, "repo_name": "gem/django-extras", "id": "baf5f6ba91ca7e518d09c8c249ed29cb3fd35d3e", "size": "775", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "django_extras/forms/fields.py", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
""" A Python Database API Specification v2.0 implementation that provides configuration loading, variable substitution, logging, query banding, etc and options to use either ODBC or REST""" # The MIT License (MIT) # # Copyright (c) 2015 by Teradata # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import sys import string import os.path import logging import time import atexit import datetime import platform import getpass import subprocess import collections import codecs from .util import toUnicode from . import tdodbc, util, api, datatypes from . import tdrest # @UnresolvedImport from .version import __version__ # @UnresolvedImport # The module logger logger = logging.getLogger(__name__) METHOD_REST = "rest" METHOD_ODBC = "odbc" # Implement python version specific setup. if sys.version_info[0] == 2: import ConfigParser as configparser # @UnresolvedImport #@UnusedImport else: import configparser # @UnresolvedImport @UnusedImport @Reimport def handleUncaughtException(exc_type, exc_value, exc_traceback): """Make sure that uncaught exceptions are logged""" logger.error("Uncaught exception", exc_info=( exc_type, exc_value, exc_traceback)) sys.__excepthook__(exc_type, exc_value, exc_traceback) def exiting(): """Invoked when the python interpreter is exiting.""" logger.info("UdaExec exiting.") class UdaExec: """Helper class for scripting with Teradata systems""" def __init__(self, appName="${appName}", version="${version}", checkpointFile="${checkpointFile}", runNumberFile="${runNumberFile}", runNumber=None, configureLogging="${configureLogging}", logDir="${logDir}", logFile="${logFile}", logConsole="${logConsole}", logLevel="${logLevel}", logRetention="${logRetention}", systemConfigFile="/etc/udaexec.ini", userConfigFile="~/udaexec.ini", appConfigFile="udaexec.ini", configFiles=None, configSection="CONFIG", configEncoding="utf8", parseCmdLineArgs=True, gitPath="${gitPath}", production="${production}", odbcLibPath="${odbcLibPath}", dataTypeConverter=datatypes.DefaultDataTypeConverter()): """ Initializes the UdaExec framework """ # Load configuration files. if configFiles is None: configFiles = [] _appendConfigFiles( configFiles, systemConfigFile, userConfigFile, appConfigFile) logMsgs = [(logging.INFO, "Initializing UdaExec...")] self.config = UdaExecConfig(configFiles, configEncoding, configSection, parseCmdLineArgs, logMsgs) # Verify required configuration parameters are specified. self.config['appName'] = self.config.resolve( appName, errorMsg="appName is a required field, it must be " "passed in as a parameter or specified in a config file.") # Initialize runNumbers. self._initRunNumber(runNumberFile, runNumber, logMsgs) # Configure Logging self.configureLogging = util.booleanValue( self.config.resolve(configureLogging, default="True")) if self.configureLogging: self._initLogging( self.config.resolve(logDir, default="logs"), self.config.resolve( logFile, default=self.config.resolve( "${appName}.${runNumber}.log")), util.booleanValue( self.config.resolve(logConsole, default="True")), getattr( logging, self.config.resolve(logLevel, default="INFO")), int(self.config.resolve(logRetention, default="90")), logMsgs) # Log messages that were collected prior to logging being configured. for (level, msg) in logMsgs: logger.log(level, toUnicode(msg)) self._initVersion(self.config.resolve( version, default=""), self.config.resolve(gitPath, default="")) self._initQueryBands(self.config.resolve(production, default="false")) self._initCheckpoint(checkpointFile) self.odbcLibPath = self.config.resolve(odbcLibPath, default="") self.dataTypeConverter = dataTypeConverter logger.info(self) logger.debug(self.config) # Register exit function. d atexit.register(exiting) def connect(self, externalDSN=None, dataTypeConverter=None, **kwargs): """Creates a database connection""" # Construct data source configuration parameters args = {} if externalDSN is not None: externalDSN = self.config.resolve(externalDSN) args = self.config.section(externalDSN) if args is None: raise api.InterfaceError( api.CONFIG_ERROR, "No data source named \"{}\".".format(externalDSN)) args.update(self.config.resolveDict(kwargs)) # Log connection details. paramsToLog = dict(args) paramsToLog['password'] = 'XXXXXX' if externalDSN: paramsToLog['externalDSN'] = externalDSN logger.info("Creating connection: %s", paramsToLog) # Determine connection method. method = None if 'method' in args: method = args.pop('method') util.raiseIfNone('method', method) if 'queryBands' in args: queryBands = args.pop('queryBands') self.queryBands.update(queryBands) if 'autoCommit' not in args: args['autoCommit'] = "true" if not dataTypeConverter: dataTypeConverter = self.dataTypeConverter # Create the connection try: start = time.time() if method.lower() == METHOD_REST: conn = UdaExecConnection( self, tdrest.connect(queryBands=self.queryBands, dataTypeConverter=dataTypeConverter, **args)) elif method.lower() == METHOD_ODBC: conn = UdaExecConnection( self, tdodbc.connect(queryBands=self.queryBands, odbcLibPath=self.odbcLibPath, dataTypeConverter=dataTypeConverter, **args)) else: raise api.InterfaceError( api.CONFIG_ERROR, "Connection method \"{}\" not supported".format(method)) duration = time.time() - start logger.info( "Connection successful. Duration: %.3f seconds. Details: %s", duration, paramsToLog) return conn except Exception: logger.exception("Unable to create connection: %s", paramsToLog) raise def checkpoint(self, checkpointName=None): """ Sets or clears the current checkpoint.""" if checkpointName is None: logger.info("Clearing checkpoint....") self.currentCheckpoint = None self.skip = False if self.checkpointManager: self.checkpointManager.clearCheckpoint() else: self.currentCheckpoint = checkpointName if self.skip: if self.resumeFromCheckpoint == self.currentCheckpoint: logger.info( "Reached resume checkpoint: \"%s\". " "Resuming execution...", checkpointName) self.skip = False else: logger.info("Reached checkpoint: \"%s\"", checkpointName) if self.checkpointManager: self.checkpointManager.saveCheckpoint(checkpointName) def setCheckpointManager(self, checkpointManager): """ Sets a custom Checkpoint Manager. """ util.raiseIfNone("checkpointManager", checkpointManager) logger.info("Setting custom checkpoint manager: %s", checkpointManager) self.checkpointManager = checkpointManager logger.info("Loading resume checkpoint from checkpoint manager...") self.setResumeCheckpoint(checkpointManager.loadCheckpoint()) def setResumeCheckpoint(self, resumeCheckpoint): """ Sets the checkpoint that must be hit for executes to not be skipped.""" self.resumeFromCheckpoint = resumeCheckpoint if resumeCheckpoint: logger.info( "Resume checkpoint changed to \"%s\". Skipping all calls to " "execute until checkpoint is reached.", self.resumeFromCheckpoint) self.skip = True else: self.resumeFromCheckpoint = None if self.skip: self.skip = False logger.info( "Resume checkpoint cleared. Execute calls will " "no longer be skipped.") else: logger.info( "No resume checkpoint set, continuing execution...") def _initLogging(self, logDir, logFile, logConsole, level, logRetention, logMsgs): """Initialize UdaExec logging""" if not os.path.exists(logDir): os.makedirs(logDir) self._cleanupLogs(logDir, logRetention, logMsgs) self.logDir = os.path.realpath(logDir) self.logFile = os.path.join(self.logDir, logFile) formatter = logging.Formatter( "%(asctime)s - %(name)s - %(levelname)s - %(message)s") fh = logging.FileHandler(self.logFile, mode="a", encoding="utf8") fh.setFormatter(formatter) fh.setLevel(level) sh = logging.StreamHandler(sys.stdout) sh.setFormatter(formatter) sh.setLevel(level) root = logging.getLogger() root.setLevel(level) root.addHandler(fh) if logConsole: root.addHandler(sh) sys.excepthook = handleUncaughtException def _cleanupLogs(self, logDir, logRetention, logMsgs): """Cleanup older log files.""" logMsgs.append( (logging.INFO, "Cleaning up log files older than {} days.".format(logRetention))) cutoff = time.time() - (logRetention * 86400) count = 0 for f in os.listdir(logDir): f = os.path.join(logDir, f) if os.stat(f).st_mtime < cutoff: logMsgs.append( (logging.DEBUG, "Removing log file: {}".format(f))) os.remove(f) count += 1 logMsgs.append((logging.INFO, "Removed {} log files.".format(count))) def _initRunNumber(self, runNumberFile, runNumber, logMsgs): """Initialize the run number unique to this particular execution.""" if runNumber is not None: self.runNumber = runNumber logMsgs.append( (logging.INFO, "Setting run number to {}.".format(runNumber))) else: self.runNumber = "1" self.runNumberFile = self.config.resolve( runNumberFile, default='.runNumber') self.runNumberFile = os.path.abspath(self.runNumberFile) if os.path.isfile(self.runNumberFile): logMsgs.append( (logging.INFO, "Found run number " "file: \"{}\"".format(self.runNumberFile))) with open(self.runNumberFile, "r") as f: self.runNumber = f.readline() if self.runNumber is not None: try: self.runNumber = str(int(self.runNumber) + 1) except: logMsgs.append( (logging.WARN, "Unable to increment run " "number ({}) in {}. Resetting run number " "to 1.".format(self.runNumber, self.runNumberFile))) self.runNumber = "1" else: logMsgs.append( (logging.WARN, "No run number found in {}. Resetting " "run number to 1.".format(self.runNumberFile))) else: logMsgs.append( (logging.INFO, "No previous run number found as {} does " "not exist. Initializing run number to 1".format( self.runNumberFile))) with open(self.runNumberFile, 'w') as f: f.write(self.runNumber) self.runNumber = datetime.datetime.now().strftime( "%Y%m%d%H%M%S") + "-" + self.runNumber self.config['runNumber'] = self.runNumber def _initCheckpoint(self, checkpointFile): """Initialize the result checkpoint.""" self.currentCheckpoint = None self.skip = False if checkpointFile: checkpointFile = self.config.resolve( checkpointFile, default=self.config['appName'] + ".checkpoint") if checkpointFile: checkpointFile = os.path.abspath(checkpointFile) self.checkpointManager = UdaExecCheckpointManagerFileImpl( checkpointFile) self.resumeFromCheckpoint = self.checkpointManager.loadCheckpoint() if self.resumeFromCheckpoint: logger.info( "Resuming from checkpoint \"%s\".", self.resumeFromCheckpoint) self.skip = True else: logger.info("No previous checkpoint found, executing " "from beginning...") self.skip = False else: self.checkpointManager = None self.resumeFromCheckpoint = None logger.info("Checkpoint file disabled.") def _initVersion(self, version, gitPath): """Initialize the version and GIT revision.""" if not gitPath: gitPath = "git" logger.debug("Git path not specified, using system path.") self.gitVersion = None self.gitRevision = None self.gitDirty = None try: self.gitVersion = subprocess.check_output( [gitPath, "--version"], stderr=subprocess.STDOUT).decode("utf-8").strip() self.gitRevision = subprocess.check_output( [gitPath, "describe", "--tags", "--always", "HEAD"], stderr=subprocess.STDOUT).decode("utf-8").strip() self.modifiedFiles = subprocess.check_output( [gitPath, "status", "--porcelain"], stderr=subprocess.STDOUT).decode("utf-8").splitlines() self.gitDirty = True if self.modifiedFiles else False except subprocess.CalledProcessError as e: logger.debug( "Git information is not available: %s.", e.output.decode("utf-8")) except Exception as e: logger.debug("Git is not available: %s", e) if not version: version = self.gitRevision if not version: raise api.InterfaceError( api.CONFIG_ERROR, "version is a required field, it must be " "passed in as a parameter, specified in a config file, " "or pulled from a git repository.") self.config['version'] = version def _initQueryBands(self, production): """Initialize the Query Band that will be set on future connections.""" self.queryBands = collections.OrderedDict() self.queryBands['ApplicationName'] = self.config['appName'] self.queryBands['Version'] = self.config['version'] self.queryBands['JobID'] = self.runNumber self.queryBands['ClientUser'] = getpass.getuser() self.queryBands['Production'] = util.booleanValue(production) if self.configureLogging: self.queryBands['udaAppLogFile'] = self.logFile if self.gitRevision: self.queryBands['gitRevision'] = self.gitRevision if self.gitDirty is not None: self.queryBands['gitDirty'] = self.gitDirty self.queryBands['UtilityName'] = 'PyTd' self.queryBands['UtilityVersion'] = __version__ def __str__(self): value = u"Execution Details:\n/" value += u'*' * 80 value += u"\n" value += u" * Application Name: {}\n".format( toUnicode(self.config['appName'])) value += u" * Version: {}\n".format( toUnicode(self.config['version'])) value += u" * Run Number: {}\n".format(toUnicode(self.runNumber)) value += u" * Host: {}\n".format( toUnicode(platform.node())) value += u" * Platform: {}\n".format( platform.platform(aliased=True)) value += u" * OS User: {}\n".format( toUnicode(getpass.getuser())) value += u" * Python Version: {}\n".format(platform.python_version()) value += u" * Python Compiler: {}\n".format( platform.python_compiler()) value += u" * Python Build: {}\n".format(platform.python_build()) value += u" * UdaExec Version: {}\n".format(__version__) value += u" * Program Name: {}\n".format(toUnicode(sys.argv[0])) value += u" * Working Dir: {}\n".format(toUnicode(os.getcwd())) if self.gitRevision: value += u" * Git Version: {}\n".format(self.gitVersion) value += u" * Git Revision: {}\n".format(self.gitRevision) value += u" * Git Dirty: {} {}\n".format( self.gitDirty, "" if not self.gitDirty else "[" + ",".join(self.modifiedFiles) + "]") if self.configureLogging: value += u" * Log Dir: {}\n".format( toUnicode(self.logDir)) value += u" * Log File: {}\n".format( toUnicode(self.logFile)) value += u" * Config Files: {}\n".format( toUnicode(self.config.configFiles)) value += u" * Query Bands: {}\n".format( u";".join(u"{}={}".format(toUnicode(k), toUnicode(v)) for k, v in self.queryBands.items())) value += '*' * 80 value += '/' return value def _appendConfigFiles(configFiles, *args): for arg in args: if arg is None: continue if util.isString(arg): configFiles.append(arg) else: configFiles.extend(arg) class UdaExecCheckpointManager: """ Manages the initialization and saving of checkpoints. """ def loadCheckpoint(self): """ Return the checkpoint name that we should resume from. """ raise NotImplementedError( "loadCheckpoint must be implemented by sub-class") def saveCheckpoint(self, checkpointName): """ Save the specified checkpoint """ raise NotImplementedError( "raiseCheckpoint must be implemented by sub-class") def clearCheckpoint(self): """ Remove the checkpoint so that the application starts from beginning next time around. """ raise NotImplementedError( "clearCheckpoint must be implemented by sub-class") class UdaExecCheckpointManagerFileImpl (UdaExecCheckpointManager): """ Implementation of the UdaExecCheckpointMananer using a local file.""" def __init__(self, file): self.file = file def loadCheckpoint(self): resumeFromCheckpoint = None if os.path.isfile(self.file): logger.info(u"Found checkpoint file: \"%s\"", toUnicode(self.file)) with open(self.file, "r") as f: resumeFromCheckpoint = f.readline() if not resumeFromCheckpoint: logger.warn( u"No checkpoint found in %s.", toUnicode(self.file)) else: logger.info(u"Checkpoint file not found: %s", toUnicode(self.file)) return resumeFromCheckpoint def saveCheckpoint(self, checkpointName): logger.info( "Saving checkpoint \"%s\" to %s.", checkpointName, self.file) with open(self.file, 'w') as f: f.write(checkpointName) def clearCheckpoint(self): logger.info("Removing checkpoint file %s.", self.file) if os.path.isfile(self.file): os.remove(self.file) class UdaExecTemplate (string.Template): """Template used by UdaExec configuration and token replacement.""" idpattern = r'[a-z][_a-z0-9\.]*' class UdaExecConfig: """UdaExec configuration loader and resolver.""" def __init__(self, configFiles, encoding, configSection, parseCmdLineArgs, logMsgs): configParser = configparser.ConfigParser() configParser.optionxform = str configFiles = [os.path.expanduser(f) for f in configFiles] self.configFiles = [toUnicode(os.path.abspath( f)) + (": Found" if os.path.isfile(f) else ": Not Found") for f in configFiles] logMsgs.append( (logging.INFO, "Reading config files: {}".format(self.configFiles))) if sys.version_info[0] == 2: for f in configFiles: if os.path.isfile(f): configParser.readfp(codecs.open(f, "r", encoding)) else: configParser.read(configFiles, encoding) self.configSection = configSection self.sections = {configSection: {}} for section in configParser.sections(): self.sections[section] = dict(configParser.items(section)) if parseCmdLineArgs: for arg in sys.argv: if arg.startswith('--') and '=' in arg: (key, val) = arg.split("=", 1) key = key[2:] logMsgs.append( (logging.DEBUG, u"Configuration value was set via " "command line: {}={}".format(toUnicode(key), toUnicode(val)))) self.sections[configSection][key] = val def __iter__(self): return iter(self.sections[self.configSection]) def contains(self, option): return option in self.sections[self.configSection] def resolveDict(self, d, sections=None): if sections is None: sections = [self.configSection] for key, value in d.items(): if util.isString(value): d[key] = self._resolve(value, sections, None, None) return d def resolve(self, value, sections=None, default=None, errorMsg=None): if value is None: if errorMsg is not None: raise api.InterfaceError(api.CONFIG_ERROR, errorMsg) else: util.raiseIfNone("value", value) if not util.isString(value): return value if sections is None: sections = [self.configSection] return self._resolve(value, sections, default, errorMsg) def _resolve(self, value, sections, default, errorMsg): error = None for section in sections: try: s = self.sections[section] newValue = UdaExecTemplate( value.replace("$$", "$$$$")).substitute(**s) if value != newValue: value = self._resolve(newValue, sections, None, errorMsg) else: value = value.replace("$$", "$") error = None break except KeyError as e: error = e if error is not None: if default is not None: return default if errorMsg is not None: raise api.InterfaceError(api.CONFIG_ERROR, errorMsg) else: raise api.InterfaceError( api.CONFIG_ERROR, "Unable to resolve variable \"{}\". " "Not found: {}".format(value, error)) return value def section(self, section): try: return self.resolveDict(self.sections[section].copy(), (section, self.configSection)) except KeyError: return None def __getitem__(self, key): return self.resolve(self.sections[self.configSection][key]) def __setitem__(self, key, value): self.sections[self.configSection][key] = value def __str__(self): length = 0 for key in self.sections[self.configSection]: keyLength = len(key) if keyLength > length: length = keyLength value = u"Configuration Details:\n/" value += u'*' * 80 value += u"\n" for key in sorted(self.sections[self.configSection]): value += u" * {}: {}\n".format(toUnicode(key.rjust(length)), toUnicode( self.resolve("${" + key + "}")) if 'password' not in key.lower() else u'XXXX') value += '*' * 80 value += '/' return value class UdaExecConnection: """A UdaExec connection wrapper for ODBC or REST connections.""" def __init__(self, udaexec, conn): self.udaexec = udaexec self.conn = conn self.internalCursor = self.cursor() def close(self): self.internalCursor.close() self.conn.close() def commit(self): self.conn.commit() def rollback(self): self.conn.rollback() def cursor(self): return UdaExecCursor(self.udaexec, self.conn.cursor()) def __del__(self): self.close() def __enter__(self): return self def __exit__(self, exceptionType, exceptionValue, traceback): self.close() def callproc(self, procname, params, **kwargs): return self.internalCursor.callproc(procname, params, **kwargs) def execute(self, query=None, params=None, **kwargs): self.internalCursor.execute(query, params, **kwargs) return self.internalCursor def executemany(self, query, params, **kwargs): self.internalCursor.executemany(query, params, **kwargs) return self.internalCursor class UdaExecCursor: """A UdaExec cursor wrapper for ODBC or REST cursors.""" def __init__(self, udaexec, cursor): self.udaexec = udaexec self.cursor = cursor self.skip = False self.description = None self.types = None self.rowcount = -1 def callproc(self, procname, params, runAlways=False, continueOnError=False, ignoreErrors=[], **kwargs): self.skip = self.udaexec.skip and not runAlways if not self.skip: start = time.time() try: procname = self.udaexec.config.resolve(procname) outparams = self.cursor.callproc(procname, params, **kwargs) duration = time.time() - start logger.info( "Procedure Successful. Duration: %.3f seconds, " "Procedure: %s, Params: %s", duration, procname, params) return outparams except Exception as e: duration = time.time() - start if isinstance(e, api.DatabaseError) and e.code in ignoreErrors: logger.error( "Procedure Failed! Duration: %.3f seconds, " "Procedure: %s, Params: %s, Error Ignored: ", duration, procname, params, e) else: logger.exception( "Procedure Failed! Duration: %.3f seconds, " "Procedure: %s, Params: %s", duration, procname, params) if not continueOnError: raise else: logger.info( "Skipping procedure, haven't reached resume checkpoint yet. " "Procedure: %s", procname) def close(self): self.cursor.close() def execute(self, query=None, params=None, file=None, fileType=None, delimiter=";", **kwargs): if file is None: util.raiseIfNone("query", query) if query is not None: if util.isString(query): self._execute(self.cursor.execute, query, params, **kwargs) else: for q in query: self._execute(self.cursor.execute, q, params, **kwargs) if file is not None: self._executeFile(file, params, fileType, delimiter, **kwargs) return self def executemany(self, query, params, **kwargs): self._execute(self.cursor.executemany, query, params, **kwargs) return self def _executeFile(self, file, params, fileType, delimiter, runAlways=False, **kwargs): self.skip = self.udaexec.skip and not runAlways if not self.skip: file = self.udaexec.config.resolve(file) if fileType is None: script = util.SqlScript(file, delimiter) elif fileType == "bteq": script = util.BteqScript(file) else: raise api.InterfaceError( "UNKNOWN_FILE_TYPE", "The file type '{}' is not unknown".format(fileType)) for query in script: self._execute( self.cursor.execute, query, params, runAlways, **kwargs) else: logger.info( "Skipping file, haven't reached resume checkpoint yet. " "File: %s", file) def _execute(self, func, query, params, runAlways=False, continueOnError=False, logParamFrequency=1, logParamCharLimit=80, ignoreErrors=[], **kwargs): self.skip = self.udaexec.skip and not runAlways if not self.skip: start = time.time() paramStr = _getParamsString(params, logParamFrequency, logParamCharLimit) try: query = self.udaexec.config.resolve(query) func(query, params, **kwargs) self.description = self.cursor.description self.types = self.cursor.types self.rowcount = self.cursor.rowcount duration = time.time() - start rowsStr = " " if self.cursor.rowcount < 0 else \ " Rows: %s, " % self.cursor.rowcount logger.info( "Query Successful. Duration: %.3f seconds,%sQuery: %s%s", duration, rowsStr, query, paramStr) except Exception as e: self.description = None self.types = None self.rowcount = -1 duration = time.time() - start if isinstance(e, api.DatabaseError) and e.code in ignoreErrors: logger.error( "Query Failed! Duration: %.3f seconds, Query: %s%s, " "Error Ignored: %s", duration, query, paramStr, e) else: logger.exception( "Query Failed! Duration: %.3f seconds, Query: %s%s", duration, query, paramStr) if not continueOnError: raise e else: logger.info( "Skipping query, haven't reached resume checkpoint yet. " "Query: %s", query) def fetchone(self): if self.skip: return None return self.cursor.fetchone() def fetchmany(self, size=None): if self.skip: return [] return self.cursor.fetchmany(size) def fetchall(self): if self.skip: return [] return self.cursor.fetchall() def nextset(self): if self.skip: return None return self.cursor.nextset() def setinputsizes(self, sizes): self.cursor.setinputsizes(self, sizes) def setoutputsize(self, size, column=None): self.cursor.setoutputsizes(self, size) def __iter__(self): return self def __next__(self): if self.skip: raise StopIteration() return self.cursor.__next__() def next(self): return self.__next__() def __enter__(self): return self def __exit__(self, t, value, traceback): self.close() def _getParamsString(params, logParamFrequency=1, logParamCharLimit=80): paramsStr = "" if params and logParamFrequency > 0: if isinstance(params[0], (list, tuple)): index = 0 paramsStr = [] for p in params: index += 1 if index == 1 or index % logParamFrequency == 0: paramsStr.append(_getParamString(p, logParamCharLimit, index)) if index != 1 and index % logParamFrequency != 0: paramsStr.append(_getParamString(p, logParamCharLimit, index)) paramsStr = u", Params: {}".format( u"\n".join(paramsStr)) else: paramsStr = u", Params: {}".format(_getParamString( params, logParamCharLimit)) return paramsStr def _getParamString(params, logParamCharLimit=80, index=None): paramsStr = [] for p in params: p = repr(p) if logParamCharLimit > 0 and len(p) > logParamCharLimit: p = (p[:(logParamCharLimit)] + '...') paramsStr.append(p) prefix = u"[" if index is not None: prefix = u"%s:[" % index return prefix + u",".join(paramsStr) + u"]"
{ "content_hash": "befa8a416a3d6e13033ef29c148e40e6", "timestamp": "", "source": "github", "line_count": 873, "max_line_length": 79, "avg_line_length": 40.39862542955326, "alnum_prop": 0.5587501417715777, "repo_name": "fxstein/PyTd", "id": "697b22bb0d8036fb7212abe32b4de88cd356461d", "size": "35268", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "teradata/udaexec.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "251471" } ], "symlink_target": "" }
"""The shares api.""" import ast from http import client as http_client from oslo_log import log from oslo_utils import strutils from oslo_utils import uuidutils import webob from webob import exc from manila.api import common from manila.api.openstack import wsgi from manila.api.views import share_accesses as share_access_views from manila.api.views import shares as share_views from manila.common import constants from manila import db from manila import exception from manila.i18n import _ from manila import share from manila.share import share_types from manila import utils LOG = log.getLogger(__name__) class ShareMixin(object): """Mixin class for Share API Controllers.""" def _update(self, *args, **kwargs): db.share_update(*args, **kwargs) def _get(self, *args, **kwargs): return self.share_api.get(*args, **kwargs) def _delete(self, *args, **kwargs): return self.share_api.delete(*args, **kwargs) def show(self, req, id): """Return data about the given share.""" context = req.environ['manila.context'] try: share = self.share_api.get(context, id) except exception.NotFound: raise exc.HTTPNotFound() return self._view_builder.detail(req, share) def delete(self, req, id): """Delete a share.""" context = req.environ['manila.context'] LOG.info("Delete share with id: %s", id, context=context) try: share = self.share_api.get(context, id) # NOTE(ameade): If the share is in a share group, we require its # id be specified as a param. sg_id_key = 'share_group_id' if share.get(sg_id_key): share_group_id = req.params.get(sg_id_key) if not share_group_id: msg = _("Must provide '%s' as a request " "parameter when deleting a share in a share " "group.") % sg_id_key raise exc.HTTPBadRequest(explanation=msg) elif share_group_id != share.get(sg_id_key): msg = _("The specified '%s' does not match " "the share group id of the share.") % sg_id_key raise exc.HTTPBadRequest(explanation=msg) self.share_api.delete(context, share) except exception.NotFound: raise exc.HTTPNotFound() except exception.InvalidShare as e: raise exc.HTTPForbidden(explanation=e.message) except exception.Conflict as e: raise exc.HTTPConflict(explanation=e.message) return webob.Response(status_int=http_client.ACCEPTED) def index(self, req): """Returns a summary list of shares.""" req.GET.pop('export_location_id', None) req.GET.pop('export_location_path', None) req.GET.pop('name~', None) req.GET.pop('description~', None) req.GET.pop('description', None) req.GET.pop('with_count', None) return self._get_shares(req, is_detail=False) def detail(self, req): """Returns a detailed list of shares.""" req.GET.pop('export_location_id', None) req.GET.pop('export_location_path', None) req.GET.pop('name~', None) req.GET.pop('description~', None) req.GET.pop('description', None) req.GET.pop('with_count', None) return self._get_shares(req, is_detail=True) def _get_shares(self, req, is_detail): """Returns a list of shares, transformed through view builder.""" context = req.environ['manila.context'] common._validate_pagination_query(req) search_opts = {} search_opts.update(req.GET) # Remove keys that are not related to share attrs sort_key = search_opts.pop('sort_key', 'created_at') sort_dir = search_opts.pop('sort_dir', 'desc') show_count = False if 'with_count' in search_opts: show_count = utils.get_bool_from_api_params( 'with_count', search_opts) search_opts.pop('with_count') if 'is_soft_deleted' in search_opts: is_soft_deleted = utils.get_bool_from_api_params( 'is_soft_deleted', search_opts) search_opts['is_soft_deleted'] = is_soft_deleted # Deserialize dicts if 'metadata' in search_opts: search_opts['metadata'] = ast.literal_eval(search_opts['metadata']) if 'extra_specs' in search_opts: search_opts['extra_specs'] = ast.literal_eval( search_opts['extra_specs']) # NOTE(vponomaryov): Manila stores in DB key 'display_name', but # allows to use both keys 'name' and 'display_name'. It is leftover # from Cinder v1 and v2 APIs. if 'name' in search_opts: search_opts['display_name'] = search_opts.pop('name') if 'description' in search_opts: search_opts['display_description'] = search_opts.pop( 'description') # like filter for key, db_key in (('name~', 'display_name~'), ('description~', 'display_description~')): if key in search_opts: search_opts[db_key] = search_opts.pop(key) if sort_key == 'name': sort_key = 'display_name' common.remove_invalid_options( context, search_opts, self._get_share_search_options()) total_count = None if show_count: count, shares = self.share_api.get_all_with_count( context, search_opts=search_opts, sort_key=sort_key, sort_dir=sort_dir) total_count = count else: shares = self.share_api.get_all( context, search_opts=search_opts, sort_key=sort_key, sort_dir=sort_dir) if is_detail: shares = self._view_builder.detail_list(req, shares, total_count) else: shares = self._view_builder.summary_list(req, shares, total_count) return shares def _get_share_search_options(self): """Return share search options allowed by non-admin.""" # NOTE(vponomaryov): share_server_id depends on policy, allow search # by it for non-admins in case policy changed. # Also allow search by extra_specs in case policy # for it allows non-admin access. return ( 'display_name', 'status', 'share_server_id', 'volume_type_id', 'share_type_id', 'snapshot_id', 'host', 'share_network_id', 'is_public', 'metadata', 'extra_specs', 'sort_key', 'sort_dir', 'share_group_id', 'share_group_snapshot_id', 'export_location_id', 'export_location_path', 'display_name~', 'display_description~', 'display_description', 'limit', 'offset', 'is_soft_deleted') @wsgi.Controller.authorize def update(self, req, id, body): """Update a share.""" context = req.environ['manila.context'] if not body or 'share' not in body: raise exc.HTTPUnprocessableEntity() share_data = body['share'] valid_update_keys = ( 'display_name', 'display_description', 'is_public', ) update_dict = {key: share_data[key] for key in valid_update_keys if key in share_data} try: share = self.share_api.get(context, id) except exception.NotFound: raise exc.HTTPNotFound() if share.get('is_soft_deleted'): msg = _("Share '%s cannot be updated, " "since it has been soft deleted.") % share['id'] raise exc.HTTPForbidden(explanation=msg) update_dict = common.validate_public_share_policy( context, update_dict, api='update') share = self.share_api.update(context, share, update_dict) share.update(update_dict) return self._view_builder.detail(req, share) def create(self, req, body): # Remove share group attributes body.get('share', {}).pop('share_group_id', None) share = self._create(req, body) return share @wsgi.Controller.authorize('create') def _create(self, req, body, check_create_share_from_snapshot_support=False, check_availability_zones_extra_spec=False, scheduler_hints=None): """Creates a new share.""" context = req.environ['manila.context'] if not self.is_valid_body(body, 'share'): raise exc.HTTPUnprocessableEntity() share = body['share'] share = common.validate_public_share_policy(context, share) # NOTE(rushiagr): Manila API allows 'name' instead of 'display_name'. if share.get('name'): share['display_name'] = share.get('name') del share['name'] # NOTE(rushiagr): Manila API allows 'description' instead of # 'display_description'. if share.get('description'): share['display_description'] = share.get('description') del share['description'] size = share['size'] share_proto = share['share_proto'].upper() msg = ("Create %(share_proto)s share of %(size)s GB" % {'share_proto': share_proto, 'size': size}) LOG.info(msg, context=context) availability_zone_id = None availability_zone = share.get('availability_zone') if availability_zone: try: availability_zone_db = db.availability_zone_get( context, availability_zone) availability_zone_id = availability_zone_db.id availability_zone = availability_zone_db.name except exception.AvailabilityZoneNotFound as e: raise exc.HTTPNotFound(explanation=e.message) share_group_id = share.get('share_group_id') if share_group_id: try: share_group = db.share_group_get(context, share_group_id) except exception.ShareGroupNotFound as e: raise exc.HTTPNotFound(explanation=e.message) sg_az_id = share_group['availability_zone_id'] if availability_zone and availability_zone_id != sg_az_id: msg = _("Share cannot have AZ ('%(s_az)s') different than " "share group's one (%(sg_az)s).") % { 's_az': availability_zone_id, 'sg_az': sg_az_id} raise exception.InvalidInput(msg) availability_zone = db.availability_zone_get( context, sg_az_id).name kwargs = { 'availability_zone': availability_zone, 'metadata': share.get('metadata'), 'is_public': share.get('is_public', False), 'share_group_id': share_group_id, } snapshot_id = share.get('snapshot_id') if snapshot_id: snapshot = self.share_api.get_snapshot(context, snapshot_id) else: snapshot = None kwargs['snapshot_id'] = snapshot_id share_network_id = share.get('share_network_id') parent_share_type = {} if snapshot: # Need to check that share_network_id from snapshot's # parents share equals to share_network_id from args. # If share_network_id is empty then update it with # share_network_id of parent share. parent_share = self.share_api.get(context, snapshot['share_id']) parent_share_net_id = parent_share.instance['share_network_id'] parent_share_type = share_types.get_share_type( context, parent_share.instance['share_type_id']) if share_network_id: if share_network_id != parent_share_net_id: msg = ("Share network ID should be the same as snapshot's" " parent share's or empty") raise exc.HTTPBadRequest(explanation=msg) elif parent_share_net_id: share_network_id = parent_share_net_id # Verify that share can be created from a snapshot if (check_create_share_from_snapshot_support and not parent_share['create_share_from_snapshot_support']): msg = (_("A new share may not be created from snapshot '%s', " "because the snapshot's parent share does not have " "that capability.") % snapshot_id) LOG.error(msg) raise exc.HTTPBadRequest(explanation=msg) if share_network_id: try: share_network = self.share_api.get_share_network( context, share_network_id) except exception.ShareNetworkNotFound as e: raise exc.HTTPNotFound(explanation=e.msg) common.check_share_network_is_active(share_network) if availability_zone_id: subnets = ( db.share_network_subnets_get_all_by_availability_zone_id( context, share_network_id, availability_zone_id=availability_zone_id)) if not subnets: msg = _("A share network subnet was not found for the " "requested availability zone.") raise exc.HTTPBadRequest(explanation=msg) kwargs['az_request_multiple_subnet_support_map'] = { availability_zone_id: len(subnets) > 1, } display_name = share.get('display_name') display_description = share.get('display_description') if 'share_type' in share and 'volume_type' in share: msg = 'Cannot specify both share_type and volume_type' raise exc.HTTPBadRequest(explanation=msg) req_share_type = share.get('share_type', share.get('volume_type')) share_type = None if req_share_type: try: if not uuidutils.is_uuid_like(req_share_type): share_type = share_types.get_share_type_by_name( context, req_share_type) else: share_type = share_types.get_share_type( context, req_share_type) except (exception.ShareTypeNotFound, exception.ShareTypeNotFoundByName): msg = _("Share type not found.") raise exc.HTTPNotFound(explanation=msg) elif not snapshot: def_share_type = share_types.get_default_share_type() if def_share_type: share_type = def_share_type # Only use in create share feature. Create share from snapshot # and create share with share group features not # need this check. if (not share_network_id and not snapshot and not share_group_id and share_type and share_type.get('extra_specs') and (strutils.bool_from_string(share_type.get('extra_specs'). get('driver_handles_share_servers')))): msg = _('Share network must be set when the ' 'driver_handles_share_servers is true.') raise exc.HTTPBadRequest(explanation=msg) type_chosen = share_type or parent_share_type if type_chosen and check_availability_zones_extra_spec: type_azs = type_chosen.get( 'extra_specs', {}).get('availability_zones', '') type_azs = type_azs.split(',') if type_azs else [] kwargs['availability_zones'] = type_azs if (availability_zone and type_azs and availability_zone not in type_azs): msg = _("Share type %(type)s is not supported within the " "availability zone chosen %(az)s.") type_chosen = ( req_share_type or "%s (from source snapshot)" % ( parent_share_type.get('name') or parent_share_type.get('id')) ) payload = {'type': type_chosen, 'az': availability_zone} raise exc.HTTPBadRequest(explanation=msg % payload) if share_type: kwargs['share_type'] = share_type if share_network_id: kwargs['share_network_id'] = share_network_id kwargs['scheduler_hints'] = scheduler_hints new_share = self.share_api.create(context, share_proto, size, display_name, display_description, **kwargs) return self._view_builder.detail(req, new_share) @staticmethod def _any_instance_has_errored_rules(share): for instance in share['instances']: access_rules_status = instance['access_rules_status'] if access_rules_status == constants.SHARE_INSTANCE_RULES_ERROR: return True return False @wsgi.Controller.authorize('allow_access') def _allow_access(self, req, id, body, enable_ceph=False, allow_on_error_status=False, enable_ipv6=False, enable_metadata=False): """Add share access rule.""" context = req.environ['manila.context'] access_data = body.get('allow_access', body.get('os-allow_access')) if not enable_metadata: access_data.pop('metadata', None) share = self.share_api.get(context, id) if share.get('is_soft_deleted'): msg = _("Cannot allow access for share '%s' " "since it has been soft deleted.") % id raise exc.HTTPForbidden(explanation=msg) share_network_id = share.get('share_network_id') if share_network_id: share_network = db.share_network_get(context, share_network_id) common.check_share_network_is_active(share_network) if (not allow_on_error_status and self._any_instance_has_errored_rules(share)): msg = _("Access rules cannot be added while the share or any of " "its replicas or migration copies has its " "access_rules_status set to %(instance_rules_status)s. " "Deny any rules in %(rule_state)s state and try " "again.") % { 'instance_rules_status': constants.SHARE_INSTANCE_RULES_ERROR, 'rule_state': constants.ACCESS_STATE_ERROR, } raise webob.exc.HTTPBadRequest(explanation=msg) access_type = access_data['access_type'] access_to = access_data['access_to'] common.validate_access(access_type=access_type, access_to=access_to, enable_ceph=enable_ceph, enable_ipv6=enable_ipv6) try: access = self.share_api.allow_access( context, share, access_type, access_to, access_data.get('access_level'), access_data.get('metadata')) except exception.ShareAccessExists as e: raise webob.exc.HTTPBadRequest(explanation=e.msg) except exception.InvalidMetadata as error: raise exc.HTTPBadRequest(explanation=error.msg) except exception.InvalidMetadataSize as error: raise exc.HTTPBadRequest(explanation=error.msg) return self._access_view_builder.view(req, access) @wsgi.Controller.authorize('deny_access') def _deny_access(self, req, id, body): """Remove share access rule.""" context = req.environ['manila.context'] access_id = body.get( 'deny_access', body.get('os-deny_access'))['access_id'] share = self.share_api.get(context, id) if share.get('is_soft_deleted'): msg = _("Cannot deny access for share '%s' " "since it has been soft deleted.") % id raise exc.HTTPForbidden(explanation=msg) share_network_id = share.get('share_network_id', None) if share_network_id: share_network = db.share_network_get(context, share_network_id) common.check_share_network_is_active(share_network) try: access = self.share_api.access_get(context, access_id) if access.share_id != id: raise exception.NotFound() share = self.share_api.get(context, id) except exception.NotFound as error: raise webob.exc.HTTPNotFound(explanation=error.message) self.share_api.deny_access(context, share, access) return webob.Response(status_int=http_client.ACCEPTED) def _access_list(self, req, id, body): """List share access rules.""" context = req.environ['manila.context'] share = self.share_api.get(context, id) access_rules = self.share_api.access_get_all(context, share) return self._access_view_builder.list_view(req, access_rules) def _extend(self, req, id, body): """Extend size of a share.""" context = req.environ['manila.context'] share, size, force = self._get_valid_extend_parameters( context, id, body, 'os-extend') if share.get('is_soft_deleted'): msg = _("Cannot extend share '%s' " "since it has been soft deleted.") % id raise exc.HTTPForbidden(explanation=msg) try: self.share_api.extend(context, share, size, force=force) except (exception.InvalidInput, exception.InvalidShare) as e: raise webob.exc.HTTPBadRequest(explanation=str(e)) except exception.ShareSizeExceedsAvailableQuota as e: raise webob.exc.HTTPForbidden(explanation=e.message) return webob.Response(status_int=http_client.ACCEPTED) def _shrink(self, req, id, body): """Shrink size of a share.""" context = req.environ['manila.context'] share, size = self._get_valid_shrink_parameters( context, id, body, 'os-shrink') if share.get('is_soft_deleted'): msg = _("Cannot shrink share '%s' " "since it has been soft deleted.") % id raise exc.HTTPForbidden(explanation=msg) try: self.share_api.shrink(context, share, size) except (exception.InvalidInput, exception.InvalidShare) as e: raise webob.exc.HTTPBadRequest(explanation=str(e)) return webob.Response(status_int=http_client.ACCEPTED) def _get_valid_extend_parameters(self, context, id, body, action): try: share = self.share_api.get(context, id) except exception.NotFound as e: raise webob.exc.HTTPNotFound(explanation=e.message) try: size = int(body.get(action, body.get('extend'))['new_size']) except (KeyError, ValueError, TypeError): msg = _("New share size must be specified as an integer.") raise webob.exc.HTTPBadRequest(explanation=msg) # force is True means share extend will extend directly, is False # means will go through scheduler. Default value is False, try: force = strutils.bool_from_string(body.get( action, body.get('extend'))['force'], strict=True) except KeyError: force = False except (ValueError, TypeError): msg = (_('Invalid boolean force : %(value)s') % {'value': body.get('extend')['force']}) raise webob.exc.HTTPBadRequest(explanation=msg) return share, size, force def _get_valid_shrink_parameters(self, context, id, body, action): try: share = self.share_api.get(context, id) except exception.NotFound as e: raise webob.exc.HTTPNotFound(explanation=e.message) try: size = int(body.get(action, body.get('shrink'))['new_size']) except (KeyError, ValueError, TypeError): msg = _("New share size must be specified as an integer.") raise webob.exc.HTTPBadRequest(explanation=msg) return share, size class ShareController(wsgi.Controller, ShareMixin, wsgi.AdminActionsMixin): """The Shares API v1 controller for the OpenStack API.""" resource_name = 'share' _view_builder_class = share_views.ViewBuilder def __init__(self): super(ShareController, self).__init__() self.share_api = share.API() self._access_view_builder = share_access_views.ViewBuilder() @wsgi.action('os-reset_status') def share_reset_status(self, req, id, body): """Reset status of a share.""" return self._reset_status(req, id, body) @wsgi.action('os-force_delete') def share_force_delete(self, req, id, body): """Delete a share, bypassing the check for status.""" return self._force_delete(req, id, body) @wsgi.action('os-allow_access') def allow_access(self, req, id, body): """Add share access rule.""" return self._allow_access(req, id, body) @wsgi.action('os-deny_access') def deny_access(self, req, id, body): """Remove share access rule.""" return self._deny_access(req, id, body) @wsgi.action('os-access_list') def access_list(self, req, id, body): """List share access rules.""" return self._access_list(req, id, body) @wsgi.action('os-extend') def extend(self, req, id, body): """Extend size of a share.""" return self._extend(req, id, body) @wsgi.action('os-shrink') def shrink(self, req, id, body): """Shrink size of a share.""" return self._shrink(req, id, body) def create_resource(): return wsgi.Resource(ShareController())
{ "content_hash": "a543ed09d38ef70d9b7dc381cb14ae41", "timestamp": "", "source": "github", "line_count": 654, "max_line_length": 79, "avg_line_length": 40.22477064220183, "alnum_prop": 0.5707986467480138, "repo_name": "openstack/manila", "id": "8099b9a9fb57c2b65a09214182078e419f1a64dc", "size": "26929", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "manila/api/v1/shares.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Mako", "bytes": "953" }, { "name": "Python", "bytes": "12728998" }, { "name": "Shell", "bytes": "107601" } ], "symlink_target": "" }
from keras_cv.bounding_box.converters import _decode_deltas_to_boxes from keras_cv.bounding_box.converters import _encode_box_to_deltas from keras_cv.bounding_box.converters import convert_format from keras_cv.bounding_box.formats import CENTER_XYWH from keras_cv.bounding_box.formats import REL_XYXY from keras_cv.bounding_box.formats import REL_YXYX from keras_cv.bounding_box.formats import XYWH from keras_cv.bounding_box.formats import XYXY from keras_cv.bounding_box.formats import YXYX from keras_cv.bounding_box.iou import compute_iou from keras_cv.bounding_box.pad_batch_to_shape import pad_batch_to_shape from keras_cv.bounding_box.utils import add_class_id from keras_cv.bounding_box.utils import clip_to_image from keras_cv.bounding_box.utils import filter_sentinels from keras_cv.bounding_box.utils import pad_with_sentinels
{ "content_hash": "365ca509cc266fdbcf111f7a795f9beb", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 71, "avg_line_length": 55.86666666666667, "alnum_prop": 0.8257756563245824, "repo_name": "keras-team/keras-cv", "id": "f98b3f884ad7adfb0e323b381cf37e6357065118", "size": "1423", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "keras_cv/bounding_box/__init__.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C++", "bytes": "20378" }, { "name": "Dockerfile", "bytes": "420" }, { "name": "Jsonnet", "bytes": "967" }, { "name": "Jupyter Notebook", "bytes": "24377" }, { "name": "Python", "bytes": "1606495" }, { "name": "Shell", "bytes": "4249" }, { "name": "Smarty", "bytes": "535" }, { "name": "Starlark", "bytes": "10259" } ], "symlink_target": "" }
from kmip.core import enums from kmip.core import primitives class RequestPayload(primitives.Struct): """ An abstract base class for KMIP request payloads. """ def __init__(self): super(RequestPayload, self).__init__(enums.Tags.REQUEST_PAYLOAD) class ResponsePayload(primitives.Struct): """ An abstract base class for KMIP response payloads. """ def __init__(self): super(ResponsePayload, self).__init__(enums.Tags.RESPONSE_PAYLOAD)
{ "content_hash": "cd4c138fdf548519862cbc51e4b1bac6", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 74, "avg_line_length": 25.57894736842105, "alnum_prop": 0.676954732510288, "repo_name": "OpenKMIP/PyKMIP", "id": "cbfda03f19e4ccfca6c33a86a323fb3f312b4354", "size": "1132", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "kmip/core/messages/payloads/base.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "5552888" }, { "name": "Shell", "bytes": "1214" } ], "symlink_target": "" }
import random as r from gps import * import time import threading import zmq import zmq_ports as ports import zmq_topics as topic import delays gpsd = None # seting the global variable class GpsPoller(threading.Thread): def __init__(self): threading.Thread.__init__(self) global gpsd # bring it in scope gpsd = gps(mode=WATCH_ENABLE) # starting the stream of info self.current_value = None self.running = True # setting the thread running to true def run(self): global gpsd while gpsp.running: gpsd.next() # this will continue to loop and grab EACH set of gpsd info to clear the buffer class gpsdata(): def __init__(self): self.latitude=0 self.longitude=0 self.altitude=0 self.speed=0 self.climb=0 self.track=0 self.epx=0 self.epv=0 self.utc = 0 self.time = 0 self.mode = 0 self.nsatellites = 0 def joinDelimiter(self, arr): tmp=[None]*len(arr) for i in range(len(arr)): tmp[i]=str(arr[i]) return ",".join(tmp) def getRandomStrArr(self): pass def run(self): try: while True: self.latitude = gpsd.fix.latitude self.longitude = gpsd.fix.longitude self.altitude = gpsd.fix.altitude self.speed = gpsd.fix.speed self.climb = gpsd.fix.climb self.track = gpsd.fix.track self.epx = round(gpsd.fix.epx,1) self.epv = round(gpsd.fix.epv,1) self.utc = gpsd.utc self.time = gpsd.fix.time self.mode = gpsd.fix.mode self.nsatellites = len(gpsd.satellites) gps_publisher.send_string("%s %s" % (topic.GPS_TOPIC, self.getString())) time.sleep(delays.GPS_REFRESH_DELAY) except (KeyboardInterrupt, SystemExit): # when you press ctrl+c print("\nKilling Thread...") gpsp.running = False gpsp.join() print("Done.\nExiting.") def getString(self): return self.joinDelimiter( [self.latitude, self.longitude, self.altitude, self.speed, self.climb, self.track, self.epx, self.epv, self.mode, self.nsatellites]) # Update values if instance not doint reading with run() def setValues(self,string): self.latitude, self.longitude, self.altitude, self.speed, self.climb, self.track, self.epx, self.epv, \ self.mode, self.nsatellites = [float(x) for x in string.split(',')] self.mode = int(self.mode) self.nsatellites = int(self.nsatellites) if __name__ == '__main__': # Publisher context = zmq.Context() gps_publisher = context.socket(zmq.PUB) gps_publisher.bind("tcp://*:%s" % ports.GPS_PUB) # create, start thread gpsp = GpsPoller() gpsp.start() # mygps = gpsdata() mygps.run()
{ "content_hash": "f15f230207aa3bf3334c8c00038ed3b1", "timestamp": "", "source": "github", "line_count": 100, "max_line_length": 114, "avg_line_length": 30.24, "alnum_prop": 0.5724206349206349, "repo_name": "jeryfast/piflyer", "id": "b15677d8a8536b13aa08b1f8880bab7ad5c6a5d6", "size": "3024", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "piflyer/zmq_gps.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "11037" }, { "name": "Python", "bytes": "95834" } ], "symlink_target": "" }
""" config.py author: [email protected] ======================== This file acts as the configuration holder for the environment. If you want to define a global configuration please define it here. Naming convention: - Use capital letters. - If needed, use underscores ('_') as separators between words """ import os # Do not change these values! BASEDIR = os.path.abspath(os.path.dirname(__file__)) SQLALCHEMY_MIGRATE_REPO = os.path.join(BASEDIR, 'db_repository') # Add list of python libraries you wish to install on startup in this list # Example: # additional_packages = ['flask-mail','nose'] ADDITIONAL_PACKAGES = [] # Select the database connectivity that you wish to use. # THe current value defaults to sqlite #SQLALCHEMY_DATABASE_URI = 'mysql://root:[email protected]/flasklearn' # << use this for MySQL, adjust accordingly SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(BASEDIR, 'db/app.db') # << use this for SQLite, adjust accordingly #SQLALCHEMY_DATABASE_URI = 'postgresql://scott:tiger@localhost/mydatabase' #<< use this for postgresql, adjust accordingly #SQLALCHEMY_DATABASE_URI = 'oracle://scott:[email protected]:1521/sidname' #<< use this for oracle, adjust accordingly # This is the default server port settings that will be used by the system SERVER_PORT = 5000 #this is to determine the white space used in generating the controllers. You can change it accordinly to your preferance. WHITE_SPACE = "\t" # This variable will be used to check the valid data types enterred by the user in box.py -n command. VALID_DATA_TYPES = [ 'boolean', 'date', 'time', 'datetime', 'enum', 'interval', 'pickletype', 'schematype', 'numeric', 'float', 'biginteger', 'smallinteger', 'smallint', 'string', 'bigint','int','integer', 'text', 'unicode', 'unicodetext', 'binary', 'largebinary', 'blob' ] # end of file
{ "content_hash": "7e75133e22ed22ca41471356008832d7", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 122, "avg_line_length": 40.108695652173914, "alnum_prop": 0.71869918699187, "repo_name": "kevinmel2000/EmeraldBox", "id": "decf0e652e08bcca6ca82ec4d3164adb4b16e6e5", "size": "1845", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "config.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "18592" }, { "name": "HTML", "bytes": "4314" }, { "name": "JavaScript", "bytes": "682" }, { "name": "Python", "bytes": "154010" } ], "symlink_target": "" }
from azure.identity import DefaultAzureCredential from azure.mgmt.sql import SqlManagementClient """ # PREREQUISITES pip install azure-identity pip install azure-mgmt-sql # USAGE python delete_a_sync_agent.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, AZURE_CLIENT_SECRET. For more info about how to get the value, please see: https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal """ def main(): client = SqlManagementClient( credential=DefaultAzureCredential(), subscription_id="00000000-1111-2222-3333-444444444444", ) response = client.sync_agents.begin_delete( resource_group_name="syncagentcrud-65440", server_name="syncagentcrud-8475", sync_agent_name="syncagentcrud-3187", ).result() print(response) # x-ms-original-file: specification/sql/resource-manager/Microsoft.Sql/preview/2020-11-01-preview/examples/SyncAgentDelete.json if __name__ == "__main__": main()
{ "content_hash": "4628c53fed527427c43a2cfce848294a", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 127, "avg_line_length": 33.588235294117645, "alnum_prop": 0.7250437828371279, "repo_name": "Azure/azure-sdk-for-python", "id": "a4d3ba76a2c0a45015e7de51a61a9237c0d39be4", "size": "1610", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "sdk/sql/azure-mgmt-sql/generated_samples/delete_a_sync_agent.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1224" }, { "name": "Bicep", "bytes": "24196" }, { "name": "CSS", "bytes": "6089" }, { "name": "Dockerfile", "bytes": "4892" }, { "name": "HTML", "bytes": "12058" }, { "name": "JavaScript", "bytes": "8137" }, { "name": "Jinja", "bytes": "10377" }, { "name": "Jupyter Notebook", "bytes": "272022" }, { "name": "PowerShell", "bytes": "518535" }, { "name": "Python", "bytes": "715484989" }, { "name": "Shell", "bytes": "3631" } ], "symlink_target": "" }
import cards import hands import pickle from random import * # gets indices from hand of size 2 def getIndicesFromHand(hand): first = hand.cards[0] second = hand.cards[1] suited = False if first.suit == second.suit: suited = True firstval = first.get_value() secondval = second.get_value() if (firstval > secondval and suited) or (firstval < secondval and not suited): return [firstval - 1, secondval - 1] else: return [secondval - 1, firstval - 1] # creates monte carlo simulation, lower left triangular piece of matrix is signed, upper right is unsigned def simulate(filename = "preflop_values", trials = 0): # holds card combos and results in a vector of [#wins, #ties, #losses] #mat = [] #for i in range(13): # tmat = [] # for j in range(13): # tmat.append([0,0,0]) # mat.append(tmat) mat = pickle.load(open(filename, "rb")) for i in range(trials): theDeck = cards.Deck() theDeck.shuffle() herohand = cards.Hand() adversaryhand = cards.Hand() for j in range(2): herohand.add_card(theDeck.deal_card()) adversaryhand.add_card(theDeck.deal_card()) indices = getIndicesFromHand(herohand) table = cards.Hand() # 5 cards on table for j in range(5): table.add_card(theDeck.deal_card()) result = hands.compare_hands(herohand, adversaryhand, table) if result == 'left': mat[indices[0]][indices[1]][0] += 1 elif result == 'none': mat[indices[0]][indices[1]][1] += 1 elif result == 'right': mat[indices[0]][indices[1]][2] += 1 pickle.dump(mat, open(filename, "wb")) def printMatrix(filename = "preflop_values"): mat = pickle.load(open(filename, "rb")) print mat # returns [% chance of win, % chance of push, % chance of loss] def getPreflopStrength(hand, filename = "preflop_values"): mat = pickle.load(open(filename, "rb")) indices = getIndicesFromHand(hand) chances = mat[indices[0]][indices[1]] s = chances[0] + chances[1] + chances[2] return [chances[0] / float(s), chances[1] / float(s), chances[2] / float(s)] #simulate("preflop_values", 2000000) #printMatrix() #for i in range(20): # theDeck = cards.Deck() # theDeck.shuffle() # hand = cards.Hand() # hand.add_card(theDeck.deal_card()) # hand.add_card(theDeck.deal_card()) # print hand # print getPreflopStrength(hand) #hand = cards.Hand() #hand.add_card(cards.Card('C','K')) #hand.add_card(cards.Card('D','K')) #print 'getting preflop for this' #print getPreflopStrength(hand)
{ "content_hash": "ebc1e2eed7e7025387087816eaeaca77", "timestamp": "", "source": "github", "line_count": 91, "max_line_length": 106, "avg_line_length": 27.65934065934066, "alnum_prop": 0.6519666269368296, "repo_name": "pmaddi/CPSC458_Final-Project", "id": "8f462610ab5501f2c57b897d3884e5502531f064", "size": "2517", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "preflop_sim.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "56157" } ], "symlink_target": "" }
import sys import os SPARK_HOME="SPARK_HOME" try: import pyspark except ImportError: if SPARK_HOME in os.environ: spark_home = os.environ.get(SPARK_HOME) pyspark_path = "%s/python" % spark_home sys.path.insert(1, pyspark_path) else: raise Exception("Required Environment variable %s not set" % SPARK_HOME) from tkcontext import TkContext from sparkconf import create_sc import dtypes from sparktk.loggers import loggers from sparktk.frame.ops.inspect import inspect_settings
{ "content_hash": "2afea8455d3f382f94344f140946a4b1", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 80, "avg_line_length": 22.82608695652174, "alnum_prop": 0.72, "repo_name": "ashaarunkumar/spark-tk", "id": "306b8a3e2072daf5bd123a035738536159d78e79", "size": "1230", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "python/sparktk/__init__.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "11509" }, { "name": "Python", "bytes": "1310665" }, { "name": "Scala", "bytes": "1492283" }, { "name": "Shell", "bytes": "22007" } ], "symlink_target": "" }
from __future__ import absolute_import from concurrent.futures import ThreadPoolExecutor from unittest import TestCase import asyncio from opentracing.scope_managers.contextvars import ContextVarsScopeManager from opentracing.harness.scope_check import ScopeCompatibilityCheckMixin class AsyncioContextVarsCompabilityCheck( TestCase, ScopeCompatibilityCheckMixin ): def scope_manager(self): return ContextVarsScopeManager() def run_test(self, test_fn): @asyncio.coroutine def async_test_fn(): test_fn() asyncio.get_event_loop().run_until_complete(async_test_fn()) def test_no_event_loop(self): # no event loop exists by default in # new threads, so make sure we don't fail there. def test_fn(): manager = self.scope_manager() assert manager.active is None executor = ThreadPoolExecutor(max_workers=1) executor.submit(test_fn).result()
{ "content_hash": "3809e1e9532c2d906430871282d009ec", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 74, "avg_line_length": 29.333333333333332, "alnum_prop": 0.7004132231404959, "repo_name": "opentracing/opentracing-python", "id": "dd1820fe3d06143c238f647b40352809b41838cc", "size": "2066", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/scope_managers/test_contextvars.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Makefile", "bytes": "2598" }, { "name": "Python", "bytes": "243543" } ], "symlink_target": "" }
import multiprocessing import multiprocessing.pool import sys import os import torch from .bound_ops import * def build_solver_module(self, x=None, C=None, intermediate_layer_bounds=None, final_node_name=None, model_type="mip", solver_pkg="gurobi"): r"""build lp/mip solvers in general graph. Args: x: inputs, a list of BoundedTensor. If set to None, we reuse exisint bounds that were previously computed in compute_bounds(). C (Tensor): The specification matrix that can map the output of the model with an additional linear layer. This is usually used for maping the logits output of the model to classification margins. intermediate_layer_bounds: if specified, will replace existing intermediate layer bounds. Otherwise we reuse exising intermediate bounds. final_node_name (String): the name for the target layer to optimize solver_pkg (String): the backbone of the solver, default gurobi, also support scipy Returns: output vars (list): a list of final nodes to optimize """ # self.root_name: list of root node name # self.final_name: list of output node name # self.final_node: output module # <module>.input: a list of input modules of this layer module # <module>.solver_vars: a list of gurobi vars of every layer module # list with conv shape if conv layers, otherwise flattened # if last layer we need to be careful with: # C: specification matrix # <module>.is_input_perturbed(1) if x is not None: assert intermediate_layer_bounds is not None # Set the model to use new intermediate layer bounds, ignore the original ones. self._set_input(x, intermediate_layer_bounds=intermediate_layer_bounds) root = [self[name] for name in self.root_name] # create interval ranges for input and other weight parameters for i in range(len(root)): value = root[i].forward() # if isinstance(root[i], BoundInput) and not isinstance(root[i], BoundParams): if type(root[i]) is BoundInput: # create input vars for gurobi self.model inp_gurobi_vars = self._build_solver_input(root[i]) else: # regular weights root[i].solver_vars = value final = self.final_node() if final_node_name is None else self[final_node_name] # backward propagate every layer including last layer self._build_solver_general(node=final, C=C, model_type=model_type, solver_pkg=solver_pkg) # a list of output solver vars return final.solver_vars def _build_solver_general(self, node, C=None, model_type="mip", solver_pkg="gurobi"): if not hasattr(node, 'solver_vars'): for n in node.inputs: self._build_solver_general(n, C=C, model_type=model_type, solver_pkg=solver_pkg) inp = [n_pre.solver_vars for n_pre in node.inputs] # print(node, node.inputs) if C is not None and isinstance(node, BoundLinear) and\ not node.is_input_perturbed(1) and self.final_name == node.name: # when node is the last layer # merge the last BoundLinear node with the specification, # available when weights of this layer are not perturbed solver_vars = node.build_solver(*inp, model=self.model, C=C, model_type=model_type, solver_pkg=solver_pkg) else: solver_vars = node.build_solver(*inp, model=self.model, C=None, model_type=model_type, solver_pkg=solver_pkg) # just return output node gurobi vars return solver_vars def _build_solver_input(self, node): ## Do the input layer, which is a special case assert isinstance(node, BoundInput) assert node.perturbation is not None assert node.perturbation.norm == float("inf") inp_gurobi_vars = [] # zero var will be shared within the solver model zero_var = self.model.addVar(lb=0, ub=0, obj=0, vtype=grb.GRB.CONTINUOUS, name='zero') x_L = node.value - node.perturbation.eps if node.perturbation.x_L is None else node.perturbation.x_L x_U = node.value + node.perturbation.eps if node.perturbation.x_U is None else node.perturbation.x_U x_L = x_L.squeeze(0) x_U = x_U.squeeze(0) # x_L, x_U = node.lower.squeeze(0), node.upper.squeeze(0) if x_L.ndim == 1: # This is a linear input. for dim, (lb, ub) in enumerate(zip(x_L, x_U)): v = self.model.addVar(lb=lb, ub=ub, obj=0, vtype=grb.GRB.CONTINUOUS, name=f'inp_{dim}') inp_gurobi_vars.append(v) else: assert x_L.ndim == 3, f"x_L ndim {x_L.ndim}" dim = 0 for chan in range(x_L.shape[0]): chan_vars = [] for row in range(x_L.shape[1]): row_vars = [] for col in range(x_L.shape[2]): lb = x_L[chan, row, col] ub = x_U[chan, row, col] v = self.model.addVar(lb=lb, ub=ub, obj=0, vtype=grb.GRB.CONTINUOUS, name=f'inp_{dim}') # name=f'inp_[{chan},{row},{col}]') row_vars.append(v) dim += 1 chan_vars.append(row_vars) inp_gurobi_vars.append(chan_vars) node.solver_vars = inp_gurobi_vars # save the gurobi input variables so that we can later extract primal values in input space easily self.input_vars = inp_gurobi_vars self.model.update() return inp_gurobi_vars
{ "content_hash": "d486811de722afd8280b453eb73c4503", "timestamp": "", "source": "github", "line_count": 132, "max_line_length": 104, "avg_line_length": 43.5, "alnum_prop": 0.6088470916057123, "repo_name": "KaidiXu/auto_LiRPA", "id": "9720ce552b5fd1f4e52bb3f372e9fd4281f21d16", "size": "5742", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "auto_LiRPA/solver_module.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C++", "bytes": "981" }, { "name": "Cuda", "bytes": "1234" }, { "name": "Python", "bytes": "699644" } ], "symlink_target": "" }
class HCException(Exception): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) class LocatorException(HCException): pass class NoSuchFileAttachmentError(HCException): pass class NoSuchMemberException(HCException): pass class NoSuchTagException(HCException): pass class NoSuchUserException(HCException): pass class TimeoutException(HCException): pass class ProxyPortError(HCException): pass class ConnectionClosedError(HCException): """ Raise this exception if the connection closed unexpectedly """ pass class ExitCodeError(HCException): """ Raise this exception if the exit code of running a command is not 0 """ pass class NavigationError(HCException): """ Raise this exception if there was trouble navigating the web browser. """ pass class FormSubmissionError(HCException): """ Raise this exception if there was trouble submitting a web form. """ pass class CatalogError(HCException): """ Raise this exception if there was trouble using the page object catalog. """ pass class SessionCreateError(HCException): """ Raise this exception if there was trouble creating a tool session container. """ pass
{ "content_hash": "b08894cff2ea3ea3d0362d367071d627", "timestamp": "", "source": "github", "line_count": 69, "max_line_length": 80, "avg_line_length": 19.014492753623188, "alnum_prop": 0.694359756097561, "repo_name": "codedsk/hubcheck", "id": "2ce643e1a97a66fc5d535152f82a1689ec3767c2", "size": "1342", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "hubcheck/exceptions.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "1248" }, { "name": "Makefile", "bytes": "846" }, { "name": "Python", "bytes": "1355360" }, { "name": "Shell", "bytes": "1483" } ], "symlink_target": "" }
__author__ = 'Christoph Jansen'
{ "content_hash": "d7526f2d560cd5750cf30ee5a746814c", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 31, "avg_line_length": 33, "alnum_prop": 0.6363636363636364, "repo_name": "PandoIO/brocas-lm", "id": "18ed8ce3160b3d7ca4f760f0405e8e9fcfe40367", "size": "33", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "brocas_lm/__init__.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "17007" } ], "symlink_target": "" }
import json import pecan from oslo_log import log from oslo_utils.strutils import bool_from_string from pecan.core import abort from vitrage.api.controllers.rest import RootRestController from vitrage.api.policy import enforce LOG = log.getLogger(__name__) # noinspection PyBroadException class AlarmCountsController(RootRestController): @pecan.expose('json') def index(self, all_tenants=False): return self.get(all_tenants) @pecan.expose('json') def get(self, all_tenants=False): all_tenants = bool_from_string(all_tenants) if all_tenants: enforce("get alarms count:all_tenants", pecan.request.headers, pecan.request.enforcer, {}) else: enforce("get alarms count", pecan.request.headers, pecan.request.enforcer, {}) LOG.info('received get alarm counts') try: alarm_counts_json = pecan.request.client.call( pecan.request.context, 'get_alarm_counts', all_tenants=all_tenants) return json.loads(alarm_counts_json) except Exception: LOG.exception('failed to get alarm count.') abort(404, 'Failed to get alarm count.') class ResourceCountsController(RootRestController): @pecan.expose('json') def post(self, **kwargs): resource_type = kwargs.get('resource_type', None) all_tenants = kwargs.get('all_tenants', False) all_tenants = bool_from_string(all_tenants) query = kwargs.get('query') group_by = kwargs.get('group_by') if query: query = json.loads(query) if all_tenants: enforce("count resources:all_tenants", pecan.request.headers, pecan.request.enforcer, {}) else: enforce("count resources", pecan.request.headers, pecan.request.enforcer, {}) LOG.info('received get resource counts') try: resource_counts_json = pecan.request.client.call( pecan.request.context, 'count_resources', resource_type=resource_type, all_tenants=all_tenants, query=query, group_by=group_by) return json.loads(resource_counts_json) except Exception: LOG.exception('failed to get resource count.') abort(404, 'Failed to get resource count.')
{ "content_hash": "f29a126bc4ee42112c6aacb608844295", "timestamp": "", "source": "github", "line_count": 79, "max_line_length": 74, "avg_line_length": 31.050632911392405, "alnum_prop": 0.6061964940888708, "repo_name": "openstack/vitrage", "id": "e07750d4d38872a6f9b6397b16d8f9c14ad32753", "size": "2999", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vitrage/api/controllers/v1/count.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "26541" }, { "name": "Mako", "bytes": "896" }, { "name": "Python", "bytes": "2074427" }, { "name": "Shell", "bytes": "17668" } ], "symlink_target": "" }
import os import webapp2 import jinja2 import webapp2 import json from google.appengine.ext import ndb JINJA_ENVIRONMENT = jinja2.Environment( loader=jinja2.FileSystemLoader(os.path.dirname(__file__)), extensions=['jinja2.ext.autoescape'], autoescape=True) class UserRating(ndb.Model): username = ndb.StringProperty() ratings = ndb.JsonProperty() class MainPage(webapp2.RequestHandler): def get(self): self.response.headers['Content-Type'] = 'text/html' template = JINJA_ENVIRONMENT.get_template('index.html') template_value = { 'value': 8} self.response.write(template.render(template_value)) class RatingPage(webapp2.RequestHandler): def get(self): self.response.headers['Content-Type'] = 'text/html' template = JINJA_ENVIRONMENT.get_template('index.html') template_value = { 'value': 8} self.response.write('dammit') def post(self): self.response.headers['Content-Type'] = 'text/html' template = JINJA_ENVIRONMENT.get_template('index.html') template_value = { 'value': 8} json_data = json.loads(self.request.body) json_data['stuff'] = "marcos" self.response.write(json.dumps(json_data)) class APIPage(webapp2.RequestHandler): def get(self): self.response.headers['Content-Type'] = 'application/json' id = 6473924464345088 user = ndb.Key(UserRating, id).get() name = user.username self.response.write(json.dumps(name)) def post(self): self.response.headers['Content-Type'] = 'application/json' #TODO:// Make this more secure json_data = json.loads(self.request.body) user_rating = UserRating() user_rating.username = json_data['username'] user_rating.ratings = json_data['ratings'] user_key = user_rating.put() self.response.write('{"user_key":"' + str(user_key.id()) +'"}') class RatingsPage(webapp2.RequestHandler): def get(self): self.response.headers['Content-Type'] = 'application/json' all_ratings = UserRating.query().fetch() result = {} result['stuff'] = [x.ratings for x in all_ratings] self.response.write(json.dumps(result)) def delete(self): ndb.delete_multi(UserRating.query().fetch(keys_only=True)) application = webapp2.WSGIApplication([ ('/', MainPage), ('/api/rating', RatingPage), ('/api/test', APIPage), ('/api/ratings', RatingsPage), ], debug=True)
{ "content_hash": "e525f6bb294612d3602f9157deff7b8e", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 71, "avg_line_length": 32.8051948051948, "alnum_prop": 0.6417260490894695, "repo_name": "msavoury/machine-learning", "id": "8a266198753a3ad1b5e8e439942aba2aadc4f762", "size": "2526", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "apps/rateawatch/rateawatch.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "350" }, { "name": "HTML", "bytes": "6993" }, { "name": "JavaScript", "bytes": "2603" }, { "name": "Python", "bytes": "2526" } ], "symlink_target": "" }
from oslo_log import log as logging from vitrage.common.constants import DatasourceProperties as DSProps from vitrage.common.constants import EdgeLabel from vitrage.common.constants import EntityCategory as ECategory from vitrage.common.constants import VertexProperties as VProps from vitrage.datasources.alarm_transformer_base import AlarmTransformerBase from vitrage.datasources.prometheus import PROMETHEUS_DATASOURCE from vitrage.datasources.prometheus.properties import get_alarm_update_time from vitrage.datasources.prometheus.properties import get_label from vitrage.datasources.prometheus.properties import PrometheusAlertLabels \ as PAlertLabels from vitrage.datasources.prometheus.properties \ import PrometheusAlertProperties as PProps from vitrage.datasources.prometheus.properties import PrometheusAlertStatus \ as PAlertStatus from vitrage.datasources.prometheus.properties \ import PrometheusDatasourceProperties as PDProps from vitrage.datasources import transformer_base as tbase import vitrage.graph.utils as graph_utils LOG = logging.getLogger(__name__) class PrometheusTransformer(AlarmTransformerBase): def _create_snapshot_entity_vertex(self, entity_event): return self._create_vertex(entity_event) def _create_update_entity_vertex(self, entity_event): return self._create_vertex(entity_event) def _create_vertex(self, entity_event): metadata = { VProps.NAME: get_label(entity_event, PAlertLabels.ALERT_NAME), VProps.SEVERITY: get_label(entity_event, PAlertLabels.SEVERITY), PProps.STATUS: entity_event.get(PProps.STATUS), } return graph_utils.create_vertex( self._create_entity_key(entity_event), vitrage_category=ECategory.ALARM, vitrage_type=entity_event[DSProps.ENTITY_TYPE], vitrage_sample_timestamp=entity_event[DSProps.SAMPLE_DATE], entity_state=self._get_alarm_state(entity_event), update_timestamp=get_alarm_update_time(entity_event), metadata=metadata ) def _create_snapshot_neighbors(self, entity_event): return self._create_prometheus_neighbors(entity_event) def _create_update_neighbors(self, entity_event): return self._create_prometheus_neighbors(entity_event) def _create_prometheus_neighbors(self, entity_event): graph_neighbors = entity_event.get(self.QUERY_RESULT, []) return [self._create_neighbor(entity_event, graph_neighbor[VProps.ID], graph_neighbor[VProps.VITRAGE_TYPE], EdgeLabel.ON, neighbor_category=ECategory.RESOURCE) for graph_neighbor in graph_neighbors] def _create_entity_key(self, entity_event): return tbase.build_key((ECategory.ALARM, entity_event[DSProps.ENTITY_TYPE], get_label(entity_event, PAlertLabels.ALERT_NAME), str(entity_event.get( PDProps.ENTITY_UNIQUE_PROPS)))) def get_vitrage_type(self): return PROMETHEUS_DATASOURCE def _ok_status(self, entity_event): return entity_event and \ PAlertStatus.RESOLVED == entity_event.get(PProps.STATUS) @staticmethod def get_enrich_query(event): LOG.debug('event for enrich query: %s', event) entity_unique_props = event.get(PDProps.ENTITY_UNIQUE_PROPS) return entity_unique_props
{ "content_hash": "56599abcdb255288ca69c9bb7a3786f3", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 77, "avg_line_length": 44.144578313253014, "alnum_prop": 0.6711244541484717, "repo_name": "openstack/vitrage", "id": "3fcac0a51856c0a959360caaefe7fd3e5de846e6", "size": "4237", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vitrage/datasources/prometheus/transformer.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "26541" }, { "name": "Mako", "bytes": "896" }, { "name": "Python", "bytes": "2074427" }, { "name": "Shell", "bytes": "17668" } ], "symlink_target": "" }
__author__ = 'kkennedy' import labtronyx from .c_base import PluginController class InterfaceController(PluginController): def __init__(self, c_manager, model): super(InterfaceController, self).__init__(c_manager, model) def refresh(self): self.model.refresh()
{ "content_hash": "e8ac3136fbee2f572a876da14e04845b", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 67, "avg_line_length": 24, "alnum_prop": 0.6909722222222222, "repo_name": "protonyx/labtronyx", "id": "72e781b202d962cd52e1a1da297454e373e4192f", "size": "288", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "labtronyx/gui/controllers/c_interface.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "440210" } ], "symlink_target": "" }
import os from .controllers.{{ label }} import {{ class_name }} def add_template_dir(app): path = os.path.join(os.path.dirname(__file__), 'templates') app.add_template_dir(path) def load(app): app.handler.register({{ class_name }}) app.hook.register('post_setup', add_template_dir)
{ "content_hash": "dd077765b7eb0f2c122cd1704c2b1e98", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 63, "avg_line_length": 30, "alnum_prop": 0.6633333333333333, "repo_name": "datafolklabs/cement", "id": "c65e07268c85923814df032ecce4c815ae632b53", "size": "301", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "cement/cli/templates/generate/plugin/{{ label }}/__init__.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "7083" }, { "name": "Dockerfile", "bytes": "1081" }, { "name": "Handlebars", "bytes": "113" }, { "name": "Jinja", "bytes": "481" }, { "name": "Makefile", "bytes": "2767" }, { "name": "Mustache", "bytes": "120" }, { "name": "PowerShell", "bytes": "4675" }, { "name": "Python", "bytes": "1153527" }, { "name": "Shell", "bytes": "6842" }, { "name": "Vim Script", "bytes": "38" } ], "symlink_target": "" }
import os import sys import math as m import numpy as np import astropy.io.fits as pf from util_PPC import log_wr from util_PPC import log_fail from util_PPC import fail_not_exists from util_PPC import DataManager from util_RM import do_rmclean # Constants C = 2.99792458e8 #-----------------------------------------------------------------------------# def mod_do_RMclean(specRec, sessionPath, cleanCutoff_sigma=5, maxIter=1000, gain=0.1, doOverwrite=False, LF=None): # Default logging to STDOUT if LF is None: LF = sys.stdout # Check required directories exist fail_not_exists(sessionPath, 'directory', LF) dataPath = sessionPath + "/OUT" fail_not_exists(dataPath, 'directory', LF) inParmFile = sessionPath + "/inputs.config" fail_not_exists(inParmFile, "file", LF) # Create a recArray to store the clean properties dType = [('uniqueName', 'a20'), ('nIterDone', 'i8'), ('cleanCutoff_sigma', 'f8'), ('cleanCutoff_Jybm', 'f8')] cleanRec = np.zeros(len(specRec), dtype=dType) # Create a DataManager object to access the stored data products dataMan = DataManager(sessionPath, calcParms=False) cleanCutoff_sigma = float(dataMan.pDict["cleanCutoff_sigma"]) maxIter = int(dataMan.pDict["maxCleanIter"]) gain = float(dataMan.pDict["gain"]) # Loop through the catalogue entries log_wr(LF, '\nPerforming RM-clean on the catalogue entries ...') for i in range(len(specRec)): uniqueName = specRec[i]['uniqueName'] log_wr(LF, "\nProcessing entry %d: '%s'." % (i+1, uniqueName)) # Read in the dirty FDF, RMSF, frequency and weight arrays phiArr, dirtyFDF = dataMan.get_dirtyFDF_byname(uniqueName) RMSFphiArr, RMSFArr = dataMan.get_RMSF_byname(uniqueName) freqArr_Hz, weightArr = dataMan.get_freqweight_byname(uniqueName) nFreqChan = len(freqArr_Hz) # Calculate the lamSqArr lamArr_m = C / freqArr_Hz lamSqArr_m2 = np.power(lamArr_m, 2.0) # Calculate the clean cutoff cleanCutoff_Jybm = (cleanCutoff_sigma * specRec[i]['rmsMedQUAvg_Jybm'] / m.sqrt(nFreqChan)) LF.write('> Using cutoff = %.3f mJy.\n' % (cleanCutoff_Jybm*1e3)) # Run the RM-clean procedure cleanFDF, ccModel, fwhmRMSF, nIter = do_rmclean(dirtyFDF, phiArr, lamSqArr_m2, cleanCutoff_Jybm, maxIter, gain, weightArr, RMSFArr, RMSFphiArr, doPlots=False) LF.write('> CLEANed dirty FDF in %d iterations.\n' % nIter) # Save the clean FDF and CC model to the FITS file dataMan.put_cleanFDF_byname(uniqueName, CC=ccModel, cleanFDF=cleanFDF) log_wr(LF, '> Clean FDF and CC model saved to FITS files.') # Write flags and metadata to the record array cleanRec[i]['uniqueName'] = specRec[i]['uniqueName'] cleanRec[i]['nIterDone'] = nIter cleanRec[i]['cleanCutoff_sigma'] = cleanCutoff_sigma cleanRec[i]['cleanCutoff_Jybm'] = cleanCutoff_Jybm return cleanRec
{ "content_hash": "2bb6a602bce34f32430a717a611d5d6f", "timestamp": "", "source": "github", "line_count": 90, "max_line_length": 79, "avg_line_length": 40.51111111111111, "alnum_prop": 0.5383982446516731, "repo_name": "crpurcell/RMpipeL5", "id": "7ed5cd2539a9635d806a5fb246acfbd6516e4d67", "size": "6630", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Imports/module_RM_clean.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "584852" } ], "symlink_target": "" }
import os, time from Xlib.display import Display from Xlib.ext import xtest from Xlib import X if "DISPLAY" in os.environ: displayStr = os.environ["DISPLAY"] else: displayStr = ":0" display = Display( displayStr ) screen = display.screen() screen.root.warp_pointer(50,100) print "moved mouse to 50,50" #print "display:", dir(display) #print #print "screen:", dir(screen) #print #print "root window:", dir(screen.root) #print screen.root.get_geometry() # help(screen.root) # Xlib.protocol.display.py lists properties like width_in_pixels. print "Resolution:", screen.width_in_pixels, screen.height_in_pixels display.sync() xtest.fake_input(screen.root, X.ButtonPress, detail=1, x=50, y=100) display.sync() xtest.fake_input(screen.root, X.ButtonRelease, detail=1, x=55, y=120) display.sync()
{ "content_hash": "758d4714da1914d1150a8e25709bc6a6", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 69, "avg_line_length": 23.705882352941178, "alnum_prop": 0.7320099255583127, "repo_name": "rpwagner/tiled-display", "id": "7050f318acd06896de198efd2c7a3a8c4b10122a", "size": "807", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "flWii/xclick.py", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl import app from absl import flags import numpy as np import os import sys import tensorflow.compat.v2 as tf tf.compat.v1.enable_v2_behavior() import pickle from tf_agents.environments import gym_wrapper from tf_agents.environments import tf_py_environment from dice_rl.environments.env_policies import get_target_policy import dice_rl.environments.gridworld.navigation as navigation import dice_rl.environments.gridworld.tree as tree import dice_rl.environments.gridworld.taxi as taxi from dice_rl.estimators.neural_dice import NeuralDice from dice_rl.estimators import estimator as estimator_lib from dice_rl.networks.value_network import ValueNetwork import dice_rl.utils.common as common_utils from dice_rl.data.dataset import Dataset, EnvStep, StepType from dice_rl.data.tf_offpolicy_dataset import TFOffpolicyDataset FLAGS = flags.FLAGS flags.DEFINE_string('load_dir', None, 'Directory to load dataset from.') flags.DEFINE_string('save_dir', None, 'Directory to save the model and estimation results.') flags.DEFINE_string('env_name', 'grid', 'Environment name.') flags.DEFINE_integer('seed', 0, 'Initial random seed.') flags.DEFINE_bool('tabular_obs', False, 'Whether to use tabular observations.') flags.DEFINE_integer('num_trajectory', 1000, 'Number of trajectories to collect.') flags.DEFINE_integer('max_trajectory_length', 40, 'Cutoff trajectory at this step.') flags.DEFINE_float('alpha', 0.0, 'How close to target policy.') flags.DEFINE_float('gamma', 0.99, 'Discount factor.') flags.DEFINE_float('nu_learning_rate', 0.0001, 'Learning rate for nu.') flags.DEFINE_float('zeta_learning_rate', 0.0001, 'Learning rate for zeta.') flags.DEFINE_float('nu_regularizer', 0.0, 'Ortho regularization on nu.') flags.DEFINE_float('zeta_regularizer', 0.0, 'Ortho regularization on zeta.') flags.DEFINE_integer('num_steps', 100000, 'Number of training steps.') flags.DEFINE_integer('batch_size', 2048, 'Batch size.') flags.DEFINE_float('f_exponent', 2, 'Exponent for f function.') flags.DEFINE_bool('primal_form', False, 'Whether to use primal form of loss for nu.') flags.DEFINE_float('primal_regularizer', 0., 'LP regularizer of primal variables.') flags.DEFINE_float('dual_regularizer', 1., 'LP regularizer of dual variables.') flags.DEFINE_bool('zero_reward', False, 'Whether to ignore reward in optimization.') flags.DEFINE_float('norm_regularizer', 1., 'Weight of normalization constraint.') flags.DEFINE_bool('zeta_pos', True, 'Whether to enforce positivity constraint.') flags.DEFINE_float('scale_reward', 1., 'Reward scaling factor.') flags.DEFINE_float('shift_reward', 0., 'Reward shift factor.') flags.DEFINE_string( 'transform_reward', None, 'Non-linear reward transformation' 'One of [exp, cuberoot, None]') def main(argv): load_dir = FLAGS.load_dir save_dir = FLAGS.save_dir env_name = FLAGS.env_name seed = FLAGS.seed tabular_obs = FLAGS.tabular_obs num_trajectory = FLAGS.num_trajectory max_trajectory_length = FLAGS.max_trajectory_length alpha = FLAGS.alpha gamma = FLAGS.gamma nu_learning_rate = FLAGS.nu_learning_rate zeta_learning_rate = FLAGS.zeta_learning_rate nu_regularizer = FLAGS.nu_regularizer zeta_regularizer = FLAGS.zeta_regularizer num_steps = FLAGS.num_steps batch_size = FLAGS.batch_size f_exponent = FLAGS.f_exponent primal_form = FLAGS.primal_form primal_regularizer = FLAGS.primal_regularizer dual_regularizer = FLAGS.dual_regularizer zero_reward = FLAGS.zero_reward norm_regularizer = FLAGS.norm_regularizer zeta_pos = FLAGS.zeta_pos scale_reward = FLAGS.scale_reward shift_reward = FLAGS.shift_reward transform_reward = FLAGS.transform_reward def reward_fn(env_step): reward = env_step.reward * scale_reward + shift_reward if transform_reward is None: return reward if transform_reward == 'exp': reward = tf.math.exp(reward) elif transform_reward == 'cuberoot': reward = tf.sign(reward) * tf.math.pow(tf.abs(reward), 1.0 / 3.0) else: raise ValueError('Reward {} not implemented.'.format(transform_reward)) return reward hparam_str = ('{ENV_NAME}_tabular{TAB}_alpha{ALPHA}_seed{SEED}_' 'numtraj{NUM_TRAJ}_maxtraj{MAX_TRAJ}').format( ENV_NAME=env_name, TAB=tabular_obs, ALPHA=alpha, SEED=seed, NUM_TRAJ=num_trajectory, MAX_TRAJ=max_trajectory_length) train_hparam_str = ( 'nlr{NLR}_zlr{ZLR}_zeror{ZEROR}_preg{PREG}_dreg{DREG}_nreg{NREG}_' 'pform{PFORM}_fexp{FEXP}_zpos{ZPOS}_' 'scaler{SCALER}_shiftr{SHIFTR}_transr{TRANSR}').format( NLR=nu_learning_rate, ZLR=zeta_learning_rate, ZEROR=zero_reward, PREG=primal_regularizer, DREG=dual_regularizer, NREG=norm_regularizer, PFORM=primal_form, FEXP=f_exponent, ZPOS=zeta_pos, SCALER=scale_reward, SHIFTR=shift_reward, TRANSR=transform_reward) if save_dir is not None: save_dir = os.path.join(save_dir, hparam_str, train_hparam_str) summary_writer = tf.summary.create_file_writer(logdir=save_dir) summary_writer.set_as_default() else: tf.summary.create_noop_writer() directory = os.path.join(load_dir, hparam_str) print('Loading dataset from', directory) dataset = Dataset.load(directory) all_steps = dataset.get_all_steps() max_reward = tf.reduce_max(all_steps.reward) min_reward = tf.reduce_min(all_steps.reward) print('num loaded steps', dataset.num_steps) print('num loaded total steps', dataset.num_total_steps) print('num loaded episodes', dataset.num_episodes) print('num loaded total episodes', dataset.num_total_episodes) print('min reward', min_reward, 'max reward', max_reward) print('behavior per-step', estimator_lib.get_fullbatch_average(dataset, gamma=gamma)) target_dataset = Dataset.load( directory.replace('alpha{}'.format(alpha), 'alpha1.0')) print('target per-step', estimator_lib.get_fullbatch_average(target_dataset, gamma=1.)) activation_fn = tf.nn.relu kernel_initializer = tf.keras.initializers.GlorotUniform() hidden_dims = (64, 64) input_spec = (dataset.spec.observation, dataset.spec.action) nu_network = ValueNetwork( input_spec, fc_layer_params=hidden_dims, activation_fn=activation_fn, kernel_initializer=kernel_initializer, last_kernel_initializer=kernel_initializer) output_activation_fn = tf.math.square if zeta_pos else tf.identity zeta_network = ValueNetwork( input_spec, fc_layer_params=hidden_dims, activation_fn=activation_fn, output_activation_fn=output_activation_fn, kernel_initializer=kernel_initializer, last_kernel_initializer=kernel_initializer) nu_optimizer = tf.keras.optimizers.Adam(nu_learning_rate, clipvalue=1.0) zeta_optimizer = tf.keras.optimizers.Adam(zeta_learning_rate, clipvalue=1.0) lam_optimizer = tf.keras.optimizers.Adam(nu_learning_rate, clipvalue=1.0) estimator = NeuralDice( dataset.spec, nu_network, zeta_network, nu_optimizer, zeta_optimizer, lam_optimizer, gamma, zero_reward=zero_reward, f_exponent=f_exponent, primal_form=primal_form, reward_fn=reward_fn, primal_regularizer=primal_regularizer, dual_regularizer=dual_regularizer, norm_regularizer=norm_regularizer, nu_regularizer=nu_regularizer, zeta_regularizer=zeta_regularizer) global_step = tf.Variable(0, dtype=tf.int64) tf.summary.experimental.set_step(global_step) target_policy = get_target_policy(load_dir, env_name, tabular_obs) running_losses = [] running_estimates = [] for step in range(num_steps): transitions_batch = dataset.get_step(batch_size, num_steps=2) initial_steps_batch, _ = dataset.get_episode( batch_size, truncate_episode_at=1) initial_steps_batch = tf.nest.map_structure(lambda t: t[:, 0, ...], initial_steps_batch) losses = estimator.train_step(initial_steps_batch, transitions_batch, target_policy) running_losses.append(losses) if step % 500 == 0 or step == num_steps - 1: estimate = estimator.estimate_average_reward(dataset, target_policy) running_estimates.append(estimate) running_losses = [] global_step.assign_add(1) print('Done!') if __name__ == '__main__': app.run(main)
{ "content_hash": "e6d0c8c921488590f5238d2f986b5c59", "timestamp": "", "source": "github", "line_count": 228, "max_line_length": 80, "avg_line_length": 38.51315789473684, "alnum_prop": 0.6872793531488441, "repo_name": "google-research/dice_rl", "id": "d91d66c0f994aa40618f712b757dfad2c1daa31f", "size": "9357", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "scripts/run_neural_dice.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "579847" }, { "name": "Shell", "bytes": "1366" } ], "symlink_target": "" }
from .models import Election, ElectionManager from config.base import get_environment_variable from import_export_google_civic.controllers import retrieve_from_google_civic_api_election_query, \ store_results_from_google_civic_api_election_query import json import requests import wevote_functions.admin from wevote_functions.functions import positive_value_exists logger = wevote_functions.admin.get_logger(__name__) WE_VOTE_API_KEY = get_environment_variable("WE_VOTE_API_KEY") ELECTIONS_SYNC_URL = get_environment_variable("ELECTIONS_SYNC_URL") def election_remote_retrieve(): retrieve_results = retrieve_from_google_civic_api_election_query() if not retrieve_results['success']: results = { 'success': False, 'status': retrieve_results['status'] } return results else: structured_json = retrieve_results['structured_json'] results = store_results_from_google_civic_api_election_query(structured_json) return results def elections_import_from_sample_file(): """ Get the json data, and either create new entries or update existing :return: """ # Load saved json from local file logger.info("Loading elections from local file") with open('election/import_data/elections_sample.json') as json_data: structured_json = json.load(json_data) return elections_import_from_structured_json(structured_json) def elections_import_from_master_server(request=None): """ Get the json data, and either create new entries or update existing :return: """ # Request json file from We Vote servers logger.info("Loading Election from We Vote Master servers") request = requests.get(ELECTIONS_SYNC_URL, params={ "key": WE_VOTE_API_KEY, # This comes from an environment variable "format": 'json', }) structured_json = json.loads(request.text) return elections_import_from_structured_json(structured_json) def elections_import_from_structured_json(structured_json): election_manager = ElectionManager() elections_saved = 0 elections_updated = 0 elections_not_processed = 0 for one_election in structured_json: logger.debug( u"google_civic_election_id: {google_civic_election_id}, election_name: {election_name}, " u"election_day_text: {election_day_text}".format(**one_election) ) google_civic_election_id = one_election["google_civic_election_id"] \ if "google_civic_election_id" in one_election else '' election_name = one_election["election_name"] if "election_name" in one_election else '' election_day_text = one_election["election_day_text"] if "election_day_text" in one_election else '' ocd_division_id = one_election["ocd_division_id"] if "ocd_division_id" in one_election else '' state_code = one_election["state_code"] if "state_code" in one_election else '' # Make sure we have the minimum required variables if not positive_value_exists(google_civic_election_id) or not positive_value_exists(election_name): elections_not_processed += 1 continue results = election_manager.update_or_create_election( google_civic_election_id, election_name, election_day_text, ocd_division_id, state_code) if results['success']: if results['new_election_created']: elections_saved += 1 else: elections_updated += 1 else: elections_not_processed += 1 elections_results = { 'success': True, 'status': "ELECTION_IMPORT_PROCESS_COMPLETE", 'saved': elections_saved, 'updated': elections_updated, 'not_processed': elections_not_processed, } return elections_results def elections_sync_out_list_for_api(voter_device_id): # # We care about who the voter is, because we *might* want to limit which elections we show? # results = is_voter_device_id_valid(voter_device_id) # if not results['success']: # results2 = { # 'success': False, # 'json_data': results['json_data'], # } # return results2 # # voter_id = fetch_voter_id_from_voter_device_link(voter_device_id) # if voter_id > 0: # voter_manager = VoterManager() # results = voter_manager.retrieve_voter_by_id(voter_id) # if results['voter_found']: # voter_id = results['voter_id'] # else: # # If we are here, the voter_id could not be found from the voter_device_id # json_data = { # 'status': "VOTER_NOT_FOUND_FROM_DEVICE_ID", # 'success': False, # 'voter_device_id': voter_device_id, # } # results = { # 'success': False, # 'json_data': json_data, # } # return results # election_list = Election.objects.all() if len(election_list): results = { 'success': True, 'election_list': election_list, } return results # Trying to mimic the Google Civic error codes scheme errors_list = [ { 'domain': "TODO global", 'reason': "TODO reason", 'message': "TODO Error message here", 'locationType': "TODO Error message here", 'location': "TODO location", } ] error_package = { 'errors': errors_list, 'code': 400, 'message': "Error message here", } json_data = { 'error': error_package, 'status': "ELECTIONS_COULD_NOT_BE_RETRIEVED", 'success': False, 'voter_device_id': voter_device_id, } results = { 'success': False, 'json_data': json_data, } return results
{ "content_hash": "a01416d2b95a4cd81ec03d35fcb1bb88", "timestamp": "", "source": "github", "line_count": 174, "max_line_length": 108, "avg_line_length": 34.4080459770115, "alnum_prop": 0.6071488224486388, "repo_name": "wevote/WebAppPublic", "id": "43d31e5a31eebb1fee85bfb17666a21c1a0e1df6", "size": "6076", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "election/controllers.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "8022" }, { "name": "HTML", "bytes": "131153" }, { "name": "JavaScript", "bytes": "296860" }, { "name": "Python", "bytes": "1700558" }, { "name": "Shell", "bytes": "252" } ], "symlink_target": "" }
import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() import complexdata_pb2 as complexdata__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='user.proto', package='ten64chat.user', syntax='proto3', serialized_pb=_b('\n\nuser.proto\x12\x0eten64chat.user\x1a\x11\x63omplexdata.proto\"g\n\x04User\x12\"\n\x04uuid\x18\x01 \x01(\x0b\x32\x14.ten64chat.data.UUID\x12\x13\n\x0b\x64isplayName\x18\x02 \x01(\t\x12&\n\x06status\x18\x03 \x01(\x0e\x32\x16.ten64chat.user.Status\"^\n\x10UserStatusChange\x12\"\n\x04user\x18\x01 \x01(\x0b\x32\x14.ten64chat.user.User\x12&\n\x06status\x18\x02 \x01(\x0e\x32\x16.ten64chat.user.Status*L\n\x06Status\x12\x0b\n\x07OFFLINE\x10\x00\x12\x0b\n\x07\x42LOCKED\x10\x02\x12\x12\n\x0e\x44O_NOT_DISTURB\x10\x03\x12\x08\n\x04\x41WAY\x10\x04\x12\n\n\x06ONLINE\x10\x05\x62\x06proto3') , dependencies=[complexdata__pb2.DESCRIPTOR,]) _sym_db.RegisterFileDescriptor(DESCRIPTOR) _STATUS = _descriptor.EnumDescriptor( name='Status', full_name='ten64chat.user.Status', filename=None, file=DESCRIPTOR, values=[ _descriptor.EnumValueDescriptor( name='OFFLINE', index=0, number=0, options=None, type=None), _descriptor.EnumValueDescriptor( name='BLOCKED', index=1, number=2, options=None, type=None), _descriptor.EnumValueDescriptor( name='DO_NOT_DISTURB', index=2, number=3, options=None, type=None), _descriptor.EnumValueDescriptor( name='AWAY', index=3, number=4, options=None, type=None), _descriptor.EnumValueDescriptor( name='ONLINE', index=4, number=5, options=None, type=None), ], containing_type=None, options=None, serialized_start=250, serialized_end=326, ) _sym_db.RegisterEnumDescriptor(_STATUS) Status = enum_type_wrapper.EnumTypeWrapper(_STATUS) OFFLINE = 0 BLOCKED = 2 DO_NOT_DISTURB = 3 AWAY = 4 ONLINE = 5 _USER = _descriptor.Descriptor( name='User', full_name='ten64chat.user.User', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='uuid', full_name='ten64chat.user.User.uuid', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='displayName', full_name='ten64chat.user.User.displayName', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='status', full_name='ten64chat.user.User.status', index=2, number=3, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=49, serialized_end=152, ) _USERSTATUSCHANGE = _descriptor.Descriptor( name='UserStatusChange', full_name='ten64chat.user.UserStatusChange', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='user', full_name='ten64chat.user.UserStatusChange.user', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='status', full_name='ten64chat.user.UserStatusChange.status', index=1, number=2, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=154, serialized_end=248, ) _USER.fields_by_name['uuid'].message_type = complexdata__pb2._UUID _USER.fields_by_name['status'].enum_type = _STATUS _USERSTATUSCHANGE.fields_by_name['user'].message_type = _USER _USERSTATUSCHANGE.fields_by_name['status'].enum_type = _STATUS DESCRIPTOR.message_types_by_name['User'] = _USER DESCRIPTOR.message_types_by_name['UserStatusChange'] = _USERSTATUSCHANGE DESCRIPTOR.enum_types_by_name['Status'] = _STATUS User = _reflection.GeneratedProtocolMessageType('User', (_message.Message,), dict( DESCRIPTOR = _USER, __module__ = 'user_pb2' # @@protoc_insertion_point(class_scope:ten64chat.user.User) )) _sym_db.RegisterMessage(User) UserStatusChange = _reflection.GeneratedProtocolMessageType('UserStatusChange', (_message.Message,), dict( DESCRIPTOR = _USERSTATUSCHANGE, __module__ = 'user_pb2' # @@protoc_insertion_point(class_scope:ten64chat.user.UserStatusChange) )) _sym_db.RegisterMessage(UserStatusChange) # @@protoc_insertion_point(module_scope)
{ "content_hash": "1c22124abe0865481bd561ccb8311ce8", "timestamp": "", "source": "github", "line_count": 174, "max_line_length": 605, "avg_line_length": 33.02298850574713, "alnum_prop": 0.7053602506091193, "repo_name": "1064CBread/1064Chat", "id": "3a2e18c4e54fa7120f7cdc69a12bb1b4ab3adc3c", "size": "5827", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/generated_protobuf/user_pb2.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1596" }, { "name": "HTML", "bytes": "7838" }, { "name": "JavaScript", "bytes": "466" }, { "name": "Protocol Buffer", "bytes": "559" }, { "name": "Python", "bytes": "7729" }, { "name": "Shell", "bytes": "898" } ], "symlink_target": "" }
import sys import numpy import matplotlib.pyplot def analyse (filename, outfile = None): """ Displays the mean, maxima and minimum value for each weather station. Creates a figure of three subplots, showing values for mean, maxima and minimum value with axis = 0, y axis labels, and a tight layout. """ data = numpy.loadtxt(fname=filename, delimiter =',') fig = matplotlib.pyplot.figure (figsize= (10.0, 3.0)) subplot1 = fig.add_subplot (1, 3, 1) subplot2 = fig.add_subplot (1, 3, 2) subplot3 = fig.add_subplot (1, 3, 3) subplot1.set_ylabel('average') subplot1.plot(numpy.mean(data, axis = 0)) subplot2.set_ylabel('max') subplot2.plot(numpy.max(data, axis = 0)) subplot3.set_ylabel('min') subplot3.plot(numpy.min(data, axis = 0)) # fig.tight_layout() if outfile is None: matplotlib.pyplot.show() else: matplotlib.pyplot.savefig(outfile) def detect_problems (filename): """Some of our temperature files have problems, check for these This function reads a file(filename argument) and reports on odd looking maxima, and minima that add up to zero. This seems to happen when the sensors break. The function does not return any data. """ data = numpy.loadtxt(fname=filename, delimiter =',') if numpy.max (data, axis = 0)[0] and numpy.max(data, axis = 0)[20] ==20: print ("Suspicious looking maximum") elif numpy.sum(numpy.min(data, axis = 0)) ==0: print ("Minima to zero") else: print ("Data looks OK") if __name__=="__main__": print("Running", sys.argv[0]) print (sys.argv[1]) analyse (sys.argv[1], outfile = sys.argv[2]) detect_problems (sys.argv[1])
{ "content_hash": "1aaae4968ad91b6431dd690c9225f907", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 113, "avg_line_length": 34.14545454545455, "alnum_prop": 0.5963791267305645, "repo_name": "WillRhB/PythonLesssons", "id": "a11bab0d53839b12434cebc115926afb491df37b", "size": "1906", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "datalook.py", "mode": "33188", "license": "mit", "language": [ { "name": "Jupyter Notebook", "bytes": "1769040" }, { "name": "Python", "bytes": "1906" } ], "symlink_target": "" }
__author__ = 'nb254' import csv def save_to_csv(lines, header, filename): myfile = open(filename, 'wb') wr = csv.writer(myfile, quoting = csv.QUOTE_ALL) wr.writerow(header) for line in lines: try: wr.writerow(line) except UnicodeError: fixed_line = list(line) print 'Unicode exception for a line, converting to utf-8' for elem in fixed_line: if isinstance(elem, unicode): index = fixed_line.index(elem) fixed_line[index] = fixed_line[index].encode('utf-8') print fixed_line wr.writerow(fixed_line) def saveStatToCSV(file_name, data, header, one_row): with open(file_name, 'wb') as myfile: wr = csv.writer(myfile) if header != '': wr.writerow(header) if one_row: wr.writerow(data) else: wr.writerows(data) return def saveElemsToCSV(file_name, data, header): with open(file_name, 'wb') as myfile: wr = csv.writer(myfile) if header != '': wr.writerow(header) for elem in data: #print elem wr.writerow([elem]) return def openListFromCSV(filename): with open(filename, 'rb') as f: reader = csv.reader(f) listfromCSV = list(reader) flattened = [val for sublist in listfromCSV for val in sublist] return flattened def openDictfromCSV(filename): reader = csv.reader(open(filename, 'rb')) dictfromCSV = dict(reader) return dictfromCSV def writeDict(filename, mydict): writer = csv.writer(open(filename, 'wb')) for key, value in mydict.items(): writer.writerow([key, value])
{ "content_hash": "bfa4950896caa65db99f6a365eae0f3d", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 68, "avg_line_length": 29.086206896551722, "alnum_prop": 0.5981031416716064, "repo_name": "Nik0l/UTemPro", "id": "f20ba9afc1b4daa2be44e5c8b4e73303bbf3d596", "size": "1715", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "utils.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "199683" }, { "name": "R", "bytes": "1644" } ], "symlink_target": "" }
import os import angr import claripy import nose from angr.codenode import BlockNode, HookNode, SyscallNode BIN_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries') def test_ret_float(): p = angr.load_shellcode(b'X', arch='i386') class F1(angr.SimProcedure): def run(self): return 12.5 p.hook(0x1000, F1(cc=p.factory.cc(func_ty=angr.sim_type.parse_file('float (x)();')[0]['x']))) p.hook(0x2000, F1(cc=p.factory.cc(func_ty=angr.sim_type.parse_file('double (x)();')[0]['x']))) s = p.factory.call_state(addr=0x1000, ret_addr=0) succ = s.step() nose.tools.assert_equal(len(succ.successors), 1) s2 = succ.flat_successors[0] nose.tools.assert_false(s2.regs.st0.symbolic) nose.tools.assert_equal(s2.solver.eval(s2.regs.st0.get_bytes(4, 4).raw_to_fp()), 12.5) s = p.factory.call_state(addr=0x2000, ret_addr=0) succ = s.step() nose.tools.assert_equal(len(succ.successors), 1) s2 = succ.flat_successors[0] nose.tools.assert_false(s2.regs.st0.symbolic) nose.tools.assert_equal(s2.solver.eval(s2.regs.st0.raw_to_fp()), 12.5) def test_syscall_and_simprocedure(): bin_path = os.path.join(BIN_PATH, 'tests', 'cgc', 'CADET_00002') proj = angr.Project(bin_path) cfg = proj.analyses.CFGFast(normalize=True) # check syscall node = cfg.get_any_node(proj.loader.kernel_object.mapped_base + 1) func = proj.kb.functions[node.addr] nose.tools.assert_true(node.is_simprocedure) nose.tools.assert_true(node.is_syscall) nose.tools.assert_false(node.to_codenode().is_hook) nose.tools.assert_false(proj.is_hooked(node.addr)) nose.tools.assert_true(func.is_syscall) nose.tools.assert_true(func.is_simprocedure) nose.tools.assert_equal(type(proj.factory.snippet(node.addr)), SyscallNode) # check normal functions node = cfg.get_any_node(0x80480a0) func = proj.kb.functions[node.addr] nose.tools.assert_false(node.is_simprocedure) nose.tools.assert_false(node.is_syscall) nose.tools.assert_false(proj.is_hooked(node.addr)) nose.tools.assert_false(func.is_syscall) nose.tools.assert_false(func.is_simprocedure) nose.tools.assert_equal(type(proj.factory.snippet(node.addr)), BlockNode) # check hooked functions proj.hook(0x80480a0, angr.SIM_PROCEDURES['libc']['puts']()) cfg = proj.analyses.CFGFast(normalize=True)# rebuild cfg to updated nodes node = cfg.get_any_node(0x80480a0) func = proj.kb.functions[node.addr] nose.tools.assert_true(node.is_simprocedure) nose.tools.assert_false(node.is_syscall) nose.tools.assert_true(proj.is_hooked(node.addr)) nose.tools.assert_false(func.is_syscall) nose.tools.assert_true(func.is_simprocedure) nose.tools.assert_equal(type(proj.factory.snippet(node.addr)), HookNode) if __name__ == '__main__': test_ret_float() test_syscall_and_simprocedure()
{ "content_hash": "a24f2bf0e31c2e5dc59a804fe4fa9e6d", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 98, "avg_line_length": 38.02597402597402, "alnum_prop": 0.6878415300546448, "repo_name": "schieb/angr", "id": "26db7e863305288c7ad916080d6414016d1084ba", "size": "2928", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/test_sim_procedure.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "C", "bytes": "6375" }, { "name": "C++", "bytes": "39522" }, { "name": "Dockerfile", "bytes": "493" }, { "name": "Makefile", "bytes": "739" }, { "name": "Python", "bytes": "4987778" } ], "symlink_target": "" }
from django.shortcuts import render
{ "content_hash": "66fec7901daf38f4b05bf93d8d4c554b", "timestamp": "", "source": "github", "line_count": 2, "max_line_length": 35, "avg_line_length": 18.5, "alnum_prop": 0.8378378378378378, "repo_name": "lemanjo/pimanager", "id": "c70d36c495f321b95046f6ecfb83c0925e81677c", "size": "48", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pimanager/views.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "133446" }, { "name": "JavaScript", "bytes": "3749" }, { "name": "Python", "bytes": "19399" } ], "symlink_target": "" }
import ssl import pytest from pg8000 import DatabaseError, connect # This requires a line in pg_hba.conf that requires scram-sha-256 for the # database scram-sha-256 def test_scram_sha_256_plus(db_kwargs): context = ssl.create_default_context() context.check_hostname = False context.verify_mode = ssl.CERT_NONE db_kwargs["ssl_context"] = context db_kwargs["database"] = "pg8000_scram_sha_256" # Should only raise an exception saying db doesn't exist with pytest.raises(DatabaseError, match="3D000"): with connect(**db_kwargs) as con: con.close()
{ "content_hash": "ef611c1b83791347093b76284f5f26b8", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 73, "avg_line_length": 27.363636363636363, "alnum_prop": 0.6976744186046512, "repo_name": "tlocke/pg8000", "id": "dd35b42392aa4828b27402cdf2b321db64060c96", "size": "602", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "test/legacy/auth/test_scram-sha-256_ssl.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "331262" } ], "symlink_target": "" }
import argparse import logging import tableauserverclient as TSC def main(): parser = argparse.ArgumentParser(description="Explore datasource functions supported by the Server API.") # Common options; please keep those in sync across all samples parser.add_argument("--server", "-s", required=True, help="server address") parser.add_argument("--site", "-S", help="site name") parser.add_argument( "--token-name", "-p", required=True, help="name of the personal access token used to sign into the server" ) parser.add_argument( "--token-value", "-v", required=True, help="value of the personal access token used to sign into the server" ) parser.add_argument( "--logging-level", "-l", choices=["debug", "info", "error"], default="error", help="desired logging level (set to error by default)", ) # Options specific to this sample parser.add_argument("--publish", metavar="FILEPATH", help="path to datasource to publish") parser.add_argument("--download", metavar="FILEPATH", help="path to save downloaded datasource") args = parser.parse_args() # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) # SIGN IN tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): # Query projects for use when demonstrating publishing and updating all_projects, pagination_item = server.projects.get() default_project = next((project for project in all_projects if project.is_default()), None) # Publish datasource if publish flag is set (-publish, -p) if args.publish: if default_project is not None: new_datasource = TSC.DatasourceItem(default_project.id) new_datasource = server.datasources.publish( new_datasource, args.publish, TSC.Server.PublishMode.Overwrite ) print("Datasource published. ID: {}".format(new_datasource.id)) else: print("Publish failed. Could not find the default project.") # Gets all datasource items all_datasources, pagination_item = server.datasources.get() print("\nThere are {} datasources on site: ".format(pagination_item.total_available)) print([datasource.name for datasource in all_datasources]) if all_datasources: # Pick one datasource from the list sample_datasource = all_datasources[0] # Populate connections server.datasources.populate_connections(sample_datasource) print("\nConnections for {}: ".format(sample_datasource.name)) print( [ "{0}({1})".format(connection.id, connection.datasource_name) for connection in sample_datasource.connections ] ) # Add some tags to the datasource original_tag_set = set(sample_datasource.tags) sample_datasource.tags.update("a", "b", "c", "d") server.datasources.update(sample_datasource) print("\nOld tag set: {}".format(original_tag_set)) print("New tag set: {}".format(sample_datasource.tags)) # Delete all tags that were added by setting tags to original sample_datasource.tags = original_tag_set server.datasources.update(sample_datasource) if __name__ == "__main__": main()
{ "content_hash": "61e2b24d2c4b1304fb9dc015c354f5ef", "timestamp": "", "source": "github", "line_count": 87, "max_line_length": 116, "avg_line_length": 42.758620689655174, "alnum_prop": 0.6327956989247312, "repo_name": "tableau/server-client-python", "id": "014a274ef0ab8d097a494b7b27ace5cb0c7fbbde", "size": "4141", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "samples/explore_datasource.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "858778" }, { "name": "Shell", "bytes": "208" } ], "symlink_target": "" }
""" Implemetation of eiffel like methods (methods with preconditions and postconditions). eiffelmethod is a new descriptor that implements eiffel like methods. It accepts a method and optional pre and post conditions. fI the pre or post conditions are not given it searchs methodName_pre and methodName_post. """ import types class eiffelmethod(object): def __init__(self, method, pre=None, post = None): self._method = method self._pre = pre self._post = post def __get__(self, inst, type=None): result = EiffelMethodWraper(inst,self._method,self._pre, self._post) setattr(inst, self._method.__name__,result) return result class EiffelMethodWraper: def __init__(self, inst, method, pre, post): self._inst = inst self._method = method if not pre: pre = getattr(inst,method.__name__+"_pre",None) if pre: pre = pre.im_func self._pre = pre if not post: post = getattr(inst,method.__name__+"_post",None) if post: post = post.im_func self._post = post def __call__(self, *args, **kargs): if self._pre: apply(self._pre,(self._inst,)+args, kargs) result = apply(self._method,(self._inst,)+args, kargs) if self._post: apply(self._post,(self._inst,result)+args, kargs) return result def _test(): class C: def f(self, arg): return arg+1 def f_pre(self, arg): assert arg>0 def f_post(self, result, arg): assert result>arg f = eiffelmethod(f,f_pre,f_post) c = C() c.f(1) try: c.f(-1) except AssertionError: pass else: raise "c.f(-1) bad implemented" print "OK" if __name__=='__main__': _test()
{ "content_hash": "4280f346dfce1c650c2ef8379488825d", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 103, "avg_line_length": 28.523809523809526, "alnum_prop": 0.5826377295492488, "repo_name": "ActiveState/code", "id": "5d98120b46530ce42f943bc58941f395e3ad5588", "size": "1797", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "recipes/Python/91192_eiffelmethod/recipe-91192.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "35894" }, { "name": "C", "bytes": "56048" }, { "name": "C++", "bytes": "90880" }, { "name": "HTML", "bytes": "11656" }, { "name": "Java", "bytes": "57468" }, { "name": "JavaScript", "bytes": "181218" }, { "name": "PHP", "bytes": "250144" }, { "name": "Perl", "bytes": "37296" }, { "name": "Perl 6", "bytes": "9914" }, { "name": "Python", "bytes": "17387779" }, { "name": "Ruby", "bytes": "40233" }, { "name": "Shell", "bytes": "190732" }, { "name": "Tcl", "bytes": "674650" } ], "symlink_target": "" }
import os.path # Only requests for myrig.quickmediasolutions.com will be processed ALLOWED_HOSTS = ['myrig.quickmediasolutions.com',] SITE_ID = 1 # Enable timezone-aware datetimes USE_TZ = True TIME_ZONE = 'America/Vancouver' # Determine the directory this file resides in so that an absolute # path can be specified for the static files and templates PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) STATICFILES_DIRS = (os.path.join(PROJECT_ROOT, 'static'),) TEMPLATE_DIRS = (os.path.join(PROJECT_ROOT, 'templates'),) ROOT_URLCONF = 'myrig.urls' WSGI_APPLICATION = 'myrig.wsgi.application' INSTALLED_APPS = ( # Core Django applications 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.admin', # Django helper applications 'south', 'widget_tweaks', 'social.apps.django_app.default', # MyRig applications 'myrig.accounts', 'myrig.computer', ) # Authentication backends AUTHENTICATION_BACKENDS = ( 'social.backends.open_id.OpenIdAuth', 'django.contrib.auth.backends.ModelBackend', ) # Import local settings, which may add to or override the above settings try: from local_settings import * except ImportError: pass
{ "content_hash": "e806612ebdbe3a6b3039639767b31ec3", "timestamp": "", "source": "github", "line_count": 50, "max_line_length": 72, "avg_line_length": 26.92, "alnum_prop": 0.7147102526002972, "repo_name": "nathan-osman/myrig-website", "id": "3b02ed87a99b78c87f5317f1f55dc64aaa4b841b", "size": "1489", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "myrig/settings.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "597" }, { "name": "Python", "bytes": "28092" } ], "symlink_target": "" }
from billing.integration import Integration from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST from django.conf.urls import patterns from billing.signals import transaction_was_successful, transaction_was_unsuccessful from django.http import HttpResponse from billing.models.world_pay_models import WorldPayResponse from django.utils.decorators import method_decorator RBS_HOSTED_URL_TEST = "https://select-test.wp3.rbsworldpay.com/wcc/purchase" RBS_HOSTED_URL_LIVE = "https://secure.wp3.rbsworldpay.com/wcc/purchase" # http://www.rbsworldpay.com/support/bg/index.php?page=development&sub=integration&c=WW csrf_exempt_m = method_decorator(csrf_exempt) require_POST_m = method_decorator(require_POST) class WorldPayIntegration(Integration): """ Fields required: instId: Installation ID provided by WorldPay cartId: Merchant specified unique id to identify user amount: Amount to be charged currency: ISO 3-character currency """ def __init__(self, options=None): if not options: options = {} super(WorldPayIntegration, self).__init__(options=options) if self.test_mode: self.fields.update({"testMode": 100}) def get_urls(self): urlpatterns = patterns('', (r'^rbs-notify-handler/$', self.notify_handler), ) return urlpatterns @property def service_url(self): if self.test_mode: return RBS_HOSTED_URL_TEST return RBS_HOSTED_URL_LIVE @csrf_exempt_m @require_POST_m def notify_handler(self, request): post_data = request.POST.copy() data = {} resp_fields = { 'instId': 'installation_id', 'compName': 'company_name', 'cartId': 'cart_id', 'desc': 'description', 'amount': 'amount', 'currency': 'currency', 'authMode': 'auth_mode', 'testMode': 'test_mode', 'transId': 'transaction_id', 'transStatus': 'transaction_status', 'transTime': 'transaction_time', 'authAmount': 'auth_amount', 'authCurrency': 'auth_currency', 'authAmountString': 'auth_amount_string', 'rawAuthMessage': 'raw_auth_message', 'rawAuthCode': 'raw_auth_code', 'name': 'name', 'address': 'address', 'postcode': 'post_code', 'country': 'country_code', 'countryString': 'country', 'tel': 'phone', 'fax': 'fax', 'email': 'email', 'futurePayId': 'future_pay_id', 'cardType': 'card_type', 'ipAddress': 'ip_address', } for (key, val) in resp_fields.iteritems(): data[val] = post_data.get(key, '') try: resp = WorldPayResponse.objects.create(**data) # TODO: Make the type more generic transaction_was_successful.send(sender=self.__class__, type="purchase", response=resp) status = "SUCCESS" except: transaction_was_unsuccessful.send(sender=self.__class__, type="purchase", response=post_data) status = "FAILURE" return HttpResponse(status)
{ "content_hash": "3cbd2c3e9aa3014ca6a11074d4d56ca9", "timestamp": "", "source": "github", "line_count": 93, "max_line_length": 105, "avg_line_length": 35.86021505376344, "alnum_prop": 0.5949025487256372, "repo_name": "SimpleTax/merchant", "id": "35e2fa0a017568eaf4883455d63be04e6d425198", "size": "3335", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "billing/integrations/world_pay_integration.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "HTML", "bytes": "9029" }, { "name": "Python", "bytes": "270234" } ], "symlink_target": "" }
""" Python wrappers for Orthogonal Distance Regression (ODRPACK). Notes ===== * Array formats -- FORTRAN stores its arrays in memory column first, i.e., an array element A(i, j, k) will be next to A(i+1, j, k). In C and, consequently, NumPy, arrays are stored row first: A[i, j, k] is next to A[i, j, k+1]. For efficiency and convenience, the input and output arrays of the fitting function (and its Jacobians) are passed to FORTRAN without transposition. Therefore, where the ODRPACK documentation says that the X array is of shape (N, M), it will be passed to the Python function as an array of shape (M, N). If M==1, the 1-D case, then nothing matters; if M>1, then your Python functions will be dealing with arrays that are indexed in reverse of the ODRPACK documentation. No real issue, but watch out for your indexing of the Jacobians: the i,jth elements (@f_i/@x_j) evaluated at the nth observation will be returned as jacd[j, i, n]. Except for the Jacobians, it really is easier to deal with x[0] and x[1] than x[:,0] and x[:,1]. Of course, you can always use the transpose() function from SciPy explicitly. * Examples -- See the accompanying file test/test.py for examples of how to set up fits of your own. Some are taken from the User's Guide; some are from other sources. * Models -- Some common models are instantiated in the accompanying module models.py . Contributions are welcome. Credits ======= * Thanks to Arnold Moene and Gerard Vermeulen for fixing some killer bugs. Robert Kern [email protected] """ import os import numpy from warnings import warn from scipy.odr import __odrpack __all__ = ['odr', 'OdrWarning', 'OdrError', 'OdrStop', 'Data', 'RealData', 'Model', 'Output', 'ODR', 'odr_error', 'odr_stop'] odr = __odrpack.odr class OdrWarning(UserWarning): """ Warning indicating that the data passed into ODR will cause problems when passed into 'odr' that the user should be aware of. """ pass class OdrError(Exception): """ Exception indicating an error in fitting. This is raised by `~scipy.odr.odr` if an error occurs during fitting. """ pass class OdrStop(Exception): """ Exception stopping fitting. You can raise this exception in your objective function to tell `~scipy.odr.odr` to stop fitting. """ pass # Backwards compatibility odr_error = OdrError odr_stop = OdrStop __odrpack._set_exceptions(OdrError, OdrStop) def _conv(obj, dtype=None): """ Convert an object to the preferred form for input to the odr routine. """ if obj is None: return obj else: if dtype is None: obj = numpy.asarray(obj) else: obj = numpy.asarray(obj, dtype) if obj.shape == (): # Scalar. return obj.dtype.type(obj) else: return obj def _report_error(info): """ Interprets the return code of the odr routine. Parameters ---------- info : int The return code of the odr routine. Returns ------- problems : list(str) A list of messages about why the odr() routine stopped. """ stopreason = ('Blank', 'Sum of squares convergence', 'Parameter convergence', 'Both sum of squares and parameter convergence', 'Iteration limit reached')[info % 5] if info >= 5: # questionable results or fatal error I = (info//10000 % 10, info//1000 % 10, info//100 % 10, info//10 % 10, info % 10) problems = [] if I[0] == 0: if I[1] != 0: problems.append('Derivatives possibly not correct') if I[2] != 0: problems.append('Error occurred in callback') if I[3] != 0: problems.append('Problem is not full rank at solution') problems.append(stopreason) elif I[0] == 1: if I[1] != 0: problems.append('N < 1') if I[2] != 0: problems.append('M < 1') if I[3] != 0: problems.append('NP < 1 or NP > N') if I[4] != 0: problems.append('NQ < 1') elif I[0] == 2: if I[1] != 0: problems.append('LDY and/or LDX incorrect') if I[2] != 0: problems.append('LDWE, LD2WE, LDWD, and/or LD2WD incorrect') if I[3] != 0: problems.append('LDIFX, LDSTPD, and/or LDSCLD incorrect') if I[4] != 0: problems.append('LWORK and/or LIWORK too small') elif I[0] == 3: if I[1] != 0: problems.append('STPB and/or STPD incorrect') if I[2] != 0: problems.append('SCLB and/or SCLD incorrect') if I[3] != 0: problems.append('WE incorrect') if I[4] != 0: problems.append('WD incorrect') elif I[0] == 4: problems.append('Error in derivatives') elif I[0] == 5: problems.append('Error occurred in callback') elif I[0] == 6: problems.append('Numerical error detected') return problems else: return [stopreason] class Data(object): """ The data to fit. Parameters ---------- x : array_like Observed data for the independent variable of the regression y : array_like, optional If array-like, observed data for the dependent variable of the regression. A scalar input implies that the model to be used on the data is implicit. we : array_like, optional If `we` is a scalar, then that value is used for all data points (and all dimensions of the response variable). If `we` is a rank-1 array of length q (the dimensionality of the response variable), then this vector is the diagonal of the covariant weighting matrix for all data points. If `we` is a rank-1 array of length n (the number of data points), then the i'th element is the weight for the i'th response variable observation (single-dimensional only). If `we` is a rank-2 array of shape (q, q), then this is the full covariant weighting matrix broadcast to each observation. If `we` is a rank-2 array of shape (q, n), then `we[:,i]` is the diagonal of the covariant weighting matrix for the i'th observation. If `we` is a rank-3 array of shape (q, q, n), then `we[:,:,i]` is the full specification of the covariant weighting matrix for each observation. If the fit is implicit, then only a positive scalar value is used. wd : array_like, optional If `wd` is a scalar, then that value is used for all data points (and all dimensions of the input variable). If `wd` = 0, then the covariant weighting matrix for each observation is set to the identity matrix (so each dimension of each observation has the same weight). If `wd` is a rank-1 array of length m (the dimensionality of the input variable), then this vector is the diagonal of the covariant weighting matrix for all data points. If `wd` is a rank-1 array of length n (the number of data points), then the i'th element is the weight for the ith input variable observation (single-dimensional only). If `wd` is a rank-2 array of shape (m, m), then this is the full covariant weighting matrix broadcast to each observation. If `wd` is a rank-2 array of shape (m, n), then `wd[:,i]` is the diagonal of the covariant weighting matrix for the ith observation. If `wd` is a rank-3 array of shape (m, m, n), then `wd[:,:,i]` is the full specification of the covariant weighting matrix for each observation. fix : array_like of ints, optional The `fix` argument is the same as ifixx in the class ODR. It is an array of integers with the same shape as data.x that determines which input observations are treated as fixed. One can use a sequence of length m (the dimensionality of the input observations) to fix some dimensions for all observations. A value of 0 fixes the observation, a value > 0 makes it free. meta : dict, optional Free-form dictionary for metadata. Notes ----- Each argument is attached to the member of the instance of the same name. The structures of `x` and `y` are described in the Model class docstring. If `y` is an integer, then the Data instance can only be used to fit with implicit models where the dimensionality of the response is equal to the specified value of `y`. The `we` argument weights the effect a deviation in the response variable has on the fit. The `wd` argument weights the effect a deviation in the input variable has on the fit. To handle multidimensional inputs and responses easily, the structure of these arguments has the n'th dimensional axis first. These arguments heavily use the structured arguments feature of ODRPACK to conveniently and flexibly support all options. See the ODRPACK User's Guide for a full explanation of how these weights are used in the algorithm. Basically, a higher value of the weight for a particular data point makes a deviation at that point more detrimental to the fit. """ def __init__(self, x, y=None, we=None, wd=None, fix=None, meta={}): self.x = _conv(x) if not isinstance(self.x, numpy.ndarray): raise ValueError(("Expected an 'ndarray' of data for 'x', " "but instead got data of type '{name}'").format( name=type(self.x).__name__)) self.y = _conv(y) self.we = _conv(we) self.wd = _conv(wd) self.fix = _conv(fix) self.meta = meta def set_meta(self, **kwds): """ Update the metadata dictionary with the keywords and data provided by keywords. Examples -------- :: data.set_meta(lab="Ph 7; Lab 26", title="Ag110 + Ag108 Decay") """ self.meta.update(kwds) def __getattr__(self, attr): """ Dispatch attribute access to the metadata dictionary. """ if attr in self.meta: return self.meta[attr] else: raise AttributeError("'%s' not in metadata" % attr) class RealData(Data): """ The data, with weightings as actual standard deviations and/or covariances. Parameters ---------- x : array_like Observed data for the independent variable of the regression y : array_like, optional If array-like, observed data for the dependent variable of the regression. A scalar input implies that the model to be used on the data is implicit. sx : array_like, optional Standard deviations of `x`. `sx` are standard deviations of `x` and are converted to weights by dividing 1.0 by their squares. sy : array_like, optional Standard deviations of `y`. `sy` are standard deviations of `y` and are converted to weights by dividing 1.0 by their squares. covx : array_like, optional Covariance of `x` `covx` is an array of covariance matrices of `x` and are converted to weights by performing a matrix inversion on each observation's covariance matrix. covy : array_like, optional Covariance of `y` `covy` is an array of covariance matrices and are converted to weights by performing a matrix inversion on each observation's covariance matrix. fix : array_like, optional The argument and member fix is the same as Data.fix and ODR.ifixx: It is an array of integers with the same shape as `x` that determines which input observations are treated as fixed. One can use a sequence of length m (the dimensionality of the input observations) to fix some dimensions for all observations. A value of 0 fixes the observation, a value > 0 makes it free. meta : dict, optional Free-form dictionary for metadata. Notes ----- The weights `wd` and `we` are computed from provided values as follows: `sx` and `sy` are converted to weights by dividing 1.0 by their squares. For example, ``wd = 1./numpy.power(`sx`, 2)``. `covx` and `covy` are arrays of covariance matrices and are converted to weights by performing a matrix inversion on each observation's covariance matrix. For example, ``we[i] = numpy.linalg.inv(covy[i])``. These arguments follow the same structured argument conventions as wd and we only restricted by their natures: `sx` and `sy` can't be rank-3, but `covx` and `covy` can be. Only set *either* `sx` or `covx` (not both). Setting both will raise an exception. Same with `sy` and `covy`. """ def __init__(self, x, y=None, sx=None, sy=None, covx=None, covy=None, fix=None, meta={}): if (sx is not None) and (covx is not None): raise ValueError("cannot set both sx and covx") if (sy is not None) and (covy is not None): raise ValueError("cannot set both sy and covy") # Set flags for __getattr__ self._ga_flags = {} if sx is not None: self._ga_flags['wd'] = 'sx' else: self._ga_flags['wd'] = 'covx' if sy is not None: self._ga_flags['we'] = 'sy' else: self._ga_flags['we'] = 'covy' self.x = _conv(x) if not isinstance(self.x, numpy.ndarray): raise ValueError(("Expected an 'ndarray' of data for 'x', " "but instead got data of type '{name}'").format( name=type(self.x).__name__)) self.y = _conv(y) self.sx = _conv(sx) self.sy = _conv(sy) self.covx = _conv(covx) self.covy = _conv(covy) self.fix = _conv(fix) self.meta = meta def _sd2wt(self, sd): """ Convert standard deviation to weights. """ return 1./numpy.power(sd, 2) def _cov2wt(self, cov): """ Convert covariance matrix(-ices) to weights. """ from scipy.linalg import inv if len(cov.shape) == 2: return inv(cov) else: weights = numpy.zeros(cov.shape, float) for i in range(cov.shape[-1]): # n weights[:,:,i] = inv(cov[:,:,i]) return weights def __getattr__(self, attr): lookup_tbl = {('wd', 'sx'): (self._sd2wt, self.sx), ('wd', 'covx'): (self._cov2wt, self.covx), ('we', 'sy'): (self._sd2wt, self.sy), ('we', 'covy'): (self._cov2wt, self.covy)} if attr not in ('wd', 'we'): if attr in self.meta: return self.meta[attr] else: raise AttributeError("'%s' not in metadata" % attr) else: func, arg = lookup_tbl[(attr, self._ga_flags[attr])] if arg is not None: return func(*(arg,)) else: return None class Model(object): """ The Model class stores information about the function you wish to fit. It stores the function itself, at the least, and optionally stores functions which compute the Jacobians used during fitting. Also, one can provide a function that will provide reasonable starting values for the fit parameters possibly given the set of data. Parameters ---------- fcn : function fcn(beta, x) --> y fjacb : function Jacobian of fcn wrt the fit parameters beta. fjacb(beta, x) --> @f_i(x,B)/@B_j fjacd : function Jacobian of fcn wrt the (possibly multidimensional) input variable. fjacd(beta, x) --> @f_i(x,B)/@x_j extra_args : tuple, optional If specified, `extra_args` should be a tuple of extra arguments to pass to `fcn`, `fjacb`, and `fjacd`. Each will be called by `apply(fcn, (beta, x) + extra_args)` estimate : array_like of rank-1 Provides estimates of the fit parameters from the data estimate(data) --> estbeta implicit : boolean If TRUE, specifies that the model is implicit; i.e `fcn(beta, x)` ~= 0 and there is no y data to fit against meta : dict, optional freeform dictionary of metadata for the model Notes ----- Note that the `fcn`, `fjacb`, and `fjacd` operate on NumPy arrays and return a NumPy array. The `estimate` object takes an instance of the Data class. Here are the rules for the shapes of the argument and return arrays of the callback functions: `x` if the input data is single-dimensional, then `x` is rank-1 array; i.e., ``x = array([1, 2, 3, ...]); x.shape = (n,)`` If the input data is multi-dimensional, then `x` is a rank-2 array; i.e., ``x = array([[1, 2, ...], [2, 4, ...]]); x.shape = (m, n)``. In all cases, it has the same shape as the input data array passed to `~scipy.odr.odr`. `m` is the dimensionality of the input data, `n` is the number of observations. `y` if the response variable is single-dimensional, then `y` is a rank-1 array, i.e., ``y = array([2, 4, ...]); y.shape = (n,)``. If the response variable is multi-dimensional, then `y` is a rank-2 array, i.e., ``y = array([[2, 4, ...], [3, 6, ...]]); y.shape = (q, n)`` where `q` is the dimensionality of the response variable. `beta` rank-1 array of length `p` where `p` is the number of parameters; i.e. ``beta = array([B_1, B_2, ..., B_p])`` `fjacb` if the response variable is multi-dimensional, then the return array's shape is `(q, p, n)` such that ``fjacb(x,beta)[l,k,i] = d f_l(X,B)/d B_k`` evaluated at the ith data point. If `q == 1`, then the return array is only rank-2 and with shape `(p, n)`. `fjacd` as with fjacb, only the return array's shape is `(q, m, n)` such that ``fjacd(x,beta)[l,j,i] = d f_l(X,B)/d X_j`` at the ith data point. If `q == 1`, then the return array's shape is `(m, n)`. If `m == 1`, the shape is (q, n). If `m == q == 1`, the shape is `(n,)`. """ def __init__(self, fcn, fjacb=None, fjacd=None, extra_args=None, estimate=None, implicit=0, meta=None): self.fcn = fcn self.fjacb = fjacb self.fjacd = fjacd if extra_args is not None: extra_args = tuple(extra_args) self.extra_args = extra_args self.estimate = estimate self.implicit = implicit self.meta = meta def set_meta(self, **kwds): """ Update the metadata dictionary with the keywords and data provided here. Examples -------- set_meta(name="Exponential", equation="y = a exp(b x) + c") """ self.meta.update(kwds) def __getattr__(self, attr): """ Dispatch attribute access to the metadata. """ if attr in self.meta: return self.meta[attr] else: raise AttributeError("'%s' not in metadata" % attr) class Output(object): """ The Output class stores the output of an ODR run. Attributes ---------- beta : ndarray Estimated parameter values, of shape (q,). sd_beta : ndarray Standard deviations of the estimated parameters, of shape (p,). cov_beta : ndarray Covariance matrix of the estimated parameters, of shape (p,p). delta : ndarray, optional Array of estimated errors in input variables, of same shape as `x`. eps : ndarray, optional Array of estimated errors in response variables, of same shape as `y`. xplus : ndarray, optional Array of ``x + delta``. y : ndarray, optional Array ``y = fcn(x + delta)``. res_var : float, optional Residual variance. sum_square : float, optional Sum of squares error. sum_square_delta : float, optional Sum of squares of delta error. sum_square_eps : float, optional Sum of squares of eps error. inv_condnum : float, optional Inverse condition number (cf. ODRPACK UG p. 77). rel_error : float, optional Relative error in function values computed within fcn. work : ndarray, optional Final work array. work_ind : dict, optional Indices into work for drawing out values (cf. ODRPACK UG p. 83). info : int, optional Reason for returning, as output by ODRPACK (cf. ODRPACK UG p. 38). stopreason : list of str, optional `info` interpreted into English. Notes ----- Takes one argument for initialization, the return value from the function `~scipy.odr.odr`. The attributes listed as "optional" above are only present if `~scipy.odr.odr` was run with ``full_output=1``. """ def __init__(self, output): self.beta = output[0] self.sd_beta = output[1] self.cov_beta = output[2] if len(output) == 4: # full output self.__dict__.update(output[3]) self.stopreason = _report_error(self.info) def pprint(self): """ Pretty-print important results. """ print('Beta:', self.beta) print('Beta Std Error:', self.sd_beta) print('Beta Covariance:', self.cov_beta) if hasattr(self, 'info'): print('Residual Variance:',self.res_var) print('Inverse Condition #:', self.inv_condnum) print('Reason(s) for Halting:') for r in self.stopreason: print(' %s' % r) class ODR(object): """ The ODR class gathers all information and coordinates the running of the main fitting routine. Members of instances of the ODR class have the same names as the arguments to the initialization routine. Parameters ---------- data : Data class instance instance of the Data class model : Model class instance instance of the Model class Other Parameters ---------------- beta0 : array_like of rank-1 a rank-1 sequence of initial parameter values. Optional if model provides an "estimate" function to estimate these values. delta0 : array_like of floats of rank-1, optional a (double-precision) float array to hold the initial values of the errors in the input variables. Must be same shape as data.x ifixb : array_like of ints of rank-1, optional sequence of integers with the same length as beta0 that determines which parameters are held fixed. A value of 0 fixes the parameter, a value > 0 makes the parameter free. ifixx : array_like of ints with same shape as data.x, optional an array of integers with the same shape as data.x that determines which input observations are treated as fixed. One can use a sequence of length m (the dimensionality of the input observations) to fix some dimensions for all observations. A value of 0 fixes the observation, a value > 0 makes it free. job : int, optional an integer telling ODRPACK what tasks to perform. See p. 31 of the ODRPACK User's Guide if you absolutely must set the value here. Use the method set_job post-initialization for a more readable interface. iprint : int, optional an integer telling ODRPACK what to print. See pp. 33-34 of the ODRPACK User's Guide if you absolutely must set the value here. Use the method set_iprint post-initialization for a more readable interface. errfile : str, optional string with the filename to print ODRPACK errors to. If the file already exists, an error will be thrown. The `overwrite` argument can be used to prevent this. *Do Not Open This File Yourself!* rptfile : str, optional string with the filename to print ODRPACK summaries to. If the file already exists, an error will be thrown. The `overwrite` argument can be used to prevent this. *Do Not Open This File Yourself!* ndigit : int, optional integer specifying the number of reliable digits in the computation of the function. taufac : float, optional float specifying the initial trust region. The default value is 1. The initial trust region is equal to taufac times the length of the first computed Gauss-Newton step. taufac must be less than 1. sstol : float, optional float specifying the tolerance for convergence based on the relative change in the sum-of-squares. The default value is eps**(1/2) where eps is the smallest value such that 1 + eps > 1 for double precision computation on the machine. sstol must be less than 1. partol : float, optional float specifying the tolerance for convergence based on the relative change in the estimated parameters. The default value is eps**(2/3) for explicit models and ``eps**(1/3)`` for implicit models. partol must be less than 1. maxit : int, optional integer specifying the maximum number of iterations to perform. For first runs, maxit is the total number of iterations performed and defaults to 50. For restarts, maxit is the number of additional iterations to perform and defaults to 10. stpb : array_like, optional sequence (``len(stpb) == len(beta0)``) of relative step sizes to compute finite difference derivatives wrt the parameters. stpd : optional array (``stpd.shape == data.x.shape`` or ``stpd.shape == (m,)``) of relative step sizes to compute finite difference derivatives wrt the input variable errors. If stpd is a rank-1 array with length m (the dimensionality of the input variable), then the values are broadcast to all observations. sclb : array_like, optional sequence (``len(stpb) == len(beta0)``) of scaling factors for the parameters. The purpose of these scaling factors are to scale all of the parameters to around unity. Normally appropriate scaling factors are computed if this argument is not specified. Specify them yourself if the automatic procedure goes awry. scld : array_like, optional array (scld.shape == data.x.shape or scld.shape == (m,)) of scaling factors for the *errors* in the input variables. Again, these factors are automatically computed if you do not provide them. If scld.shape == (m,), then the scaling factors are broadcast to all observations. work : ndarray, optional array to hold the double-valued working data for ODRPACK. When restarting, takes the value of self.output.work. iwork : ndarray, optional array to hold the integer-valued working data for ODRPACK. When restarting, takes the value of self.output.iwork. overwrite : bool, optional If it is True, output files defined by `errfile` and `rptfile` are overwritten. The default is False. Attributes ---------- data : Data The data for this fit model : Model The model used in fit output : Output An instance if the Output class containing all of the returned data from an invocation of ODR.run() or ODR.restart() """ def __init__(self, data, model, beta0=None, delta0=None, ifixb=None, ifixx=None, job=None, iprint=None, errfile=None, rptfile=None, ndigit=None, taufac=None, sstol=None, partol=None, maxit=None, stpb=None, stpd=None, sclb=None, scld=None, work=None, iwork=None, overwrite=False): self.data = data self.model = model if beta0 is None: if self.model.estimate is not None: self.beta0 = _conv(self.model.estimate(self.data)) else: raise ValueError( "must specify beta0 or provide an estimater with the model" ) else: self.beta0 = _conv(beta0) if ifixx is None and data.fix is not None: ifixx = data.fix if overwrite: # remove output files for overwriting. if rptfile is not None and os.path.exists(rptfile): os.remove(rptfile) if errfile is not None and os.path.exists(errfile): os.remove(errfile) self.delta0 = _conv(delta0) # These really are 32-bit integers in FORTRAN (gfortran), even on 64-bit # platforms. # XXX: some other FORTRAN compilers may not agree. self.ifixx = _conv(ifixx, dtype=numpy.int32) self.ifixb = _conv(ifixb, dtype=numpy.int32) self.job = job self.iprint = iprint self.errfile = errfile self.rptfile = rptfile self.ndigit = ndigit self.taufac = taufac self.sstol = sstol self.partol = partol self.maxit = maxit self.stpb = _conv(stpb) self.stpd = _conv(stpd) self.sclb = _conv(sclb) self.scld = _conv(scld) self.work = _conv(work) self.iwork = _conv(iwork) self.output = None self._check() def _check(self): """ Check the inputs for consistency, but don't bother checking things that the builtin function odr will check. """ x_s = list(self.data.x.shape) if isinstance(self.data.y, numpy.ndarray): y_s = list(self.data.y.shape) if self.model.implicit: raise OdrError("an implicit model cannot use response data") else: # implicit model with q == self.data.y y_s = [self.data.y, x_s[-1]] if not self.model.implicit: raise OdrError("an explicit model needs response data") self.set_job(fit_type=1) if x_s[-1] != y_s[-1]: raise OdrError("number of observations do not match") n = x_s[-1] if len(x_s) == 2: m = x_s[0] else: m = 1 if len(y_s) == 2: q = y_s[0] else: q = 1 p = len(self.beta0) # permissible output array shapes fcn_perms = [(q, n)] fjacd_perms = [(q, m, n)] fjacb_perms = [(q, p, n)] if q == 1: fcn_perms.append((n,)) fjacd_perms.append((m, n)) fjacb_perms.append((p, n)) if m == 1: fjacd_perms.append((q, n)) if p == 1: fjacb_perms.append((q, n)) if m == q == 1: fjacd_perms.append((n,)) if p == q == 1: fjacb_perms.append((n,)) # try evaluating the supplied functions to make sure they provide # sensible outputs arglist = (self.beta0, self.data.x) if self.model.extra_args is not None: arglist = arglist + self.model.extra_args res = self.model.fcn(*arglist) if res.shape not in fcn_perms: print(res.shape) print(fcn_perms) raise OdrError("fcn does not output %s-shaped array" % y_s) if self.model.fjacd is not None: res = self.model.fjacd(*arglist) if res.shape not in fjacd_perms: raise OdrError( "fjacd does not output %s-shaped array" % repr((q, m, n))) if self.model.fjacb is not None: res = self.model.fjacb(*arglist) if res.shape not in fjacb_perms: raise OdrError( "fjacb does not output %s-shaped array" % repr((q, p, n))) # check shape of delta0 if self.delta0 is not None and self.delta0.shape != self.data.x.shape: raise OdrError( "delta0 is not a %s-shaped array" % repr(self.data.x.shape)) if self.data.x.size == 0: warn(("Empty data detected for ODR instance. " "Do not expect any fitting to occur"), OdrWarning) def _gen_work(self): """ Generate a suitable work array if one does not already exist. """ n = self.data.x.shape[-1] p = self.beta0.shape[0] if len(self.data.x.shape) == 2: m = self.data.x.shape[0] else: m = 1 if self.model.implicit: q = self.data.y elif len(self.data.y.shape) == 2: q = self.data.y.shape[0] else: q = 1 if self.data.we is None: ldwe = ld2we = 1 elif len(self.data.we.shape) == 3: ld2we, ldwe = self.data.we.shape[1:] else: # Okay, this isn't precisely right, but for this calculation, # it's fine ldwe = 1 ld2we = self.data.we.shape[1] if self.job % 10 < 2: # ODR not OLS lwork = (18 + 11*p + p*p + m + m*m + 4*n*q + 6*n*m + 2*n*q*p + 2*n*q*m + q*q + 5*q + q*(p+m) + ldwe*ld2we*q) else: # OLS not ODR lwork = (18 + 11*p + p*p + m + m*m + 4*n*q + 2*n*m + 2*n*q*p + 5*q + q*(p+m) + ldwe*ld2we*q) if isinstance(self.work, numpy.ndarray) and self.work.shape == (lwork,)\ and self.work.dtype.str.endswith('f8'): # the existing array is fine return else: self.work = numpy.zeros((lwork,), float) def set_job(self, fit_type=None, deriv=None, var_calc=None, del_init=None, restart=None): """ Sets the "job" parameter is a hopefully comprehensible way. If an argument is not specified, then the value is left as is. The default value from class initialization is for all of these options set to 0. Parameters ---------- fit_type : {0, 1, 2} int 0 -> explicit ODR 1 -> implicit ODR 2 -> ordinary least-squares deriv : {0, 1, 2, 3} int 0 -> forward finite differences 1 -> central finite differences 2 -> user-supplied derivatives (Jacobians) with results checked by ODRPACK 3 -> user-supplied derivatives, no checking var_calc : {0, 1, 2} int 0 -> calculate asymptotic covariance matrix and fit parameter uncertainties (V_B, s_B) using derivatives recomputed at the final solution 1 -> calculate V_B and s_B using derivatives from last iteration 2 -> do not calculate V_B and s_B del_init : {0, 1} int 0 -> initial input variable offsets set to 0 1 -> initial offsets provided by user in variable "work" restart : {0, 1} int 0 -> fit is not a restart 1 -> fit is a restart Notes ----- The permissible values are different from those given on pg. 31 of the ODRPACK User's Guide only in that one cannot specify numbers greater than the last value for each variable. If one does not supply functions to compute the Jacobians, the fitting procedure will change deriv to 0, finite differences, as a default. To initialize the input variable offsets by yourself, set del_init to 1 and put the offsets into the "work" variable correctly. """ if self.job is None: job_l = [0, 0, 0, 0, 0] else: job_l = [self.job // 10000 % 10, self.job // 1000 % 10, self.job // 100 % 10, self.job // 10 % 10, self.job % 10] if fit_type in (0, 1, 2): job_l[4] = fit_type if deriv in (0, 1, 2, 3): job_l[3] = deriv if var_calc in (0, 1, 2): job_l[2] = var_calc if del_init in (0, 1): job_l[1] = del_init if restart in (0, 1): job_l[0] = restart self.job = (job_l[0]*10000 + job_l[1]*1000 + job_l[2]*100 + job_l[3]*10 + job_l[4]) def set_iprint(self, init=None, so_init=None, iter=None, so_iter=None, iter_step=None, final=None, so_final=None): """ Set the iprint parameter for the printing of computation reports. If any of the arguments are specified here, then they are set in the iprint member. If iprint is not set manually or with this method, then ODRPACK defaults to no printing. If no filename is specified with the member rptfile, then ODRPACK prints to stdout. One can tell ODRPACK to print to stdout in addition to the specified filename by setting the so_* arguments to this function, but one cannot specify to print to stdout but not a file since one can do that by not specifying a rptfile filename. There are three reports: initialization, iteration, and final reports. They are represented by the arguments init, iter, and final respectively. The permissible values are 0, 1, and 2 representing "no report", "short report", and "long report" respectively. The argument iter_step (0 <= iter_step <= 9) specifies how often to make the iteration report; the report will be made for every iter_step'th iteration starting with iteration one. If iter_step == 0, then no iteration report is made, regardless of the other arguments. If the rptfile is None, then any so_* arguments supplied will raise an exception. """ if self.iprint is None: self.iprint = 0 ip = [self.iprint // 1000 % 10, self.iprint // 100 % 10, self.iprint // 10 % 10, self.iprint % 10] # make a list to convert iprint digits to/from argument inputs # rptfile, stdout ip2arg = [[0, 0], # none, none [1, 0], # short, none [2, 0], # long, none [1, 1], # short, short [2, 1], # long, short [1, 2], # short, long [2, 2]] # long, long if (self.rptfile is None and (so_init is not None or so_iter is not None or so_final is not None)): raise OdrError( "no rptfile specified, cannot output to stdout twice") iprint_l = ip2arg[ip[0]] + ip2arg[ip[1]] + ip2arg[ip[3]] if init is not None: iprint_l[0] = init if so_init is not None: iprint_l[1] = so_init if iter is not None: iprint_l[2] = iter if so_iter is not None: iprint_l[3] = so_iter if final is not None: iprint_l[4] = final if so_final is not None: iprint_l[5] = so_final if iter_step in range(10): # 0..9 ip[2] = iter_step ip[0] = ip2arg.index(iprint_l[0:2]) ip[1] = ip2arg.index(iprint_l[2:4]) ip[3] = ip2arg.index(iprint_l[4:6]) self.iprint = ip[0]*1000 + ip[1]*100 + ip[2]*10 + ip[3] def run(self): """ Run the fitting routine with all of the information given and with ``full_output=1``. Returns ------- output : Output instance This object is also assigned to the attribute .output . """ args = (self.model.fcn, self.beta0, self.data.y, self.data.x) kwds = {'full_output': 1} kwd_l = ['ifixx', 'ifixb', 'job', 'iprint', 'errfile', 'rptfile', 'ndigit', 'taufac', 'sstol', 'partol', 'maxit', 'stpb', 'stpd', 'sclb', 'scld', 'work', 'iwork'] if self.delta0 is not None and self.job % 1000 // 10 == 1: # delta0 provided and fit is not a restart self._gen_work() d0 = numpy.ravel(self.delta0) self.work[:len(d0)] = d0 # set the kwds from other objects explicitly if self.model.fjacb is not None: kwds['fjacb'] = self.model.fjacb if self.model.fjacd is not None: kwds['fjacd'] = self.model.fjacd if self.data.we is not None: kwds['we'] = self.data.we if self.data.wd is not None: kwds['wd'] = self.data.wd if self.model.extra_args is not None: kwds['extra_args'] = self.model.extra_args # implicitly set kwds from self's members for attr in kwd_l: obj = getattr(self, attr) if obj is not None: kwds[attr] = obj self.output = Output(odr(*args, **kwds)) return self.output def restart(self, iter=None): """ Restarts the run with iter more iterations. Parameters ---------- iter : int, optional ODRPACK's default for the number of new iterations is 10. Returns ------- output : Output instance This object is also assigned to the attribute .output . """ if self.output is None: raise OdrError("cannot restart: run() has not been called before") self.set_job(restart=1) self.work = self.output.work self.iwork = self.output.iwork self.maxit = iter return self.run()
{ "content_hash": "c9de98382008205a575d8cd1e0cf20c4", "timestamp": "", "source": "github", "line_count": 1142, "max_line_length": 97, "avg_line_length": 36.78721541155867, "alnum_prop": 0.5832758087167647, "repo_name": "nmayorov/scipy", "id": "45fb08c91dcfab3ffe1af33604f064263ff0b9fc", "size": "42011", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "scipy/odr/odrpack.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "4418291" }, { "name": "C++", "bytes": "672553" }, { "name": "Dockerfile", "bytes": "1328" }, { "name": "Fortran", "bytes": "5300184" }, { "name": "MATLAB", "bytes": "4346" }, { "name": "Makefile", "bytes": "778" }, { "name": "Python", "bytes": "13498627" }, { "name": "Shell", "bytes": "538" }, { "name": "TeX", "bytes": "52106" } ], "symlink_target": "" }
import datetime from datetime import timedelta import os import glob import re # In[ ]: # In[2]: today = datetime.date.today() yesterday = today - timedelta(1) # In[3]: if yesterday.isoweekday() == 3: yesterday_str = yesterday.strftime("%Y-%m-%d") # In[4]: filename = glob.glob("_posts/" + yesterday_str + "*")[0] # In[5]: with open(filename, "r") as file: file_text = file.read() file_text # In[6]: file_text = file_text.replace('category: upcoming', 'category: posts') file_text = file_text.replace('category:upcoming', 'category: posts') file_text # In[7]: with open(filename, "w") as file: file.write(file_text)
{ "content_hash": "d7c9eb403a389b74ed6220986c8db8dd", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 70, "avg_line_length": 12.673076923076923, "alnum_prop": 0.637329286798179, "repo_name": "thehackerwithin/berkeley", "id": "9958160863f91e8543b2726b4715ca20a20a8864", "size": "926", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/upcoming2posts.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "9196" }, { "name": "C++", "bytes": "9944" }, { "name": "Dockerfile", "bytes": "1068" }, { "name": "Fortran", "bytes": "434" }, { "name": "Gnuplot", "bytes": "240" }, { "name": "HTML", "bytes": "1901059" }, { "name": "Jupyter Notebook", "bytes": "23122238" }, { "name": "Makefile", "bytes": "1416" }, { "name": "PostScript", "bytes": "287518" }, { "name": "Python", "bytes": "62059" }, { "name": "R", "bytes": "5431" }, { "name": "Shell", "bytes": "1493" }, { "name": "TeX", "bytes": "51016" } ], "symlink_target": "" }
import oauth2 import urllib from twitter_users import settings # not sure why this is necessary, but oauth2 does this, so I'm following its lead try: from urlparse import parse_qs, parse_qsl except ImportError: from cgi import parse_qs, parse_qsl REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token' ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token' AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authenticate' class Consumer(oauth2.Consumer): pass class Token(object): def __init__(self, consumer): self.consumer = consumer def _get_token(self, url, token=None, method='POST', **parameters): client = oauth2.Client(self.consumer, token) response, content = client.request(url, method = method, body = urllib.urlencode(parameters) ) if response['status'] != '200': return None; return content class RequestToken(Token): def __init__(self, consumer, callback_url=None): super(RequestToken, self).__init__(consumer) parameters = {} if callback_url is not None: parameters['oauth_callback'] = callback_url token_content = self._get_token(REQUEST_TOKEN_URL, **parameters) self.token = oauth2.Token.from_string(token_content) @property def authorization_url(self): request = oauth2.Request.from_consumer_and_token( self.consumer, self.token, http_url = AUTHORIZATION_URL ) request.sign_request(oauth2.SignatureMethod_HMAC_SHA1(), self.consumer, self.token) return request.to_url() class AccessToken(Token): def __init__(self, consumer, oauth_token, oauth_verifier): super(AccessToken, self).__init__(consumer) # parse the access token by hand to get access to the additional # parameters that Twitter passes back, like the user id and screen name token_content = self._get_token(ACCESS_TOKEN_URL, oauth_token=oauth_token, oauth_verifier=oauth_verifier) self.params = parse_qs(token_content) @property def token(self): return self.params['oauth_token'][0] @property def secret(self): return self.params['oauth_token_secret'][0] @property def user_id(self): return self.params['user_id'][0] @property def username(self): return self.params['screen_name'][0]
{ "content_hash": "6b9e471f187af40a183fb6c3394f9799", "timestamp": "", "source": "github", "line_count": 78, "max_line_length": 113, "avg_line_length": 32.32051282051282, "alnum_prop": 0.6291154303847679, "repo_name": "tamizhgeek/tweetscrapper", "id": "ab1d2eb62f8c14e8fc2ad1201ea0513866863cef", "size": "2522", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "twitter_users/oauth.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "JavaScript", "bytes": "72966" }, { "name": "Python", "bytes": "18004" } ], "symlink_target": "" }
import sys from twopy.board import retrieve_board from twopy.thread import retrieve_thread from twopy.utility import retrieve_2ch_boards def main(): board_url = "http://uni.2ch.net/newsplus/" result = retrieve_board(board_url) print result[0]["title"], result[0]["res"], result[0]["dat"] retrieve_thread(board_url, result[0]["dat"]) """ result = retrieve_2ch_boards() for category in result: print category["category"] for board in category["boards"]: print " ", board["title"], board["url"] return 0 """ if __name__ == "__main__": sys.exit(main())
{ "content_hash": "f11182fff44c25a2d2f7cb96dee64f8f", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 64, "avg_line_length": 27.130434782608695, "alnum_prop": 0.6185897435897436, "repo_name": "rezoo/twopy", "id": "3ffa062d4096a0b4c9dad6867fc812c58bc4e2be", "size": "671", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "16363" } ], "symlink_target": "" }
import os import tempfile import unittest from dateutil.parser import parse as dtparse import numpy as np from pyaxiom.netcdf.sensors.dsg import IncompleteMultidimensionalTrajectory import logging from pyaxiom import logger logger.level = logging.INFO logger.handlers = [logging.StreamHandler()] class TestIncompleteMultidimensionalTrajectory(unittest.TestCase): def setUp(self): self.single = os.path.join(os.path.dirname(__file__), 'resources', 'im-single.nc') self.multi = os.path.join(os.path.dirname(__file__), 'resources', 'im-multiple.nc') def test_imt_load(self): IncompleteMultidimensionalTrajectory(self.single).close() IncompleteMultidimensionalTrajectory(self.multi).close() def test_imt_dataframe(self): single_tmp = tempfile.mkstemp(suffix='.nc')[-1] with IncompleteMultidimensionalTrajectory(self.single) as ncd: single_df = ncd.to_dataframe(clean_rows=False) single_nc = IncompleteMultidimensionalTrajectory.from_dataframe(single_df, single_tmp) single_nc.close() os.remove(single_tmp) multip_tmp = tempfile.mkstemp(suffix='.nc')[-1] with IncompleteMultidimensionalTrajectory(self.multi) as ncd: multip_df = ncd.to_dataframe(clean_rows=False) multip_nc = IncompleteMultidimensionalTrajectory.from_dataframe(multip_df, multip_tmp) multip_nc.close() os.remove(multip_tmp) def test_imt_calculated_metadata(self): with IncompleteMultidimensionalTrajectory(self.single) as ncd: s = ncd.calculated_metadata() assert s.min_t == dtparse('1990-01-01 00:00:00') assert s.max_t == dtparse('1990-01-05 03:00:00') traj1 = s.trajectories["Trajectory1"] assert traj1.min_z == 0 assert traj1.max_z == 99 assert traj1.min_t == dtparse('1990-01-01 00:00:00') assert traj1.max_t == dtparse('1990-01-05 03:00:00') assert np.isclose(traj1.first_loc.x, -7.9336) assert np.isclose(traj1.first_loc.y, 42.00339) for n, v in ncd.variables.items(): assert np.issubdtype(v.dtype, np.int64) is False assert np.issubdtype(v.dtype, np.uint64) is False with IncompleteMultidimensionalTrajectory(self.multi) as ncd: m = ncd.calculated_metadata() assert m.min_t == dtparse('1990-01-01 00:00:00') assert m.max_t == dtparse('1990-01-02 12:00:00') assert len(m.trajectories) == 4 traj0 = m.trajectories["Trajectory0"] assert traj0.min_z == 0 assert traj0.max_z == 35 assert traj0.min_t == dtparse('1990-01-01 00:00:00') assert traj0.max_t == dtparse('1990-01-02 11:00:00') assert np.isclose(traj0.first_loc.x, -35.07884) assert np.isclose(traj0.first_loc.y, 2.15286) traj3 = m.trajectories["Trajectory3"] assert traj3.min_z == 0 assert traj3.max_z == 36 assert traj3.min_t == dtparse('1990-01-01 00:00:00') assert traj3.max_t == dtparse('1990-01-02 12:00:00') assert np.isclose(traj3.first_loc.x, -73.3026) assert np.isclose(traj3.first_loc.y, 1.95761) for n, v in ncd.variables.items(): assert np.issubdtype(v.dtype, np.int64) is False assert np.issubdtype(v.dtype, np.uint64) is False def test_json_attributes(self): with IncompleteMultidimensionalTrajectory(self.single) as s: s.json_attributes() with IncompleteMultidimensionalTrajectory(self.multi) as m: m.json_attributes()
{ "content_hash": "9c55a86815a76c33c58ab6688c8198c4", "timestamp": "", "source": "github", "line_count": 88, "max_line_length": 98, "avg_line_length": 42.35227272727273, "alnum_prop": 0.6289240676147035, "repo_name": "axiom-data-science/pyaxiom", "id": "29874c9cb08e46d65720548de2793617e54c07f8", "size": "3751", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pyaxiom/tests/dsg/trajectory/test_trajectory_im.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "246497" }, { "name": "Shell", "bytes": "451" } ], "symlink_target": "" }
import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ---------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'celery-simple-elasticsearch' copyright = u'2015, James Kelly and contributors' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # try: from celery_simple_elasticsearch import __version__ # The short X.Y version. version = '.'.join(__version__.split('.')[:2]) # The full version, including alpha/beta/rc tags. release = __version__ except ImportError: version = release = 'dev' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output -------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'celery-simple-elasticsearchdoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [ ( 'index', 'celery-simple-elasticsearch.tex', u'celery-simple-elasticsearch Documentation', u'James Kelly', 'manual' ), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output ------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ( 'index', 'celery-simple-elasticsearch', u'celery-simple-elasticsearch Documentation', [u'James Kelly'], 1 ) ] # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { 'python': ('http://docs.python.org/2.7', None), 'sphinx': ('http://sphinx.pocoo.org/', None), 'django': ('http://django.readthedocs.org/en/latest/', None), }
{ "content_hash": "6972df20a5dd74fea6225966eeacfcc9", "timestamp": "", "source": "github", "line_count": 227, "max_line_length": 80, "avg_line_length": 31.845814977973568, "alnum_prop": 0.6998201687646978, "repo_name": "jimjkelly/celery-simple-elasticsearch", "id": "bffc870c30f6578e0df99d9a251db542ba34ce33", "size": "7743", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "docs/conf.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "17268" } ], "symlink_target": "" }
from khmer import Read from khmer import ReadParser from screed import Record from . import khmer_tst_utils as utils import pytest from functools import reduce # pylint: disable=redefined-builtin def test_read_type_basic(): # test that basic properties of khmer.Read behave like screed.Record # Constructing without mandatory arguments should raise an exception with pytest.raises(TypeError): Read() name = "895:1:1:1246:14654 1:N:0:NNNNN" sequence = "ACGT" r = Read(name, sequence) s = Record(name, sequence) for x in (r, s): assert x.name == name assert x.sequence == sequence assert not hasattr(x, 'quality'), x assert not hasattr(x, 'description'), x def test_read_quality_none(): r = Read(name="test", sequence="ACGT", quality=None) assert not hasattr(r, 'quality') def test_read_type_attributes(): r = Read(sequence='ACGT', quality='good', name='1234', description='desc') assert r.sequence == 'ACGT' assert r.cleaned_seq == 'ACGT' assert r.quality == 'good' assert r.name == '1234' assert r.description == 'desc' def test_read_type_cleaned_seq(): r = Read(sequence='acgtnN', name='1234') assert r.sequence == 'acgtnN' assert r.cleaned_seq == 'ACGTAA' def test_read_properties(): # Note: Using a data file with only one read. rparser = ReadParser(utils.get_test_data("single-read.fq")) # Check the properties of all one reads in data set. for read in rparser: assert read.name == "895:1:1:1246:14654 1:N:0:NNNNN" assert read.sequence == "CAGGCGCCCACCACCGTGCCCTCCAACCTGATGGT" # if an attribute is empty it shouldn't exist assert not hasattr(read, 'annotations') assert read.quality == """][aaX__aa[`ZUZ[NONNFNNNNNO_____^RQ_""" def test_read_properties_fa(): # Note: Using a data file with only one read. rparser = ReadParser(utils.get_test_data("single-read.fa")) # Check the properties of all one reads in data set. for read in rparser: print(read.name) assert read.name == "895:1:1:1246:14654 1:N:0:NNNNN" assert read.sequence == "CAGGCGCCCACCACCGTGCCCTCCAACCTGATGGT" # if an attribute is empty it shouldn't exist assert not hasattr(read, 'quality') def test_with_default_arguments(): read_names = [] # Note: Using a data file where read names are just integers on [0,99). rparser = ReadParser(utils.get_test_data("random-20-a.fa")) for read in rparser: read_names.append(int(read.name)) # "Derandomize". read_names.sort() # Each read number should match the corresponding name. for m, n in enumerate(read_names): assert m == n def test_num_reads(): """Test ReadParser.num_reads""" reads_count = 0 rparser = ReadParser(utils.get_test_data("100-reads.fq.gz")) for _ in rparser: reads_count += 1 assert reads_count == 100 assert rparser.num_reads == 100 @pytest.mark.multithread def test_num_reads_threads(): """Test threadsaftey of ReadParser's read counting""" import threading def count_reads(rparser): for _ in rparser: pass n_threads = 4 threads = [] rparser = ReadParser(utils.get_test_data("100-reads.fq.gz")) for _ in range(n_threads): thr = threading.Thread(target=count_reads, args=[rparser, ]) threads.append(thr) thr.start() for thr in threads: thr.join() assert rparser.num_reads == 100 def test_num_reads_truncated(): n_reads = 0 rparser = ReadParser(utils.get_test_data("truncated.fq")) try: for _ in rparser: n_reads += 1 except ValueError as err: assert "Sequence is empty" in str(err), str(err) assert rparser.num_reads == 1, "%d valid reads in file, got %d" % ( n_reads, rparser.num_reads) def test_gzip_decompression(): reads_count = 0 rparser = ReadParser(utils.get_test_data("100-reads.fq.gz")) for _ in rparser: reads_count += 1 assert 100 == reads_count def test_gzip_decompression_truncated(): rparser = ReadParser(utils.get_test_data("100-reads.fq.truncated.gz")) try: for _ in rparser: pass assert 0, "this should fail" except OSError as err: print(str(err)) def test_gzip_decompression_truncated_pairiter(): rparser = ReadParser(utils.get_test_data("100-reads.fq.truncated.gz")) try: for _ in rparser.iter_read_pairs(): pass assert 0, "this should fail" except OSError as err: print(str(err)) except ValueError as err: print(str(err)) def test_bzip2_decompression(): reads_count = 0 rparser = ReadParser(utils.get_test_data("100-reads.fq.bz2")) for _ in rparser: reads_count += 1 assert 100 == reads_count def test_bzip2_decompression_truncated(): rparser = ReadParser(utils.get_test_data("100-reads.fq.truncated.bz2")) try: for _ in rparser: pass assert 0, "this should fail" except OSError as err: print(str(err)) except ValueError as err: print(str(err)) def test_bzip2_decompression_truncated_pairiter(): rparser = ReadParser(utils.get_test_data("100-reads.fq.truncated.bz2")) try: for _ in rparser.iter_read_pairs(): pass assert 0, "this should fail" except OSError as err: print(str(err)) except ValueError as err: print(str(err)) def test_badbzip2(): try: rparser = ReadParser(utils.get_test_data("test-empty.fa.bz2")) for _ in rparser: pass assert 0, "this should fail" except OSError as err: print(str(err)) except ValueError as err: print(str(err)) @pytest.mark.multithread def test_with_multiple_threads(testfile="test-reads.fq.bz2"): import operator import threading reads_count_1thr = 0 rparser = ReadParser(utils.get_test_data(testfile)) for _ in rparser: reads_count_1thr += 1 def count_reads(rparser, counters, tnum): counters[tnum] = reduce(operator.add, (1 for read in rparser)) N_THREADS = 4 threads = [] reads_counts_per_thread = [0] * N_THREADS rparser = ReadParser(utils.get_test_data(testfile)) for tnum in range(N_THREADS): t = \ threading.Thread( target=count_reads, args=[rparser, reads_counts_per_thread, tnum] ) threads.append(t) t.start() for t in threads: t.join() assert reads_count_1thr == sum(reads_counts_per_thread), \ reads_counts_per_thread @pytest.mark.multithread def test_with_multiple_threads_big(): test_with_multiple_threads(testfile="test-large.fa") @pytest.mark.multithread def test_old_illumina_pair_mating(): import threading rparser = ReadParser(utils.get_test_data("test-reads.fa")) def thread_1_runtime(rparser): for _ in rparser: pass def thread_2_runtime(rparser): for readnum, _ in enumerate(rparser): if 0 == readnum: pass t1 = threading.Thread(target=thread_1_runtime, args=[rparser]) t2 = threading.Thread(target=thread_2_runtime, args=[rparser]) t1.start() t2.start() t1.join() t2.join() @pytest.mark.multithread def test_casava_1_8_pair_mating(): import threading # Note: This file, when used in conjunction with a 64 KiB per-thread # prefetch buffer, tests the paired read mating logic with the # Casava >= 1.8 read name format. rparser = ReadParser(utils.get_test_data("test-reads.fq.bz2")) def thread_1_runtime(rparser): for _ in rparser: pass def thread_2_runtime(rparser): for readnum, _ in enumerate(rparser): if 0 == readnum: pass # assert "895:1:1:1761:13189 2:N:0:NNNNN" == read.name, read.name t1 = threading.Thread(target=thread_1_runtime, args=[rparser]) t2 = threading.Thread(target=thread_2_runtime, args=[rparser]) t1.start() t2.start() t1.join() t2.join() def test_read_truncated(): rparser = ReadParser(utils.get_test_data("truncated.fq")) try: for _ in rparser: pass assert 0, "No exception raised on a truncated file" except ValueError as err: assert "Sequence is empty" in str(err), str(err) def test_iterator_identities(): rparser = \ ReadParser(utils.get_test_data("test-abund-read-paired.fa")) assert rparser is rparser.__iter__() assert rparser is rparser.iter_reads() @pytest.mark.known_failing def test_read_pair_iterator_in_error_mode(): assert 0 rparser = \ ReadParser(utils.get_test_data("test-abund-read-paired.fa")) # If walks like an iterator and quacks like an iterator... rpi = rparser.iter_read_pairs() assert "__iter__" in dir(rpi) assert "next" in dir(rpi) # Are the alleged pairs actually pairs? read_pairs_1 = [] for read_1, read_2 in rpi: read_pairs_1.append([read_1, read_2]) assert read_1.name[: 19] == read_2.name[: 19] # Reload parser. # Note: No 'rewind' or 'reset' capability at the time of this writing. rparser = \ ReadParser(utils.get_test_data("test-abund-read-paired.fa")) # Ensure that error mode is the default mode. read_pairs_2 = [] for read_1, read_2 \ in rparser.iter_read_pairs(ReadParser.PAIR_MODE_ERROR_ON_UNPAIRED): read_pairs_2.append([read_1, read_2]) matches = [(rp1, rp2) for rp1, rp2 in zip(read_pairs_1, read_pairs_2) if rp1[0].name == rp2[0].name] assert all(matches) # Assert ALL the matches. :-] @pytest.mark.linux def test_read_pair_iterator_in_error_mode_xfail(): rparser = \ ReadParser(utils.get_test_data("test-abund-read-impaired.fa")) failed = True try: for _ in rparser.iter_read_pairs(): pass failed = False except ValueError as exc: assert "Invalid read pair" in str(exc), str(exc) assert failed def test_read_pair_iterator_in_error_mode_xfail_osxsafe(): rparser = \ ReadParser(utils.get_test_data("test-abund-read-impaired.fa")) failed = True try: for _ in rparser.iter_read_pairs(): pass failed = False except ValueError: pass assert failed @pytest.mark.known_failing def test_read_pair_iterator_in_ignore_mode(): assert 0 rparser = \ ReadParser(utils.get_test_data("test-abund-read-impaired.fa")) read_pairs = [] for read_1, read_2 \ in rparser.iter_read_pairs(ReadParser.PAIR_MODE_IGNORE_UNPAIRED): read_pairs.append([read_1, read_2]) assert read_1.name[: 19] == read_2.name[: 19] assert 2 == len(read_pairs) def test_constructor(): # Note: Using a data file with only one read. try: ReadParser(utils.get_test_data("single-read.fq"), "a") assert 0, ("ReadParser's constructor shouldn't accept a character for " "the number of threads") except TypeError as err: print(str(err)) try: ReadParser("non-existent-file-name") assert 0, "ReadParser shouldn't accept a non-existant file name" except ValueError as err: print(str(err)) except OSError as err: print(str(err)) def test_iternext(): try: rparser = ReadParser(utils.get_test_data("fakelump.fa.stoptags.txt")) read_pairs = [] for read_1, read_2 in rparser.iter_read_pairs(): read_pairs.append(read_1, read_2) assert 0, "Shouldn't be able to iterate over non FASTA file" except OSError as err: print(str(err)) except ValueError as err: print(str(err)) def test_clean_seq(): for read in ReadParser(utils.get_test_data("test-abund-read-3.fa")): clean = read.sequence.upper().replace("N", "A") assert clean == read.cleaned_seq def test_error_badly_formatted_file(): fname = utils.get_temp_filename('badly-formatted.fa') with open(fname, 'w') as f: f.write("not-sequence") with pytest.raises(OSError) as e: ReadParser(fname) assert e.match("contains badly formatted sequence") def test_error_file_does_not_exist(): fname = utils.get_temp_filename('does-not-exist.fa') with pytest.raises(OSError) as e: ReadParser(fname) assert e.match("does not exist") # vim: set filetype=python tabstop=4 softtabstop=4 shiftwidth=4 expandtab: # vim: set textwidth=79:
{ "content_hash": "205e287af64a0ef0be54268a380768a4", "timestamp": "", "source": "github", "line_count": 468, "max_line_length": 79, "avg_line_length": 27.173076923076923, "alnum_prop": 0.6243610914523866, "repo_name": "ged-lab/khmer", "id": "0601a888f2ed71be173ecedddf5b6fb41021d825", "size": "14589", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "tests/test_read_parsers.py", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "C++", "bytes": "507274" }, { "name": "GLSL", "bytes": "493" }, { "name": "Groff", "bytes": "9581" }, { "name": "Makefile", "bytes": "20859" }, { "name": "Python", "bytes": "961316" }, { "name": "Shell", "bytes": "4737" } ], "symlink_target": "" }
''' Created on 2016/1/29 :author: hubo ''' from vlcp.server import main from vlcp.config.config import manager if __name__ == '__main__': manager['module.httpserver.url'] = '' manager['module.httpserver.vhost.api.url'] = 'ltcp://localhost:8081/' main(None, ('vlcp.service.manage.webapi.WebAPI', 'vlcp.service.manage.modulemanager.Manager', 'vlcp.service.utils.knowledge.Knowledge'))
{ "content_hash": "c2c03f26a0c1bcc97ee9ec44782fc18c", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 97, "avg_line_length": 29.571428571428573, "alnum_prop": 0.6714975845410628, "repo_name": "hubo1016/vlcp", "id": "5e9d7f70362ff82e0a5b39a0c0ec0d2b9659e662", "size": "414", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "misc/testwebapi.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "3376" }, { "name": "HTML", "bytes": "731" }, { "name": "JavaScript", "bytes": "2583" }, { "name": "Python", "bytes": "2851834" }, { "name": "Shell", "bytes": "1510" } ], "symlink_target": "" }
import utility_function as uf import tensorflow as tf from bvlc_alexnet_fc7 import AlexNet import nt import numpy as np import os import time import cv2 import image_io # the dimension of the final layer = feature dim NN_DIM = 500 FEATURE_DIM = 43264 TRAIN_TXT = 'file_list_train_v2_cn5.txt' TEST_TXT = 'file_list_test.txt' TRAIN = True SHUFFLE_DATA = True BATCH_SIZE = 50 FEATURE_ROW = 227 FEATURE_COL = 227 RADIUS = 1.0 FLAGS = tf.app.flags.FLAGS tf.app.flags.DEFINE_string('train_log_dir','logs_v2', '''directory wherer to write event logs''') tf.app.flags.DEFINE_integer('max_training_iter', 100000, '''the max number of training iteration''') tf.app.flags.DEFINE_float('init_learning_rate',0.001, '''initial learning rate''') tf.app.flags.DEFINE_string('model_dir', 'model_logs_v2','''directory where to save the model''') def define_graph_config(): config_proto = tf.ConfigProto() config_proto.gpu_options.per_process_gpu_memory_fraction = 0.9 return config_proto def filequeue_to_batch_data(filename_queue, line_reader, batch_size = BATCH_SIZE): key, next_line = line_reader.read(filename_queue) query_image_name, retrieve_image_name, label = tf.decode_csv( next_line, [tf.constant([], dtype=tf.string), tf.constant([], dtype=tf.string), tf.constant([], dtype = tf.int32)], field_delim=" ") query_tensor = uf.read_binary(query_image_name, FEATURE_DIM) retrieve_tensor = uf.read_binary(retrieve_image_name, FEATURE_DIM) if SHUFFLE_DATA: min_after_dequeue = 100 capacity = min_after_dequeue + 3 * batch_size batch_query_image, batch_retrieve_image, batch_label = tf.train.shuffle_batch( [query_tensor, retrieve_tensor, label], batch_size=batch_size, capacity=capacity, min_after_dequeue=min_after_dequeue) else: batch_query_image, batch_retrieve_image, batch_label = tf.train.batch( [query_tensor, retrieve_tensor, label], batch_size=batch_size) batch_image = tf.concat(0,[batch_query_image, batch_retrieve_image]) return batch_image, batch_label def train(): train_filenamequeue=tf.train.string_input_producer([TRAIN_TXT], shuffle=SHUFFLE_DATA) line_reader = tf.TextLineReader() train_batch_image, train_batch_label = filequeue_to_batch_data(train_filenamequeue, line_reader) global_step = tf.Variable(0, name = 'global_step', trainable = False) image_data_ph = tf.placeholder(tf.float32, shape = (2 * BATCH_SIZE, FEATURE_DIM)) label_ph = tf.placeholder(tf.int32, shape = (BATCH_SIZE)) infer = nt.inference3(image_data_ph, NN_DIM) tloss = nt.triplet_loss(infer, label_ph, BATCH_SIZE, RADIUS) tf.scalar_summary('loss', tloss) for var in tf.trainable_variables(): tf.histogram_summary(var.op.name, var) merged_sum =tf.merge_all_summaries() lr = FLAGS.init_learning_rate train_op = nt.training(tloss, lr, global_step) saver = tf.train.Saver() config_proto = define_graph_config() sess = tf.Session(config = config_proto) if TRAIN: writer_sum = tf.train.SummaryWriter(FLAGS.train_log_dir,graph_def = sess.graph_def) init_op = tf.initialize_all_variables() sess.run(init_op) coord = tf.train.Coordinator() threads = tf.train.start_queue_runners(coord = coord, sess = sess) if TRAIN: for i in xrange(FLAGS.max_training_iter): batch_image_v, batch_label_v = sess.run([ train_batch_image, train_batch_label]) feed_data = {image_data_ph: batch_image_v, label_ph: batch_label_v} loss_v,_,merged_sum_v = sess.run([tloss, train_op, merged_sum], feed_dict = feed_data) if i % 100 == 0: print("i:%d, loss:%f"%(i,loss_v)) if i != 0: writer_sum.add_summary(merged_sum_v, i) if i != 0 and i % 500 == 0: curr_time = time.strftime("%Y%m%d_%H%M") model_name = FLAGS.model_dir + '/' + curr_time + '_iter_' + str(i) + '_model.ckpt' saver.save(sess,FLAGS.model_dir + '/' + curr_time + 'model.ckpt') def main(argv = None): if not os.path.exists(FLAGS.model_dir): os.makedirs(FLAGS.model_dir) if not os.path.exists(FLAGS.train_log_dir): os.makedirs(FLAGS.train_log_dir) train() if __name__ == '__main__': tf.app.run()
{ "content_hash": "4a3cb3013798dad7525e892c6403ae2b", "timestamp": "", "source": "github", "line_count": 121, "max_line_length": 100, "avg_line_length": 36.47933884297521, "alnum_prop": 0.6438604440416855, "repo_name": "polltooh/FineGrainedAction", "id": "27bb55a039a82839e69cf72658a77f68d4b86a55", "size": "4414", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "nn/train_nn_v2_cn5.py", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "24672" }, { "name": "CMake", "bytes": "1119" }, { "name": "Python", "bytes": "188905" }, { "name": "Shell", "bytes": "54" } ], "symlink_target": "" }
from modelling.sentiment_model import check_model from modelling.sentiment_model import load_data from modelling.sentiment_model import load_model from modelling.sentiment_model import predict from modelling.sentiment_model import save_model from modelling.sentiment_model import train_model
{ "content_hash": "832ae6015c480eaea9dd842308cc2e28", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 49, "avg_line_length": 48.666666666666664, "alnum_prop": 0.8595890410958904, "repo_name": "JohnCrickett/Sentiment", "id": "26f95e790b313f710f94703e90e963cda4f4e773", "size": "292", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modelling/__init__.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "117" }, { "name": "Jupyter Notebook", "bytes": "10139" }, { "name": "Python", "bytes": "9033" } ], "symlink_target": "" }
""" Statistical Models =========== This module combines traversal strategies for visiting and storing the spatial tree-structures. The spystats range from simple linear to Gaussian Processes. This file is intented to be big! Some functions an methods for implementing Gaussian simulation of autocorrelated processes. Author : Juan Escamilla With Great help of: Erick Chacón Montalván Date: 02/08/2017 """ import numpy as np import scipy as sp from functools import partial def corr_exp_ij(distance,phi=1.0): """ This function calculates the correlation function of an exponential model with parameter phi. Returns : correlation value for distance 'distance' notes """ return np.exp(-(distance / phi)) def exponentialModel(phi=1.0): """ A functional form of the exponentialModel """ def corr_exp_ij(distance): """ This function calculates the correlation function of an exponential model with parameter phi. Returns : correlation value for distance 'distance' """ return np.exp(-(distance / phi)) return corr_exp_ij ## This function returns a Distance Matrix given a list of pairs of the form (a,b). It will calculate de distance between (a and b) calculateDistanceMatrix = lambda list_of_vectors : np.array(map(lambda (a,b) : np.linalg.norm(a-b),list_of_vectors)) ## note it doesn't have shape of a matrix but doesn't matter. ## Calculate correlations given a valid model makeCorrelations = lambda model : lambda list_of_points : np.array(map(model,calculateDistanceMatrix(makeDuples(list_of_points)))) def calculateCovarianceMatrix(points,model,sigma=1.0): """ Returns the covariance matrix calculated from a $Z(x) \sim N(0,|Sigma)$ stationary anisotropic model. """ ## Calculate covariances makeCovarianceMatrix = lambda sigma : lambda model: lambda list_of_points : (makeCorrelations(model)(list_of_points) * sigma)#.reshape(100,100) return makeCovarianceMatrix(sigma)(model)(points) def exponentialModelFunctional(points,phi,sigma,betas,X): """ Test Use the exponential correlation with parameter phi and sigma """ n = len(points) f = exponentialModel(phi) CM = calculateCovarianceMatrix(points, f, sigma=sigma).reshape(n,n) weights = betas * X.transpose() w = np.array(weights)[0] model = sp.stats.multivariate_normal(w.flatten(),CM) return model def likelihoodFunction(phi,sigma,betas,X,Y_vector,points_location,family='bry'): """ Returns a MVN with parameters "parameters" """ mvn = exponentialModelFunctional(points_location, phi, sigma,betas,X) if family =='binary': p = mvn.pdf(Y_vector) return np.log(p) - np.log( 1 -p) else: return mvn.pdf(Y_vector) def MlikelihoodFunction(phi_sigma_tuple,X,Y_vector,points_location): """ Returns the minus pdf """ phi,sigma,b1,b2,b3 = phi_sigma_tuple betas = np.array([b1,b2,b3]) p = -1 * likelihoodFunction(phi,sigma,betas,X,Y_vector,points_location) return p ## Generate the pre-image . For this example will be: time \in R time = np.linspace(0, 100, 100) lat = np.linspace(0,100,10) lon = np.linspace(0,100,10) ### Calculate crosws product of time for distance matrix makeDuples = lambda list_of_points : [(i,j) for i in list_of_points for j in list_of_points] points = map(lambda l : np.array(l),makeDuples(lat)) #### Let's generate several correlation functions for different phis corr_exp_list = map(lambda phi : partial(corr_exp_ij,phi=phi),np.linspace(1,100,50)) corr_exp_list = map(lambda phi : partial(corr_exp_ij,phi=phi),[0.001,20,80]) corr_exp_list = map(lambda phi : partial(corr_exp_ij,phi=phi),[20]) ## Different spystats for phi covarianceMatricesModels = lambda list_of_points : map(lambda model : makeCovarianceMatrix(1.0)(model)(list_of_points),corr_exp_list) #covarianceMatricesModels = map(lambda model : makeCovarianceMatrix(1.0)(list_of_points)(list_of_points),corr_exp_list) ## Simulation process zeros = lambda list_of_points : np.zeros(np.sqrt(len(list_of_points))**2) simulateWithThisPoints = lambda list_of_points : map(lambda Sigma : sp.random.multivariate_normal(zeros(list_of_points),Sigma.reshape(len(list_of_points),len(list_of_points))),covarianceMatricesModels(list_of_points)) ## Statistical Models # from spystats.statistical import * # t_30_10 = exponentialModelFunctional(points,phi=30,sigma=10) # phis =np.linspace(20,40,20) # sigmas =np.linspace(5,15,10) # phis_sigma = [(phi , sigma) for phi in phis for sigma in sigmas] # ys = t_30_10.rvs() # superfunciones = map(lambda (phi,sigma) : likelihoodFunction(phi,sigma,ys,points),phis_sigma) # A = np.array(superfunciones).reshape(10,20) # import matplotlib.pyplot as plt # plt.imshow(A) # plt.show() # A = np.array(superfunciones).reshape(20,10) # plt.imshow(A) # plt.show() # superfunciones # chula = zip(superfunciones,phis_sigma) # chula # chula.sort(key=lambda renglon : renglon[0])
{ "content_hash": "0c60dd43c006e1699b91d689f4e4a8ba", "timestamp": "", "source": "github", "line_count": 190, "max_line_length": 217, "avg_line_length": 27.03157894736842, "alnum_prop": 0.6929517133956387, "repo_name": "molgor/spystats", "id": "dc24d190f49c55268a2a6958a03baa9d0652c479", "size": "5183", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spystats/models.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "DIGITAL Command Language", "bytes": "1026" }, { "name": "Dockerfile", "bytes": "905" }, { "name": "Jupyter Notebook", "bytes": "62905486" }, { "name": "Python", "bytes": "135774" }, { "name": "Shell", "bytes": "705" } ], "symlink_target": "" }
''' Extract _("...") strings for translation and convert to Qt4 stringdefs so that they can be picked up by Qt linguist. ''' from subprocess import Popen, PIPE import glob import operator OUT_CPP="src/qt/tinegestrings.cpp" EMPTY=['""'] def parse_po(text): """ Parse 'po' format produced by xgettext. Return a list of (msgid,msgstr) tuples. """ messages = [] msgid = [] msgstr = [] in_msgid = False in_msgstr = False for line in text.split('\n'): line = line.rstrip('\r') if line.startswith('msgid '): if in_msgstr: messages.append((msgid, msgstr)) in_msgstr = False # message start in_msgid = True msgid = [line[6:]] elif line.startswith('msgstr '): in_msgid = False in_msgstr = True msgstr = [line[7:]] elif line.startswith('"'): if in_msgid: msgid.append(line) if in_msgstr: msgstr.append(line) if in_msgstr: messages.append((msgid, msgstr)) return messages files = glob.glob('src/*.cpp') + glob.glob('src/*.h') # xgettext -n --keyword=_ $FILES child = Popen(['xgettext','--output=-','-n','--keyword=_'] + files, stdout=PIPE) (out, err) = child.communicate() messages = parse_po(out) f = open(OUT_CPP, 'w') f.write("""#include <QtGlobal> // Automatically generated by extract_strings.py #ifdef __GNUC__ #define UNUSED __attribute__((unused)) #else #define UNUSED #endif """) f.write('static const char UNUSED *tinege_strings[] = {\n') messages.sort(key=operator.itemgetter(0)) for (msgid, msgstr) in messages: if msgid != EMPTY: f.write('QT_TRANSLATE_NOOP("tinege-core", %s),\n' % ('\n'.join(msgid))) f.write('};') f.close()
{ "content_hash": "68df548905163e47aeae6ab5e0c45995", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 80, "avg_line_length": 25.676056338028168, "alnum_prop": 0.5699396599012616, "repo_name": "Tinege/Tinege", "id": "239668c2414d0f58961d5478826ea1626f0ccf68", "size": "1841", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "share/qt/extract_strings_qt.py", "mode": "33261", "license": "mit", "language": [ { "name": "C", "bytes": "92056" }, { "name": "C++", "bytes": "2490568" }, { "name": "CSS", "bytes": "1127" }, { "name": "IDL", "bytes": "13866" }, { "name": "Objective-C++", "bytes": "5711" }, { "name": "Python", "bytes": "38218" }, { "name": "Shell", "bytes": "20015" }, { "name": "TypeScript", "bytes": "5228978" } ], "symlink_target": "" }
import os import re import shutil import logging from migrate import exceptions from migrate.versioning import pathed, script log = logging.getLogger(__name__) class VerNum(object): """A version number that behaves like a string and int at the same time""" _instances = dict() def __new__(cls, value): val = str(value) if val not in cls._instances: cls._instances[val] = super(VerNum, cls).__new__(cls) ret = cls._instances[val] return ret def __init__(self,value): self.value = str(int(value)) if self < 0: raise ValueError("Version number cannot be negative") def __add__(self, value): ret = int(self) + int(value) return VerNum(ret) def __sub__(self, value): return self + (int(value) * -1) def __cmp__(self, value): return int(self) - int(value) def __repr__(self): return "<VerNum(%s)>" % self.value def __str__(self): return str(self.value) def __int__(self): return int(self.value) class Collection(pathed.Pathed): """A collection of versioning scripts in a repository""" FILENAME_WITH_VERSION = re.compile(r'^(\d{3,}).*') def __init__(self, path): """Collect current version scripts in repository and store them in self.versions """ super(Collection, self).__init__(path) # Create temporary list of files, allowing skipped version numbers. files = os.listdir(path) if '1' in files: # deprecation raise Exception('It looks like you have a repository in the old ' 'format (with directories for each version). ' 'Please convert repository before proceeding.') tempVersions = dict() for filename in files: match = self.FILENAME_WITH_VERSION.match(filename) if match: num = int(match.group(1)) tempVersions.setdefault(num, []).append(filename) else: pass # Must be a helper file or something, let's ignore it. # Create the versions member where the keys # are VerNum's and the values are Version's. self.versions = dict() for num, files in tempVersions.items(): self.versions[VerNum(num)] = Version(num, path, files) @property def latest(self): """:returns: Latest version in Collection""" return max([VerNum(0)] + self.versions.keys()) def create_new_python_version(self, description, **k): """Create Python files for new version""" ver = self.latest + 1 extra = str_to_filename(description) if extra: if extra == '_': extra = '' elif not extra.startswith('_'): extra = '_%s' % extra filename = '%03d%s.py' % (ver, extra) filepath = self._version_path(filename) script.PythonScript.create(filepath, **k) self.versions[ver] = Version(ver, self.path, [filename]) def create_new_sql_version(self, database, **k): """Create SQL files for new version""" ver = self.latest + 1 self.versions[ver] = Version(ver, self.path, []) # Create new files. for op in ('upgrade', 'downgrade'): filename = '%03d_%s_%s.sql' % (ver, database, op) filepath = self._version_path(filename) script.SqlScript.create(filepath, **k) self.versions[ver].add_script(filepath) def version(self, vernum=None): """Returns latest Version if vernum is not given. Otherwise, returns wanted version""" if vernum is None: vernum = self.latest return self.versions[VerNum(vernum)] @classmethod def clear(cls): super(Collection, cls).clear() def _version_path(self, ver): """Returns path of file in versions repository""" return os.path.join(self.path, str(ver)) class Version(object): """A single version in a collection :param vernum: Version Number :param path: Path to script files :param filelist: List of scripts :type vernum: int, VerNum :type path: string :type filelist: list """ def __init__(self, vernum, path, filelist): self.version = VerNum(vernum) # Collect scripts in this folder self.sql = dict() self.python = None for script in filelist: self.add_script(os.path.join(path, script)) def script(self, database=None, operation=None): """Returns SQL or Python Script""" for db in (database, 'default'): # Try to return a .sql script first try: return self.sql[db][operation] except KeyError: continue # No .sql script exists # TODO: maybe add force Python parameter? ret = self.python assert ret is not None, \ "There is no script for %d version" % self.version return ret def add_script(self, path): """Add script to Collection/Version""" if path.endswith(Extensions.py): self._add_script_py(path) elif path.endswith(Extensions.sql): self._add_script_sql(path) SQL_FILENAME = re.compile(r'^(\d+)_([^_]+)_([^_]+).sql') def _add_script_sql(self, path): basename = os.path.basename(path) match = self.SQL_FILENAME.match(basename) if match: version, dbms, op = match.group(1), match.group(2), match.group(3) else: raise exceptions.ScriptError( "Invalid SQL script name %s " % basename + \ "(needs to be ###_database_operation.sql)") # File the script into a dictionary self.sql.setdefault(dbms, {})[op] = script.SqlScript(path) def _add_script_py(self, path): if self.python is not None: raise exceptions.ScriptError('You can only have one Python script ' 'per version, but you have: %s and %s' % (self.python, path)) self.python = script.PythonScript(path) class Extensions: """A namespace for file extensions""" py = 'py' sql = 'sql' def str_to_filename(s): """Replaces spaces, (double and single) quotes and double underscores to underscores """ s = s.replace(' ', '_').replace('"', '_').replace("'", '_').replace(".", "_") while '__' in s: s = s.replace('__', '_') return s
{ "content_hash": "1f8678b1b6997f686ce01d8151e191ed", "timestamp": "", "source": "github", "line_count": 212, "max_line_length": 81, "avg_line_length": 30.9811320754717, "alnum_prop": 0.570341047503045, "repo_name": "eunchong/build", "id": "a09b8cd9dc5597816f57696109a2477298248d29", "size": "6615", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "third_party/sqlalchemy_migrate_0_7_1/migrate/versioning/version.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "3128" }, { "name": "CSS", "bytes": "211818" }, { "name": "HTML", "bytes": "429981" }, { "name": "JavaScript", "bytes": "75624" }, { "name": "Makefile", "bytes": "21204" }, { "name": "Python", "bytes": "6143109" }, { "name": "Shell", "bytes": "23512" } ], "symlink_target": "" }
from base64 import b64encode from libcloud.common.base import Connection, JsonResponse from libcloud.container.base import ContainerImage __all__ = ["RegistryClient", "HubClient"] class DockerHubConnection(Connection): responseCls = JsonResponse def __init__( self, host, username=None, password=None, secure=True, port=None, url=None, timeout=None, proxy_url=None, backoff=None, retry_delay=None, ): super().__init__( secure=secure, host=host, port=port, url=url, timeout=timeout, proxy_url=proxy_url, backoff=backoff, retry_delay=retry_delay, ) self.username = username self.password = password def add_default_headers(self, headers): headers["Content-Type"] = "application/json" if self.username is not None: authstr = "Basic " + str( b64encode(("{}:{}".format(self.username, self.password)).encode("latin1")).strip() ) headers["Authorization"] = authstr return headers class RegistryClient: """ A client for the Docker v2 registry API """ connectionCls = DockerHubConnection def __init__(self, host, username=None, password=None, **kwargs): """ Construct a Docker registry client :param host: Your registry endpoint, e.g. 'registry.hub.docker.com' :type host: ``str`` :param username: (optional) Your registry account username :type username: ``str`` :param password: (optional) Your registry account password :type password: ``str`` """ self.connection = self.connectionCls(host, username, password, **kwargs) def list_images(self, repository_name, namespace="library", max_count=100): """ List the tags (versions) in a repository :param repository_name: The name of the repository e.g. 'ubuntu' :type repository_name: ``str`` :param namespace: (optional) The docker namespace :type namespace: ``str`` :param max_count: The maximum number of records to return :type max_count: ``int`` :return: A list of images :rtype: ``list`` of :class:`libcloud.container.base.ContainerImage` """ path = "/v2/repositories/{}/{}/tags/?page=1&page_size={}".format( namespace, repository_name, max_count, ) response = self.connection.request(path) images = [] for image in response.object["results"]: images.append(self._to_image(repository_name, image)) return images def get_repository(self, repository_name, namespace="library"): """ Get the information about a specific repository :param repository_name: The name of the repository e.g. 'ubuntu' :type repository_name: ``str`` :param namespace: (optional) The docker namespace :type namespace: ``str`` :return: The details of the repository :rtype: ``object`` """ path = "/v2/repositories/{}/{}/".format(namespace, repository_name) response = self.connection.request(path) return response.object def get_image(self, repository_name, tag="latest", namespace="library"): """ Get an image from a repository with a specific tag :param repository_name: The name of the repository, e.g. ubuntu :type repository_name: ``str`` :param tag: (optional) The image tag (defaults to latest) :type tag: ``str`` :param namespace: (optional) The docker namespace :type namespace: ``str`` :return: A container image :rtype: :class:`libcloud.container.base.ContainerImage` """ path = "/v2/repositories/{}/{}/tags/{}/".format(namespace, repository_name, tag) response = self.connection.request(path) return self._to_image(repository_name, response.object) def _to_image(self, repository_name, obj): path = "{}/{}:{}".format(self.connection.host, repository_name, obj["name"]) return ContainerImage( id=obj["id"], path=path, name=path, version=obj["name"], extra={"full_size": obj["full_size"]}, driver=None, ) class HubClient(RegistryClient): """ A client for the Docker Hub API The hub is based on the v2 registry API """ host = "registry.hub.docker.com" def __init__(self, username=None, password=None, **kwargs): """ Construct a Docker hub client :param username: (optional) Your Hub account username :type username: ``str`` :param password: (optional) Your hub account password :type password: ``str`` """ super().__init__(self.host, username, password, **kwargs)
{ "content_hash": "d00ee1a2be31f7b160c8204a375924b5", "timestamp": "", "source": "github", "line_count": 165, "max_line_length": 98, "avg_line_length": 30.618181818181817, "alnum_prop": 0.581353919239905, "repo_name": "apache/libcloud", "id": "97c65024d7fd1d76ff44531a2e34ca187e07d769", "size": "5835", "binary": false, "copies": "2", "ref": "refs/heads/trunk", "path": "libcloud/container/utils/docker.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "2155" }, { "name": "HTML", "bytes": "2545" }, { "name": "PowerShell", "bytes": "410" }, { "name": "Python", "bytes": "9105547" }, { "name": "Shell", "bytes": "12994" } ], "symlink_target": "" }
""" helloglwidgetplugin.py A simple OpenGL custom widget plugin for Qt Designer. Copyright (C) 2006 David Boddie <[email protected]> Copyright (C) 2005-2006 Trolltech ASA. All rights reserved. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA """ from PyQt5.QtGui import QIcon from PyQt5.QtDesigner import QPyDesignerCustomWidgetPlugin from my_custom_widget import MyCustomWidget class MyCustomWidgetPlugin(QPyDesignerCustomWidgetPlugin): """MyCustomWidgetPlugin(QPyDesignerCustomWidgetPlugin) Provides a Python custom plugin for Qt Designer by implementing the QDesignerCustomWidgetPlugin via a PyQt-specific custom plugin class. """ # The __init__() method is only used to set up the plugin and define its # initialized variable. def __init__(self, parent=None): super(MyCustomWidgetPlugin, self).__init__(parent) self.initialized = False # The initialize() and isInitialized() methods allow the plugin to set up # any required resources, ensuring that this can only happen once for each # plugin. def initialize(self, core): if self.initialized: return self.initialized = True def isInitialized(self): return self.initialized # This factory method creates new instances of our custom widget with the # appropriate parent. def createWidget(self, parent): return MyCustomWidget(parent) # This method returns the name of the custom widget class that is provided # by this plugin. def name(self): return "MyCustomWidget" # Returns the name of the group in Qt Designer's widget box that this # widget belongs to. def group(self): return "Display Widgets" # Returns the icon used to represent the custom widget in Qt Designer's # widget box. def icon(self): return QIcon() # Returns a short description of the custom widget for use in a tool tip. def toolTip(self): return "" # Returns a short description of the custom widget for use in a "What's # This?" help message for the widget. def whatsThis(self): return "" # Returns True if the custom widget acts as a container for other widgets; # otherwise returns False. Note that plugins for custom containers also # need to provide an implementation of the QDesignerContainerExtension # interface if they need to add custom editing support to Qt Designer. def isContainer(self): return False # Returns an XML description of a custom widget instance that describes # default values for its properties. Each custom widget created by this # plugin will be configured using this description. def domXml(self): return '<widget class="MyCustomWidget" name="mycustomwidget" />\n' # Returns the module containing the custom widget class. It may include # a module path. def includeFile(self): return "my_custom_widget"
{ "content_hash": "0bb752636ba0e6d5b2fe2054e61f6e7c", "timestamp": "", "source": "github", "line_count": 97, "max_line_length": 78, "avg_line_length": 37.123711340206185, "alnum_prop": 0.7220216606498195, "repo_name": "SyllogismRXS/misc", "id": "c8d50d8a6c2ab7f911fbba6eadbd50a9e4af2e74", "size": "3624", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "gui/widget-test/graveyard/mycustomwidgetplugin.py", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "83291" }, { "name": "CMake", "bytes": "19330" }, { "name": "Dockerfile", "bytes": "591" }, { "name": "Makefile", "bytes": "552" }, { "name": "OpenEdge ABL", "bytes": "28814605" }, { "name": "Python", "bytes": "27776" }, { "name": "Shell", "bytes": "71" } ], "symlink_target": "" }
from mistral.db.v2 import api as db_api from mistral.services import workbooks as wb_service from mistral.services import workflows as wf_service from mistral.tests.unit.engine import base from mistral.workflow import states class WorkflowCancelTest(base.EngineTestCase): def test_cancel_workflow(self): workflow = """ version: '2.0' wf: type: direct tasks: task1: action: std.echo output="Echo" """ wf_service.create_workflows(workflow) wf_ex = self.engine.start_workflow('wf') self.engine.stop_workflow( wf_ex.id, states.CANCELLED, "Cancelled by user." ) self.await_workflow_cancelled(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_execs = wf_ex.task_executions task_1_ex = self._assert_single_item(task_execs, name='task1') self.await_task_success(task_1_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_execs = wf_ex.task_executions task_1_ex = self._assert_single_item(task_execs, name='task1') self.assertEqual(states.CANCELLED, wf_ex.state) self.assertEqual("Cancelled by user.", wf_ex.state_info) self.assertEqual(1, len(task_execs)) self.assertEqual(states.SUCCESS, task_1_ex.state) def test_cancel_workflow_if_definition_deleted(self): workflow = """ version: '2.0' wf: type: direct tasks: task1: action: std.echo output="foo" wait-before: 5 """ wf = wf_service.create_workflows(workflow)[0] wf_ex = self.engine.start_workflow('wf') with db_api.transaction(): db_api.delete_workflow_definition(wf.id) self.engine.stop_workflow( wf_ex.id, states.CANCELLED, "Cancelled by user." ) self.await_workflow_cancelled(wf_ex.id) def test_cancel_paused_workflow(self): workflow = """ version: '2.0' wf: type: direct tasks: task1: action: std.echo output="Echo" """ wf_service.create_workflows(workflow) wf_ex = self.engine.start_workflow('wf') self.engine.pause_workflow(wf_ex.id) self.await_workflow_paused(wf_ex.id) self.engine.stop_workflow( wf_ex.id, states.CANCELLED, "Cancelled by user." ) self.await_workflow_cancelled(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_execs = wf_ex.task_executions task_1_ex = self._assert_single_item(task_execs, name='task1') self.await_task_success(task_1_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_execs = wf_ex.task_executions task_1_ex = self._assert_single_item( task_execs, name='task1' ) self.assertEqual(states.CANCELLED, wf_ex.state) self.assertEqual("Cancelled by user.", wf_ex.state_info) self.assertEqual(1, len(task_execs)) self.assertEqual(states.SUCCESS, task_1_ex.state) def test_cancel_completed_workflow(self): workflow = """ version: '2.0' wf: type: direct tasks: task1: action: std.echo output="Echo" """ wf_service.create_workflows(workflow) wf_ex = self.engine.start_workflow('wf') self.await_workflow_success(wf_ex.id) self.engine.stop_workflow( wf_ex.id, states.CANCELLED, "Cancelled by user." ) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_execs = wf_ex.task_executions task_1_ex = self._assert_single_item(task_execs, name='task1') self.assertEqual(states.SUCCESS, wf_ex.state) self.assertIsNone(wf_ex.state_info) self.assertEqual(1, len(task_execs)) self.assertEqual(states.SUCCESS, task_1_ex.state) def test_cancel_parent_workflow(self): workbook = """ version: '2.0' name: wb workflows: wf: type: direct tasks: taskx: workflow: subwf subwf: type: direct tasks: task1: action: std.echo output="Echo" """ wb_service.create_workbook_v2(workbook) wf_ex = self.engine.start_workflow('wb.wf') self.engine.stop_workflow( wf_ex.id, states.CANCELLED, "Cancelled by user." ) self.await_workflow_cancelled(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_execs = wf_ex.task_executions task_ex = self._assert_single_item(task_execs, name='taskx') self.await_task_cancelled(task_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_execs = wf_ex.task_executions task_ex = self._assert_single_item(task_execs, name='taskx') subwf_execs = db_api.get_workflow_executions( task_execution_id=task_ex.id ) self.assertEqual(states.CANCELLED, wf_ex.state) self.assertEqual("Cancelled by user.", wf_ex.state_info) self.assertEqual(states.CANCELLED, task_ex.state) self.assertEqual("Cancelled by user.", task_ex.state_info) self.assertEqual(1, len(subwf_execs)) self.assertEqual(states.CANCELLED, subwf_execs[0].state) self.assertEqual("Cancelled by user.", subwf_execs[0].state_info) def test_cancel_child_workflow(self): workbook = """ version: '2.0' name: wb workflows: wf: type: direct tasks: taskx: workflow: subwf subwf: type: direct tasks: task1: action: std.echo output="Echo" on-complete: - task2 task2: action: std.echo output="foo" wait-before: 3 """ wb_service.create_workbook_v2(workbook) self.engine.start_workflow('wb.wf') with db_api.transaction(): wf_execs = db_api.get_workflow_executions() wf_ex = self._assert_single_item(wf_execs, name='wb.wf') task_ex = self._assert_single_item( wf_ex.task_executions, name='taskx' ) subwf_ex = self._assert_single_item(wf_execs, name='wb.subwf') self.engine.stop_workflow( subwf_ex.id, states.CANCELLED, "Cancelled by user." ) self.await_workflow_cancelled(subwf_ex.id) self.await_task_cancelled(task_ex.id) self.await_workflow_cancelled(wf_ex.id) with db_api.transaction(): wf_execs = db_api.get_workflow_executions() wf_ex = self._assert_single_item(wf_execs, name='wb.wf') task_ex = self._assert_single_item( wf_ex.task_executions, name='taskx' ) subwf_ex = self._assert_single_item(wf_execs, name='wb.subwf') self.assertEqual(states.CANCELLED, subwf_ex.state) self.assertEqual("Cancelled by user.", subwf_ex.state_info) self.assertEqual(states.CANCELLED, task_ex.state) self.assertIn("Cancelled by user.", task_ex.state_info) self.assertEqual(states.CANCELLED, wf_ex.state) self.assertEqual("Cancelled tasks: taskx", wf_ex.state_info) def test_cancel_with_items_parent_workflow(self): workbook = """ version: '2.0' name: wb workflows: wf: type: direct tasks: taskx: with-items: i in [1, 2] workflow: subwf subwf: type: direct tasks: task1: action: std.echo output="Echo" on-complete: - task2 task2: action: std.echo output="foo" wait-before: 1 """ wb_service.create_workbook_v2(workbook) wf_ex = self.engine.start_workflow('wb.wf') self.engine.stop_workflow( wf_ex.id, states.CANCELLED, "Cancelled by user." ) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_execs = wf_ex.task_executions task_ex = self._assert_single_item(task_execs, name='taskx') self.await_workflow_cancelled(wf_ex.id) self.await_task_cancelled(task_ex.id) with db_api.transaction(): wf_execs = db_api.get_workflow_executions() wf_ex = self._assert_single_item(wf_execs, name='wb.wf') task_ex = self._assert_single_item( wf_ex.task_executions, name='taskx' ) subwf_exs = self._assert_multiple_items( wf_execs, 2, name='wb.subwf' ) self.assertEqual(states.CANCELLED, subwf_exs[0].state) self.assertEqual("Cancelled by user.", subwf_exs[0].state_info) self.assertEqual(states.CANCELLED, subwf_exs[1].state) self.assertEqual("Cancelled by user.", subwf_exs[1].state_info) self.assertEqual(states.CANCELLED, task_ex.state) self.assertIn("cancelled", task_ex.state_info) self.assertEqual(states.CANCELLED, wf_ex.state) self.assertEqual("Cancelled by user.", wf_ex.state_info) def test_cancel_with_items_child_workflow(self): workbook = """ version: '2.0' name: wb workflows: wf: type: direct tasks: taskx: with-items: i in [1, 2] workflow: subwf subwf: type: direct tasks: task1: action: std.echo output="Echo" on-complete: - task2 task2: action: std.echo output="foo" wait-before: 1 """ wb_service.create_workbook_v2(workbook) self.engine.start_workflow('wb.wf') with db_api.transaction(): wf_execs = db_api.get_workflow_executions() wf_ex = self._assert_single_item(wf_execs, name='wb.wf') task_ex = self._assert_single_item( wf_ex.task_executions, name='taskx' ) subwf_exs = self._assert_multiple_items( wf_execs, 2, name='wb.subwf' ) self.engine.stop_workflow( subwf_exs[0].id, states.CANCELLED, "Cancelled by user." ) self.await_workflow_cancelled(subwf_exs[0].id) self.await_workflow_success(subwf_exs[1].id) self.await_task_cancelled(task_ex.id) self.await_workflow_cancelled(wf_ex.id) with db_api.transaction(): wf_execs = db_api.get_workflow_executions() wf_ex = self._assert_single_item(wf_execs, name='wb.wf') task_ex = self._assert_single_item( wf_ex.task_executions, name='taskx' ) subwf_exs = self._assert_multiple_items( wf_execs, 2, name='wb.subwf' ) self.assertEqual(states.CANCELLED, subwf_exs[0].state) self.assertEqual("Cancelled by user.", subwf_exs[0].state_info) self.assertEqual(states.SUCCESS, subwf_exs[1].state) self.assertIsNone(subwf_exs[1].state_info) self.assertEqual(states.CANCELLED, task_ex.state) self.assertIn("cancelled", task_ex.state_info) self.assertEqual(states.CANCELLED, wf_ex.state) self.assertEqual("Cancelled tasks: taskx", wf_ex.state_info) def test_cancel_then_fail_with_items_child_workflow(self): workbook = """ version: '2.0' name: wb workflows: wf: type: direct tasks: taskx: with-items: i in [1, 2] workflow: subwf subwf: type: direct tasks: task1: action: std.echo output="Echo" on-complete: - task2 task2: action: std.echo output="foo" wait-before: 1 """ wb_service.create_workbook_v2(workbook) self.engine.start_workflow('wb.wf') with db_api.transaction(): wf_execs = db_api.get_workflow_executions() wf_ex = self._assert_single_item(wf_execs, name='wb.wf') task_ex = self._assert_single_item( wf_ex.task_executions, name='taskx' ) subwf_exs = self._assert_multiple_items( wf_execs, 2, name='wb.subwf' ) self.engine.stop_workflow( subwf_exs[0].id, states.CANCELLED, "Cancelled by user." ) self.engine.stop_workflow( subwf_exs[1].id, states.ERROR, "Failed by user." ) self.await_workflow_cancelled(subwf_exs[0].id) self.await_workflow_error(subwf_exs[1].id) self.await_task_cancelled(task_ex.id) self.await_workflow_cancelled(wf_ex.id) with db_api.transaction(): wf_execs = db_api.get_workflow_executions() wf_ex = self._assert_single_item(wf_execs, name='wb.wf') task_ex = self._assert_single_item( wf_ex.task_executions, name='taskx' ) subwf_exs = self._assert_multiple_items( wf_execs, 2, name='wb.subwf' ) self.assertEqual(states.CANCELLED, subwf_exs[0].state) self.assertEqual("Cancelled by user.", subwf_exs[0].state_info) self.assertEqual(states.ERROR, subwf_exs[1].state) self.assertEqual("Failed by user.", subwf_exs[1].state_info) self.assertEqual(states.CANCELLED, task_ex.state) self.assertIn("cancelled", task_ex.state_info) self.assertEqual(states.CANCELLED, wf_ex.state) self.assertEqual("Cancelled tasks: taskx", wf_ex.state_info) def test_fail_then_cancel_with_items_child_workflow(self): workbook = """ version: '2.0' name: wb workflows: wf: type: direct tasks: taskx: with-items: i in [1, 2] workflow: subwf subwf: type: direct tasks: task1: action: std.echo output="Echo" on-complete: - task2 task2: action: std.echo output="foo" wait-before: 1 """ wb_service.create_workbook_v2(workbook) self.engine.start_workflow('wb.wf') with db_api.transaction(): wf_execs = db_api.get_workflow_executions() wf_ex = self._assert_single_item(wf_execs, name='wb.wf') task_ex = self._assert_single_item( wf_ex.task_executions, name='taskx' ) subwf_exs = self._assert_multiple_items( wf_execs, 2, name='wb.subwf' ) self.engine.stop_workflow( subwf_exs[1].id, states.ERROR, "Failed by user." ) self.engine.stop_workflow( subwf_exs[0].id, states.CANCELLED, "Cancelled by user." ) self.await_workflow_cancelled(subwf_exs[0].id) self.await_workflow_error(subwf_exs[1].id) self.await_task_cancelled(task_ex.id) self.await_workflow_cancelled(wf_ex.id) with db_api.transaction(): wf_execs = db_api.get_workflow_executions() wf_ex = self._assert_single_item(wf_execs, name='wb.wf') task_ex = self._assert_single_item( wf_ex.task_executions, name='taskx' ) subwf_exs = self._assert_multiple_items( wf_execs, 2, name='wb.subwf' ) self.assertEqual(states.CANCELLED, subwf_exs[0].state) self.assertEqual("Cancelled by user.", subwf_exs[0].state_info) self.assertEqual(states.ERROR, subwf_exs[1].state) self.assertEqual("Failed by user.", subwf_exs[1].state_info) self.assertEqual(states.CANCELLED, task_ex.state) self.assertIn("cancelled", task_ex.state_info) self.assertEqual(states.CANCELLED, wf_ex.state) self.assertEqual("Cancelled tasks: taskx", wf_ex.state_info)
{ "content_hash": "c851f2c89baa283562185ff5e556272c", "timestamp": "", "source": "github", "line_count": 609, "max_line_length": 74, "avg_line_length": 29.10673234811166, "alnum_prop": 0.5223964797472639, "repo_name": "openstack/mistral", "id": "ffcc18161da73f91f869138f433ab8d0cd00f9d1", "size": "18389", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "mistral/tests/unit/engine/test_workflow_cancel.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "2091" }, { "name": "Mako", "bytes": "951" }, { "name": "Python", "bytes": "2617595" }, { "name": "Shell", "bytes": "26731" } ], "symlink_target": "" }
import datetime import json import os import random as _random import sys import traceback from getopt import getopt, GetoptError from multiprocessing import Process from os import environ from wsgiref.simple_server import make_server import requests as _requests from jsonrpcbase import JSONRPCService, InvalidParamsError, KeywordError, \ JSONRPCError, InvalidRequestError from jsonrpcbase import ServerError as JSONServerError from biokbase import log from MEGAHIT.authclient import KBaseAuth as _KBaseAuth try: from ConfigParser import ConfigParser except ImportError: from configparser import ConfigParser DEPLOY = 'KB_DEPLOYMENT_CONFIG' SERVICE = 'KB_SERVICE_NAME' AUTH = 'auth-service-url' # Note that the error fields do not match the 2.0 JSONRPC spec def get_config_file(): return environ.get(DEPLOY, None) def get_service_name(): return environ.get(SERVICE, None) def get_config(): if not get_config_file(): return None retconfig = {} config = ConfigParser() config.read(get_config_file()) for nameval in config.items(get_service_name() or 'MEGAHIT'): retconfig[nameval[0]] = nameval[1] return retconfig config = get_config() from MEGAHIT.MEGAHITImpl import MEGAHIT # noqa @IgnorePep8 impl_MEGAHIT = MEGAHIT(config) class JSONObjectEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, set): return list(obj) if isinstance(obj, frozenset): return list(obj) if hasattr(obj, 'toJSONable'): return obj.toJSONable() return json.JSONEncoder.default(self, obj) class JSONRPCServiceCustom(JSONRPCService): def call(self, ctx, jsondata): """ Calls jsonrpc service's method and returns its return value in a JSON string or None if there is none. Arguments: jsondata -- remote method call in jsonrpc format """ result = self.call_py(ctx, jsondata) if result is not None: return json.dumps(result, cls=JSONObjectEncoder) return None def _call_method(self, ctx, request): """Calls given method with given params and returns it value.""" method = self.method_data[request['method']]['method'] params = request['params'] result = None try: if isinstance(params, list): # Does it have enough arguments? if len(params) < self._man_args(method) - 1: raise InvalidParamsError('not enough arguments') # Does it have too many arguments? if(not self._vargs(method) and len(params) > self._max_args(method) - 1): raise InvalidParamsError('too many arguments') result = method(ctx, *params) elif isinstance(params, dict): # Do not accept keyword arguments if the jsonrpc version is # not >=1.1. if request['jsonrpc'] < 11: raise KeywordError result = method(ctx, **params) else: # No params result = method(ctx) except JSONRPCError: raise except Exception as e: # log.exception('method %s threw an exception' % request['method']) # Exception was raised inside the method. newerr = JSONServerError() newerr.trace = traceback.format_exc() if len(e.args) == 1: newerr.data = repr(e.args[0]) else: newerr.data = repr(e.args) raise newerr return result def call_py(self, ctx, jsondata): """ Calls jsonrpc service's method and returns its return value in python object format or None if there is none. This method is same as call() except the return value is a python object instead of JSON string. This method is mainly only useful for debugging purposes. """ rdata = jsondata # we already deserialize the json string earlier in the server code, no # need to do it again # try: # rdata = json.loads(jsondata) # except ValueError: # raise ParseError # set some default values for error handling request = self._get_default_vals() if isinstance(rdata, dict) and rdata: # It's a single request. self._fill_request(request, rdata) respond = self._handle_request(ctx, request) # Don't respond to notifications if respond is None: return None return respond elif isinstance(rdata, list) and rdata: # It's a batch. requests = [] responds = [] for rdata_ in rdata: # set some default values for error handling request_ = self._get_default_vals() self._fill_request(request_, rdata_) requests.append(request_) for request_ in requests: respond = self._handle_request(ctx, request_) # Don't respond to notifications if respond is not None: responds.append(respond) if responds: return responds # Nothing to respond. return None else: # empty dict, list or wrong type raise InvalidRequestError def _handle_request(self, ctx, request): """Handles given request and returns its response.""" if 'types' in self.method_data[request['method']]: self._validate_params_types(request['method'], request['params']) result = self._call_method(ctx, request) # Do not respond to notifications. if request['id'] is None: return None respond = {} self._fill_ver(request['jsonrpc'], respond) respond['result'] = result respond['id'] = request['id'] return respond class MethodContext(dict): def __init__(self, logger): self['client_ip'] = None self['user_id'] = None self['authenticated'] = None self['token'] = None self['module'] = None self['method'] = None self['call_id'] = None self['rpc_context'] = None self['provenance'] = None self._debug_levels = set([7, 8, 9, 'DEBUG', 'DEBUG2', 'DEBUG3']) self._logger = logger def log_err(self, message): self._log(log.ERR, message) def log_info(self, message): self._log(log.INFO, message) def log_debug(self, message, level=1): if level in self._debug_levels: pass else: level = int(level) if level < 1 or level > 3: raise ValueError("Illegal log level: " + str(level)) level = level + 6 self._log(level, message) def set_log_level(self, level): self._logger.set_log_level(level) def get_log_level(self): return self._logger.get_log_level() def clear_log_level(self): self._logger.clear_user_log_level() def _log(self, level, message): self._logger.log_message(level, message, self['client_ip'], self['user_id'], self['module'], self['method'], self['call_id']) def provenance(self): callbackURL = os.environ.get('SDK_CALLBACK_URL') if callbackURL: # OK, there's a callback server from which we can get provenance arg_hash = {'method': 'CallbackServer.get_provenance', 'params': [], 'version': '1.1', 'id': str(_random.random())[2:] } body = json.dumps(arg_hash) response = _requests.post(callbackURL, data=body, timeout=60) response.encoding = 'utf-8' if response.status_code == 500: if ('content-type' in response.headers and response.headers['content-type'] == 'application/json'): err = response.json() if 'error' in err: raise ServerError(**err['error']) else: raise ServerError('Unknown', 0, response.text) else: raise ServerError('Unknown', 0, response.text) if not response.ok: response.raise_for_status() resp = response.json() if 'result' not in resp: raise ServerError('Unknown', 0, 'An unknown server error occurred') return resp['result'][0] else: return self.get('provenance') class ServerError(Exception): ''' The call returned an error. Fields: name - the name of the error. code - the error code. message - a human readable error message. data - the server side stacktrace. ''' def __init__(self, name, code, message, data=None, error=None): super(Exception, self).__init__(message) self.name = name self.code = code self.message = message if message else '' self.data = data or error or '' # data = JSON RPC 2.0, error = 1.1 def __str__(self): return self.name + ': ' + str(self.code) + '. ' + self.message + \ '\n' + self.data def getIPAddress(environ): xFF = environ.get('HTTP_X_FORWARDED_FOR') realIP = environ.get('HTTP_X_REAL_IP') trustXHeaders = config is None or \ config.get('dont_trust_x_ip_headers') != 'true' if (trustXHeaders): if (xFF): return xFF.split(',')[0].strip() if (realIP): return realIP.strip() return environ.get('REMOTE_ADDR') class Application(object): # Wrap the wsgi handler in a class definition so that we can # do some initialization and avoid regenerating stuff over # and over def logcallback(self): self.serverlog.set_log_file(self.userlog.get_log_file()) def log(self, level, context, message): self.serverlog.log_message(level, message, context['client_ip'], context['user_id'], context['module'], context['method'], context['call_id']) def __init__(self): submod = get_service_name() or 'MEGAHIT' self.userlog = log.log( submod, ip_address=True, authuser=True, module=True, method=True, call_id=True, changecallback=self.logcallback, config=get_config_file()) self.serverlog = log.log( submod, ip_address=True, authuser=True, module=True, method=True, call_id=True, logfile=self.userlog.get_log_file()) self.serverlog.set_log_level(6) self.rpc_service = JSONRPCServiceCustom() self.method_authentication = dict() self.rpc_service.add(impl_MEGAHIT.run_megahit, name='MEGAHIT.run_megahit', types=[dict]) self.method_authentication['MEGAHIT.run_megahit'] = 'required' # noqa self.rpc_service.add(impl_MEGAHIT.status, name='MEGAHIT.status', types=[dict]) authurl = config.get(AUTH) if config else None self.auth_client = _KBaseAuth(authurl) def __call__(self, environ, start_response): # Context object, equivalent to the perl impl CallContext ctx = MethodContext(self.userlog) ctx['client_ip'] = getIPAddress(environ) status = '500 Internal Server Error' try: body_size = int(environ.get('CONTENT_LENGTH', 0)) except (ValueError): body_size = 0 if environ['REQUEST_METHOD'] == 'OPTIONS': # we basically do nothing and just return headers status = '200 OK' rpc_result = "" else: request_body = environ['wsgi.input'].read(body_size) try: req = json.loads(request_body) except ValueError as ve: err = {'error': {'code': -32700, 'name': "Parse error", 'message': str(ve), } } rpc_result = self.process_error(err, ctx, {'version': '1.1'}) else: ctx['module'], ctx['method'] = req['method'].split('.') ctx['call_id'] = req['id'] ctx['rpc_context'] = { 'call_stack': [{'time': self.now_in_utc(), 'method': req['method']} ] } prov_action = {'service': ctx['module'], 'method': ctx['method'], 'method_params': req['params'] } ctx['provenance'] = [prov_action] try: token = environ.get('HTTP_AUTHORIZATION') # parse out the method being requested and check if it # has an authentication requirement method_name = req['method'] auth_req = self.method_authentication.get( method_name, 'none') if auth_req != 'none': if token is None and auth_req == 'required': err = JSONServerError() err.data = ( 'Authentication required for ' + 'MEGAHIT ' + 'but no authentication header was passed') raise err elif token is None and auth_req == 'optional': pass else: try: user = self.auth_client.get_user(token) ctx['user_id'] = user ctx['authenticated'] = 1 ctx['token'] = token except Exception as e: if auth_req == 'required': err = JSONServerError() err.data = \ "Token validation failed: %s" % e raise err if (environ.get('HTTP_X_FORWARDED_FOR')): self.log(log.INFO, ctx, 'X-Forwarded-For: ' + environ.get('HTTP_X_FORWARDED_FOR')) self.log(log.INFO, ctx, 'start method') rpc_result = self.rpc_service.call(ctx, req) self.log(log.INFO, ctx, 'end method') status = '200 OK' except JSONRPCError as jre: err = {'error': {'code': jre.code, 'name': jre.message, 'message': jre.data } } trace = jre.trace if hasattr(jre, 'trace') else None rpc_result = self.process_error(err, ctx, req, trace) except Exception: err = {'error': {'code': 0, 'name': 'Unexpected Server Error', 'message': 'An unexpected server error ' + 'occurred', } } rpc_result = self.process_error(err, ctx, req, traceback.format_exc()) # print('Request method was %s\n' % environ['REQUEST_METHOD']) # print('Environment dictionary is:\n%s\n' % pprint.pformat(environ)) # print('Request body was: %s' % request_body) # print('Result from the method call is:\n%s\n' % \ # pprint.pformat(rpc_result)) if rpc_result: response_body = rpc_result else: response_body = '' response_headers = [ ('Access-Control-Allow-Origin', '*'), ('Access-Control-Allow-Headers', environ.get( 'HTTP_ACCESS_CONTROL_REQUEST_HEADERS', 'authorization')), ('content-type', 'application/json'), ('content-length', str(len(response_body)))] start_response(status, response_headers) return [response_body.encode('utf8')] def process_error(self, error, context, request, trace=None): if trace: self.log(log.ERR, context, trace.split('\n')[0:-1]) if 'id' in request: error['id'] = request['id'] if 'version' in request: error['version'] = request['version'] e = error['error'].get('error') if not e: error['error']['error'] = trace elif 'jsonrpc' in request: error['jsonrpc'] = request['jsonrpc'] error['error']['data'] = trace else: error['version'] = '1.0' error['error']['error'] = trace return json.dumps(error) def now_in_utc(self): # noqa Taken from http://stackoverflow.com/questions/3401428/how-to-get-an-isoformat-datetime-string-including-the-default-timezone @IgnorePep8 dtnow = datetime.datetime.now() dtutcnow = datetime.datetime.utcnow() delta = dtnow - dtutcnow hh, mm = divmod((delta.days * 24 * 60 * 60 + delta.seconds + 30) // 60, 60) return "%s%+02d:%02d" % (dtnow.isoformat(), hh, mm) application = Application() # This is the uwsgi application dictionary. On startup uwsgi will look # for this dict and pull its configuration from here. # This simply lists where to "mount" the application in the URL path # # This uwsgi module "magically" appears when running the app within # uwsgi and is not available otherwise, so wrap an exception handler # around it # # To run this server in uwsgi with 4 workers listening on port 9999 use: # uwsgi -M -p 4 --http :9999 --wsgi-file _this_file_ # To run a using the single threaded python BaseHTTP service # listening on port 9999 by default execute this file # try: import uwsgi # Before we do anything with the application, see if the # configs specify patching all std routines to be asynch # *ONLY* use this if you are going to wrap the service in # a wsgi container that has enabled gevent, such as # uwsgi with the --gevent option if config is not None and config.get('gevent_monkeypatch_all', False): print("Monkeypatching std libraries for async") from gevent import monkey monkey.patch_all() uwsgi.applications = {'': application} except ImportError: # Not available outside of wsgi, ignore pass _proc = None def start_server(host='localhost', port=0, newprocess=False): ''' By default, will start the server on localhost on a system assigned port in the main thread. Excecution of the main thread will stay in the server main loop until interrupted. To run the server in a separate process, and thus allow the stop_server method to be called, set newprocess = True. This will also allow returning of the port number.''' global _proc if _proc: raise RuntimeError('server is already running') httpd = make_server(host, port, application) port = httpd.server_address[1] print("Listening on port %s" % port) if newprocess: _proc = Process(target=httpd.serve_forever) _proc.daemon = True _proc.start() else: httpd.serve_forever() return port def stop_server(): global _proc _proc.terminate() _proc = None def process_async_cli(input_file_path, output_file_path, token): exit_code = 0 with open(input_file_path) as data_file: req = json.load(data_file) if 'version' not in req: req['version'] = '1.1' if 'id' not in req: req['id'] = str(_random.random())[2:] ctx = MethodContext(application.userlog) if token: user = application.auth_client.get_user(token) ctx['user_id'] = user ctx['authenticated'] = 1 ctx['token'] = token if 'context' in req: ctx['rpc_context'] = req['context'] ctx['CLI'] = 1 ctx['module'], ctx['method'] = req['method'].split('.') prov_action = {'service': ctx['module'], 'method': ctx['method'], 'method_params': req['params']} ctx['provenance'] = [prov_action] resp = None try: resp = application.rpc_service.call_py(ctx, req) except JSONRPCError as jre: trace = jre.trace if hasattr(jre, 'trace') else None resp = {'id': req['id'], 'version': req['version'], 'error': {'code': jre.code, 'name': jre.message, 'message': jre.data, 'error': trace} } except Exception: trace = traceback.format_exc() resp = {'id': req['id'], 'version': req['version'], 'error': {'code': 0, 'name': 'Unexpected Server Error', 'message': 'An unexpected server error occurred', 'error': trace} } if 'error' in resp: exit_code = 500 with open(output_file_path, "w") as f: f.write(json.dumps(resp, cls=JSONObjectEncoder)) return exit_code if __name__ == "__main__": if (len(sys.argv) >= 3 and len(sys.argv) <= 4 and os.path.isfile(sys.argv[1])): token = None if len(sys.argv) == 4: if os.path.isfile(sys.argv[3]): with open(sys.argv[3]) as token_file: token = token_file.read() else: token = sys.argv[3] sys.exit(process_async_cli(sys.argv[1], sys.argv[2], token)) try: opts, args = getopt(sys.argv[1:], "", ["port=", "host="]) except GetoptError as err: # print help information and exit: print(str(err)) # will print something like "option -a not recognized" sys.exit(2) port = 9999 host = 'localhost' for o, a in opts: if o == '--port': port = int(a) elif o == '--host': host = a print("Host set to %s" % host) else: assert False, "unhandled option" start_server(host=host, port=port) # print("Listening on port %s" % port) # httpd = make_server( host, port, application) # # httpd.serve_forever()
{ "content_hash": "b9450516956ee61520aa60384fddaf77", "timestamp": "", "source": "github", "line_count": 631, "max_line_length": 151, "avg_line_length": 36.72583201267829, "alnum_prop": 0.5283507378959178, "repo_name": "msneddon/kb_megahit", "id": "5da2d9a0e9925519fde8d220510ed96507813616", "size": "23220", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/MEGAHIT/MEGAHITServer.py", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "17121" }, { "name": "JavaScript", "bytes": "4593" }, { "name": "Makefile", "bytes": "2862" }, { "name": "Perl", "bytes": "13956" }, { "name": "Python", "bytes": "123862" }, { "name": "R", "bytes": "2390" }, { "name": "Ruby", "bytes": "2335" }, { "name": "Shell", "bytes": "1198" } ], "symlink_target": "" }
'''Conway's Game of Life displayed with PyGame ''' import os import sys import time import math import pygame from pygame.gfxdraw import pixel from pygame.locals import * from GameOfLife import NumpyWorld, Patterns _SurfaceDepth = 32 class PygameCell(object): def __init__(self,width,height): self.size = (width,height) @property def deadColor(self): try: return self._deadColor except AttributeError: pass self._deadColor = (0,0,0) return self._deadColor @property def size(self): try: return self._size except AttributeError: pass self._size = (10,10) return self._size @size.setter def size(self,newValue): self._size = newValue try: del(self._surface) except: pass @property def surface(self): try: return self._surface except AttributeError: pass self._surface = pygame.surface.Surface(self.size,depth=_SurfaceDepth) return self._surface def draw(self,age): self.surface.fill(self.color(age)) return self.surface def color(self,age): ''' Cell foreground color. ''' if age < 1: return (0,0,0) if age == 1: return (255,255,255) frequency,width,center = 0.01,127,128 c = [] for phase in range(0,6,2): c.append((math.sin((frequency * age) + phase) * width) + center) return tuple(c) class SquareCell(PygameCell): def draw(self,age,deadColor=None): if deadColor is None: deadColor = self.deadColor if age: self.surface.fill(self.color(age)) pygame.draw.rect(self.surface,deadColor,self.surface.get_rect(),1) else: self.surface.fill(deadColor) return self.surface class PygameWorld(NumpyWorld): ''' ''' def __init__(self,width,height,cellClass=SquareCell): ''' ''' super(PygameWorld,self).__init__(width,height) self.cell = cellClass(10,10) pygame.display.set_caption('PGameOfLife - {}'.format(cellClass.__name__)) self.hudHeight = 100 self.paused = False self.events = {QUIT:self.quit} self.controls = {K_ESCAPE:self.quit, K_q:self.quit, K_SPACE: self.togglePaused, K_PAGEUP: self.incInterval, K_PAGEDOWN: self.decInterval} @property def screen(self): ''' ''' try: return self._screen except AttributeError: pass offx,offy = self.cell.size screensz = (self.width*offx,self.height*offy + self.hudHeight) self._screen = pygame.display.set_mode(screensz,0,_SurfaceDepth) self._screen.fill(self.background) return self._screen @property def buffer(self): ''' ''' try: return self._buffer except AttributeError: pass self._buffer = self.screen.copy() self._buffer.fill(self.background) return self._buffer @property def background(self): ''' ''' try: return self._background except AttributeError: pass self._background = self.cell.deadColor return self._background @property def font(self): try: return self._font except AttributeError: pass self._font = pygame.font.Font(pygame.font.get_default_font(),24) return self._font @property def hudRect(self): try: return self._hudRect except AttributeError: pass self._hudRect = self.screen.get_rect() self._hudRect.y = self.hudRect.height - self.hudHeight self._hudRect.height = self.hudHeight return self._hudRect @property def interval(self): ''' ''' try: return self._interval except AttributError: pass self._interval = 0.01 return self._interval @interval.setter def interval(self,newValue): self._interval = float(newValue) if self._interval < 0: self._interval = 0.0 def incInterval(self): ''' ''' self.interval += 0.01 def decInterval(self): ''' ''' self.interval -= 0.01 def togglePaused(self): self.paused = not self.paused @property def gps(self): ''' ''' try: return self._gps except AttributeError: pass self._gps = 0 return self._gps @gps.setter def gps(self,newValue): self._gps = int(newValue) @property def status(self): ''' ''' try: return self._status.format(self=self, nAlive=len(self.alive), nTotal=len(self.cells)) except AttributeError: pass s = ['Generations: {self.generation:<10}', '{self.gps:>4} G/s', 'Census: {nAlive}/{nTotal}'] self._status = ' '.join(s) return self._status.format(self=self, nAlive=len(self.alive), nTotal=len(self.cells)) def reset(self): ''' ''' super(PygameWorld,self).reset() #for cell in self: # cell.rect.x *= cell.size[0] # cell.rect.y *= cell.size[1] def quit(self): ''' ''' exit() def handle_input(self): ''' ''' # first key presses pressed = pygame.key.get_pressed() for key,action in self.controls.items(): if pressed[key]: action() # next events for event in pygame.event.get(): name = pygame.event.event_name(event.type) try: self.events[name](event) except KeyError: pass def drawHud(self,surface,color,frame): ''' ''' labels = [ 'Generations:','Generations/Sec:', '# Cells Alive:','# Total Cells:'] values = ['{self.generation}'.format(self=self), '{self.gps}'.format(self=self), '{nAlive}'.format(nAlive=len(self.alive)), '{nCells}'.format(nCells=len(self.cells))] for n,texts in enumerate(zip(labels,values)): label,value = texts l = self.font.render(label,True,color) r = l.get_rect() r.y = frame.y + (n * r.height) surface.blit(l,r) v = self.font.render(value,True,color) r = v.get_rect() r.y = frame.y + (n*r.height) r.x = 250 surface.blit(v,r) def _rectFor(self,x,y): w,h = self.cell.size return ((x*w,y*h),(w,h)) def draw(self,allCells=False): ''' ''' self.buffer.fill(self.background) for x,y in self.alive: surface = self.cell.draw(self[x,y],self.background) self.buffer.blit(surface,self._rectFor(x,y)) self.drawHud(self.buffer,(255,255,255),self.hudRect) return self.screen.blit(self.buffer,(0,0)) def saveFrame(self,destdir='images',prefix='generation',ext='bmp'): ''' ''' fname_template = '%s/%s-{:05}.%s' % (destdir,prefix,ext) pygame.image.save(self.screen, fname_template.format(self.generation)) def run(self,stop=-1,interval=0.01): ''' ''' self.interval = interval while self.generation != stop: self.handle_input() t0 = time.time() if not self.paused: self.step() rect = self.draw(allCells=self.generation==0) t1 = time.time() if self.paused: self.gps = 0 else: self.gps = 1 / (t1-t0) pygame.display.update(rect) if self.writeGenerations: self.saveFrame(ext='png') time.sleep(self.interval) def usage(argv,msg=None,exit_value=-1): ''' ''' usagefmt = 'usage: {name} [[pattern_name],[X,Y]] ...' namefmt = '\t{}' print(usagefmt.format(name=os.path.basename(argv[0]))) if msg: print(msg) print('pattern names:') [print(namefmt.format(name)) for name in Patterns.keys()] exit(exit_value) if __name__ == '__main__': pygame.init() if len(sys.argv) == 1: usage(sys.argv,"no patterns specified.") w = PygameWorld(128,128,cellClass=SquareCell) w.writeGenerations = False for thing in sys.argv[1:]: name,_,where = thing.partition(',') try: x,y = map(int,where.split(',')) except: x,y = 0,0 w.addPattern(name,x=x,y=y) w.run()
{ "content_hash": "6b6db5b398d54c6a370996bdd491be43", "timestamp": "", "source": "github", "line_count": 401, "max_line_length": 81, "avg_line_length": 23.860349127182044, "alnum_prop": 0.4905936454849498, "repo_name": "JnyJny/GameOfLife", "id": "39fc7ecbc6374bb4c6c53f73671534bdb3a3dedf", "size": "9593", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "contrib/NPGameOfLife.py", "mode": "33261", "license": "mit", "language": [ { "name": "Makefile", "bytes": "3178" }, { "name": "Python", "bytes": "63298" } ], "symlink_target": "" }
from __future__ import print_function from telesign.phoneid import PhoneIdClient customer_id = "FFFFFFFF-EEEE-DDDD-1234-AB1234567890" api_key = "EXAMPLE----TE8sTgg45yusumoN6BYsBVkh+yRJ5czgsnCehZaOYldPJdmFh6NeX8kunZ2zU1YWaUw/0wV6xfw==" phone_number = "phone_number" phone_type_voip = "5" data = PhoneIdClient(customer_id, api_key) response = data.phoneid(phone_number) if response.ok: if response.json['phone_type']['code'] == phone_type_voip: print("Phone number {} is a VOIP phone.".format( phone_number)) else: print("Phone number {} is not a VOIP phone.".format( phone_number))
{ "content_hash": "4816ceb1ea7a49750bb837776c861df3", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 100, "avg_line_length": 33.31578947368421, "alnum_prop": 0.6998420221169036, "repo_name": "TeleSign/python_telesign", "id": "d3ccf6492dd4ed12ec168c56289ff2701ef03941", "size": "633", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "examples/phoneid/1_check_phone_type_to_block_voip.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "31439" } ], "symlink_target": "" }
import uuid from draftHost import models class BotTeamCreator(object): NAME = "AI Drafting Opponent #{}" EMAIL = "[email protected]" def __init__(self, draft, draft_form): self.draft = draft self.num_teams = draft_form.cleaned_data['team_limit'] self.season = models.FantasySeason.objects.filter(year="2014-2015")[0] # Eww. self.brain = "default" def run(self): for i in range(1, self.num_teams): team_data = { "draft": self.draft, "auth_key": uuid.uuid4(), "name": self.NAME.format(i), "email": self.EMAIL, } team, _ = models.FantasyTeam.objects.get_or_create(**team_data) bot_data = { "season": self.season, "draft": self.draft, "brain": self.brain, "team": team, } bot = models.MockDraftBot(**bot_data) bot.save()
{ "content_hash": "132f6c60d3a7203e95b348187fc3c43a", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 85, "avg_line_length": 32.38709677419355, "alnum_prop": 0.5139442231075697, "repo_name": "gnmerritt/autodraft", "id": "e93c9e1ff215a3932c717296d085ecbf0789cdc6", "size": "1004", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "autodraft/draftHost/logic/mock_draft.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "17544" }, { "name": "Python", "bytes": "73830" }, { "name": "Shell", "bytes": "994" } ], "symlink_target": "" }
import sys import os PathPop= "" if os.path.isfile(PathPop+"population.py")==0: print "set the variable PathPop to the path where you compile population, for instance D:\Users/vtariel/Desktop/ANV/Population-build/. This folder must contain population.py" sys.exit(-1) sys.path.append(PathPop) from population import * #### class member ### def classmember() : img = Mat2UI8(PathPop+"/image/Lena.bmp") img.display()#display the image img.display("lena",False,False)#display the image with lena title without stopping the execution without resizing the image #### display class ### def classdisplay() : img = Mat2UI8(PathPop+"/image/Lena.bmp") disp = MatNDisplay()#class to display the image #execute and display the result in the object window disp while this windows is not closed i =1 while True : proc = Processing() img = proc.erosionRegionGrowing(img,i,2) img = proc.dilationRegionGrowing(img,i,2) i = i+1 disp.display(img) if disp.is_closed() is True : break #### Visualization algorithms ### def visualizealgorithm() : # label image in foreground of a grey(color) imageto check segmentation or seed localization img = Mat2UI8(PathPop+"/image/Lena.bmp") proc = Processing() thre = proc.thresholdOtsuMethod(img) visu = Visualization() foreground = visu.labelForeground (thre,img,0.7) foreground.display() # display each label with a random colour d = DistributionPoisson(0.001) field = Mat2UI32(512,512)#realisation of a discrete poisson field it = field.getIteratorEDomain() label = 1 while it.next() is True: if d.randomVariable() != 0: field.setValue(it.x(),label) label=label+1 proc = Processing() field = proc.voronoiTesselationEuclidean(field)#voronoi tesselation with 2-norm visu = Visualization() visu.labelToRandomRGB(field).display() classmember() classdisplay() visualizealgorithm()
{ "content_hash": "7e4d8ae7a1ccf8c87d9a6719e8760039", "timestamp": "", "source": "github", "line_count": 62, "max_line_length": 178, "avg_line_length": 30.241935483870968, "alnum_prop": 0.7322666666666666, "repo_name": "Population-image/Population", "id": "cfb68dbf34d8a821da067c4d11f479640c3d5da6", "size": "1875", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "other/tutorial/visualization2d.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "1929" }, { "name": "C++", "bytes": "2047159" }, { "name": "CMake", "bytes": "28757" }, { "name": "Cuda", "bytes": "86814" }, { "name": "Python", "bytes": "19343" }, { "name": "QMake", "bytes": "24894" } ], "symlink_target": "" }
from alamo_common.conf import AlamoSettings settings = AlamoSettings()
{ "content_hash": "0f2c250f1099f7459c26de48f5277335", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 43, "avg_line_length": 24, "alnum_prop": 0.8194444444444444, "repo_name": "RulersOfAsgard/ALAMO-scheduler", "id": "30895e909832c58d9ea128efe501fcdc1ad4dad9", "size": "96", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "alamo_scheduler/conf/__init__.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "49002" }, { "name": "Ruby", "bytes": "1372" }, { "name": "Shell", "bytes": "1597" } ], "symlink_target": "" }
from flask import Blueprint ui = Blueprint('ui', __name__, template_folder='templates') import location.ui.views
{ "content_hash": "187457b1c9cfeea1cc42a692213e5990", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 59, "avg_line_length": 23, "alnum_prop": 0.7391304347826086, "repo_name": "zackdever/location", "id": "860d9001122b2a1cb32bae2b8938084441d8f415", "size": "115", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "location/ui/__init__.py", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "12822" }, { "name": "Python", "bytes": "30386" } ], "symlink_target": "" }
""" An exception collector that finds traceback information plus supplements """ import sys import traceback import time from six.moves import cStringIO as StringIO import linecache from paste.exceptions import serial_number_generator import warnings DEBUG_EXCEPTION_FORMATTER = True DEBUG_IDENT_PREFIX = 'E-' FALLBACK_ENCODING = 'UTF-8' __all__ = ['collect_exception', 'ExceptionCollector'] class ExceptionCollector(object): """ Produces a data structure that can be used by formatters to display exception reports. Magic variables: If you define one of these variables in your local scope, you can add information to tracebacks that happen in that context. This allows applications to add all sorts of extra information about the context of the error, including URLs, environmental variables, users, hostnames, etc. These are the variables we look for: ``__traceback_supplement__``: You can define this locally or globally (unlike all the other variables, which must be defined locally). ``__traceback_supplement__`` is a tuple of ``(factory, arg1, arg2...)``. When there is an exception, ``factory(arg1, arg2, ...)`` is called, and the resulting object is inspected for supplemental information. ``__traceback_info__``: This information is added to the traceback, usually fairly literally. ``__traceback_hide__``: If set and true, this indicates that the frame should be hidden from abbreviated tracebacks. This way you can hide some of the complexity of the larger framework and let the user focus on their own errors. By setting it to ``'before'``, all frames before this one will be thrown away. By setting it to ``'after'`` then all frames after this will be thrown away until ``'reset'`` is found. In each case the frame where it is set is included, unless you append ``'_and_this'`` to the value (e.g., ``'before_and_this'``). Note that formatters will ignore this entirely if the frame that contains the error wouldn't normally be shown according to these rules. ``__traceback_reporter__``: This should be a reporter object (see the reporter module), or a list/tuple of reporter objects. All reporters found this way will be given the exception, innermost first. ``__traceback_decorator__``: This object (defined in a local or global scope) will get the result of this function (the CollectedException defined below). It may modify this object in place, or return an entirely new object. This gives the object the ability to manipulate the traceback arbitrarily. The actually interpretation of these values is largely up to the reporters and formatters. ``collect_exception(*sys.exc_info())`` will return an object with several attributes: ``frames``: A list of frames ``exception_formatted``: The formatted exception, generally a full traceback ``exception_type``: The type of the exception, like ``ValueError`` ``exception_value``: The string value of the exception, like ``'x not in list'`` ``identification_code``: A hash of the exception data meant to identify the general exception, so that it shares this code with other exceptions that derive from the same problem. The code is a hash of all the module names and function names in the traceback, plus exception_type. This should be shown to users so they can refer to the exception later. (@@: should it include a portion that allows identification of the specific instance of the exception as well?) The list of frames goes innermost first. Each frame has these attributes; some values may be None if they could not be determined. ``modname``: the name of the module ``filename``: the filename of the module ``lineno``: the line of the error ``revision``: the contents of __version__ or __revision__ ``name``: the function name ``supplement``: an object created from ``__traceback_supplement__`` ``supplement_exception``: a simple traceback of any exception ``__traceback_supplement__`` created ``traceback_info``: the str() of any ``__traceback_info__`` variable found in the local scope (@@: should it str()-ify it or not?) ``traceback_hide``: the value of any ``__traceback_hide__`` variable ``traceback_log``: the value of any ``__traceback_log__`` variable ``__traceback_supplement__`` is thrown away, but a fixed set of attributes are captured; each of these attributes is optional. ``object``: the name of the object being visited ``source_url``: the original URL requested ``line``: the line of source being executed (for interpreters, like ZPT) ``column``: the column of source being executed ``expression``: the expression being evaluated (also for interpreters) ``warnings``: a list of (string) warnings to be displayed ``getInfo``: a function/method that takes no arguments, and returns a string describing any extra information ``extraData``: a function/method that takes no arguments, and returns a dictionary. The contents of this dictionary will not be displayed in the context of the traceback, but globally for the exception. Results will be grouped by the keys in the dictionaries (which also serve as titles). The keys can also be tuples of (importance, title); in this case the importance should be ``important`` (shows up at top), ``normal`` (shows up somewhere; unspecified), ``supplemental`` (shows up at bottom), or ``extra`` (shows up hidden or not at all). These are used to create an object with attributes of the same names (``getInfo`` becomes a string attribute, not a method). ``__traceback_supplement__`` implementations should be careful to produce values that are relatively static and unlikely to cause further errors in the reporting system -- any complex introspection should go in ``getInfo()`` and should ultimately return a string. Note that all attributes are optional, and under certain circumstances may be None or may not exist at all -- the collector can only do a best effort, but must avoid creating any exceptions itself. Formatters may want to use ``__traceback_hide__`` as a hint to hide frames that are part of the 'framework' or underlying system. There are a variety of rules about special values for this variables that formatters should be aware of. TODO: More attributes in __traceback_supplement__? Maybe an attribute that gives a list of local variables that should also be collected? Also, attributes that would be explicitly meant for the entire request, not just a single frame. Right now some of the fixed set of attributes (e.g., source_url) are meant for this use, but there's no explicit way for the supplement to indicate new values, e.g., logged-in user, HTTP referrer, environment, etc. Also, the attributes that do exist are Zope/Web oriented. More information on frames? cgitb, for instance, produces extensive information on local variables. There exists the possibility that getting this information may cause side effects, which can make debugging more difficult; but it also provides fodder for post-mortem debugging. However, the collector is not meant to be configurable, but to capture everything it can and let the formatters be configurable. Maybe this would have to be a configuration value, or maybe it could be indicated by another magical variable (which would probably mean 'show all local variables below this frame') """ show_revisions = 0 def __init__(self, limit=None): self.limit = limit def getLimit(self): limit = self.limit if limit is None: limit = getattr(sys, 'tracebacklimit', None) return limit def getRevision(self, globals): if not self.show_revisions: return None revision = globals.get('__revision__', None) if revision is None: # Incorrect but commonly used spelling revision = globals.get('__version__', None) if revision is not None: try: revision = str(revision).strip() except: revision = '???' return revision def collectSupplement(self, supplement, tb): result = {} for name in ('object', 'source_url', 'line', 'column', 'expression', 'warnings'): result[name] = getattr(supplement, name, None) func = getattr(supplement, 'getInfo', None) if func: result['info'] = func() else: result['info'] = None func = getattr(supplement, 'extraData', None) if func: result['extra'] = func() else: result['extra'] = None return SupplementaryData(**result) def collectLine(self, tb, extra_data): f = tb.tb_frame lineno = tb.tb_lineno co = f.f_code filename = co.co_filename name = co.co_name globals = f.f_globals locals = f.f_locals if not hasattr(locals, 'keys'): # Something weird about this frame; it's not a real dict warnings.warn( "Frame %s has an invalid locals(): %r" % ( globals.get('__name__', 'unknown'), locals)) locals = {} data = {} data['modname'] = globals.get('__name__', None) data['filename'] = filename data['lineno'] = lineno data['revision'] = self.getRevision(globals) data['name'] = name data['tbid'] = id(tb) # Output a traceback supplement, if any. if '__traceback_supplement__' in locals: # Use the supplement defined in the function. tbs = locals['__traceback_supplement__'] elif '__traceback_supplement__' in globals: # Use the supplement defined in the module. # This is used by Scripts (Python). tbs = globals['__traceback_supplement__'] else: tbs = None if tbs is not None: factory = tbs[0] args = tbs[1:] try: supp = factory(*args) data['supplement'] = self.collectSupplement(supp, tb) if data['supplement'].extra: for key, value in data['supplement'].extra.items(): extra_data.setdefault(key, []).append(value) except: if DEBUG_EXCEPTION_FORMATTER: out = StringIO() traceback.print_exc(file=out) text = out.getvalue() data['supplement_exception'] = text # else just swallow the exception. try: tbi = locals.get('__traceback_info__', None) if tbi is not None: data['traceback_info'] = str(tbi) except: pass marker = [] for name in ('__traceback_hide__', '__traceback_log__', '__traceback_decorator__'): try: tbh = locals.get(name, globals.get(name, marker)) if tbh is not marker: data[name[2:-2]] = tbh except: pass return data def collectExceptionOnly(self, etype, value): return traceback.format_exception_only(etype, value) def collectException(self, etype, value, tb, limit=None): # The next line provides a way to detect recursion. __exception_formatter__ = 1 frames = [] ident_data = [] traceback_decorators = [] if limit is None: limit = self.getLimit() n = 0 extra_data = {} while tb is not None and (limit is None or n < limit): if tb.tb_frame.f_locals.get('__exception_formatter__'): # Stop recursion. @@: should make a fake ExceptionFrame frames.append('(Recursive formatException() stopped)\n') break data = self.collectLine(tb, extra_data) frame = ExceptionFrame(**data) frames.append(frame) if frame.traceback_decorator is not None: traceback_decorators.append(frame.traceback_decorator) ident_data.append(frame.modname or '?') ident_data.append(frame.name or '?') tb = tb.tb_next n = n + 1 ident_data.append(str(etype)) ident = serial_number_generator.hash_identifier( ' '.join(ident_data), length=5, upper=True, prefix=DEBUG_IDENT_PREFIX) result = CollectedException( frames=frames, exception_formatted=self.collectExceptionOnly(etype, value), exception_type=etype, exception_value=self.safeStr(value), identification_code=ident, date=time.localtime(), extra_data=extra_data) if etype is ImportError: extra_data[('important', 'sys.path')] = [sys.path] for decorator in traceback_decorators: try: new_result = decorator(result) if new_result is not None: result = new_result except: pass return result def safeStr(self, obj): try: return str(obj) except UnicodeEncodeError: try: return unicode(obj).encode(FALLBACK_ENCODING, 'replace') except UnicodeEncodeError: # This is when something is really messed up, but this can # happen when the __str__ of an object has to handle unicode return repr(obj) limit = 200 class Bunch(object): """ A generic container """ def __init__(self, **attrs): for name, value in attrs.items(): setattr(self, name, value) def __repr__(self): name = '<%s ' % self.__class__.__name__ name += ' '.join(['%s=%r' % (name, str(value)[:30]) for name, value in self.__dict__.items() if not name.startswith('_')]) return name + '>' class CollectedException(Bunch): """ This is the result of collection the exception; it contains copies of data of interest. """ # A list of frames (ExceptionFrame instances), innermost last: frames = [] # The result of traceback.format_exception_only; this looks # like a normal traceback you'd see in the interactive interpreter exception_formatted = None # The *string* representation of the type of the exception # (@@: should we give the # actual class? -- we can't keep the # actual exception around, but the class should be safe) # Something like 'ValueError' exception_type = None # The string representation of the exception, from ``str(e)``. exception_value = None # An identifier which should more-or-less classify this particular # exception, including where in the code it happened. identification_code = None # The date, as time.localtime() returns: date = None # A dictionary of supplemental data: extra_data = {} class SupplementaryData(Bunch): """ The result of __traceback_supplement__. We don't keep the supplement object around, for fear of GC problems and whatnot. (@@: Maybe I'm being too superstitious about copying only specific information over) """ # These attributes are copied from the object, or left as None # if the object doesn't have these attributes: object = None source_url = None line = None column = None expression = None warnings = None # This is the *return value* of supplement.getInfo(): info = None class ExceptionFrame(Bunch): """ This represents one frame of the exception. Each frame is a context in the call stack, typically represented by a line number and module name in the traceback. """ # The name of the module; can be None, especially when the code # isn't associated with a module. modname = None # The filename (@@: when no filename, is it None or '?'?) filename = None # Line number lineno = None # The value of __revision__ or __version__ -- but only if # show_revision = True (by defaut it is false). (@@: Why not # collect this?) revision = None # The name of the function with the error (@@: None or '?' when # unknown?) name = None # A SupplementaryData object, if __traceback_supplement__ was found # (and produced no errors) supplement = None # If accessing __traceback_supplement__ causes any error, the # plain-text traceback is stored here supplement_exception = None # The str() of any __traceback_info__ value found traceback_info = None # The value of __traceback_hide__ traceback_hide = False # The value of __traceback_decorator__ traceback_decorator = None # The id() of the traceback scope, can be used to reference the # scope for use elsewhere tbid = None def get_source_line(self, context=0): """ Return the source of the current line of this frame. You probably want to .strip() it as well, as it is likely to have leading whitespace. If context is given, then that many lines on either side will also be returned. E.g., context=1 will give 3 lines. """ if not self.filename or not self.lineno: return None lines = [] for lineno in range(self.lineno-context, self.lineno+context+1): lines.append(linecache.getline(self.filename, lineno)) return ''.join(lines) if hasattr(sys, 'tracebacklimit'): limit = min(limit, sys.tracebacklimit) col = ExceptionCollector() def collect_exception(t, v, tb, limit=None): """ Collection an exception from ``sys.exc_info()``. Use like:: try: blah blah except: exc_data = collect_exception(*sys.exc_info()) """ return col.collectException(t, v, tb, limit=limit)
{ "content_hash": "d350e83bae09c1bfbab57614347434e1", "timestamp": "", "source": "github", "line_count": 506, "max_line_length": 76, "avg_line_length": 37.073122529644266, "alnum_prop": 0.6140519217442294, "repo_name": "stefanv/aandete", "id": "632ce0665aaece41cc00de58dd74d7aa3861070f", "size": "19681", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/lib/paste/exceptions/collector.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "JavaScript", "bytes": "248684" }, { "name": "Python", "bytes": "6478502" } ], "symlink_target": "" }