| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6–947k | stringlengths 5–100 | stringlengths 4–231 | stringclasses 1 value | stringclasses 15 values | int64 6–947k | float64 0–0.34 |
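Each record below is one file's `text` followed by a single trailer line of the form `| repo_name | path | language | license | size | score |`. A minimal sketch for splitting such a dump back into records; the trailer regex and field order are assumptions read off the schema above, and sizes may contain thousands separators:

```python
import re

# Assumed trailer format: | repo_name | path | language | license | size | score |
TRAILER = re.compile(
    r"^\s*\|\s*(?P<repo_name>[^|]+?)\s*\|\s*(?P<path>[^|]+?)\s*\|"
    r"\s*(?P<language>[^|]+?)\s*\|\s*(?P<license>[^|]+?)\s*\|"
    r"\s*(?P<size>[\d,]+)\s*\|\s*(?P<score>[\d.]+)\s*\|\s*$")


def split_records(dump):
    """Yield (text, metadata) pairs from a concatenated dump string."""
    buf = []
    for line in dump.splitlines():
        match = TRAILER.match(line)
        if match:
            yield "\n".join(buf), match.groupdict()
            buf = []
        else:
            buf.append(line)
```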
# -*- coding: utf-8 -*-
# vispy: gallery 2
# -----------------------------------------------------------------------------
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
+----+-------------+
| | |
| y | viewbox |
| | |
+----+-------------+
| sp | x |
+----+-------------+
"""
import sys
from vispy import scene, app
canvas = scene.SceneCanvas(keys='interactive')
canvas.size = 600, 600
canvas.show()
grid = canvas.central_widget.add_grid()
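# layout per the docstring diagram: y axis (0, 0), viewbox (0, 1),
# spacer (1, 0), x axis (1, 1)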
widget_y_axis = grid.add_widget(row=0, col=0)
widget_y_axis.bgcolor = "#999999"
widget_viewbox = grid.add_widget(row=0, col=1)
widget_viewbox.bgcolor = "#dd0000"
widget_spacer_bottom = grid.add_widget(row=1, col=0)
widget_spacer_bottom.bgcolor = "#efefef"
widget_x_axis = grid.add_widget(row=1, col=1)
widget_x_axis.bgcolor = "#0000dd"
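# pin the axis strips to 50 px so only the viewbox stretches on resize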
widget_y_axis.width_min = 50
widget_y_axis.width_max = 50
widget_x_axis.height_min = 50
widget_x_axis.height_max = 50
if __name__ == '__main__' and sys.flags.interactive == 0:
app.run()
| Eric89GXL/vispy | examples/basics/plotting/grid_x_y_viewbox.py | Python | bsd-3-clause | 1,194 | 0 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nicira Networks, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Dan Wendlandt, Nicira, Inc
#
import contextlib
import copy
import mock
from oslo.config import cfg
import testtools
from webob import exc
import webtest
from quantum.api import extensions
from quantum.api.rpc.agentnotifiers import l3_rpc_agent_api
from quantum.api.v2 import attributes
from quantum.common import config
from quantum.common import constants as l3_constants
from quantum.common import exceptions as q_exc
from quantum.common.test_lib import test_config
from quantum import context
from quantum.db import db_base_plugin_v2
from quantum.db import l3_db
from quantum.db import models_v2
from quantum.extensions import l3
from quantum.manager import QuantumManager
from quantum.openstack.common import log as logging
from quantum.openstack.common.notifier import api as notifier_api
from quantum.openstack.common.notifier import test_notifier
from quantum.openstack.common import uuidutils
from quantum.tests import base
from quantum.tests.unit import test_api_v2
from quantum.tests.unit import test_db_plugin
from quantum.tests.unit import test_extensions
from quantum.tests.unit import testlib_api
LOG = logging.getLogger(__name__)
_uuid = uuidutils.generate_uuid
_get_path = test_api_v2._get_path
class L3TestExtensionManager(object):
def get_resources(self):
return l3.L3.get_resources()
def get_actions(self):
return []
def get_request_extensions(self):
return []
class L3NatExtensionTestCase(testlib_api.WebTestCase):
fmt = 'json'
def setUp(self):
super(L3NatExtensionTestCase, self).setUp()
plugin = 'quantum.extensions.l3.RouterPluginBase'
# Ensure 'stale' patched copies of the plugin are never returned
QuantumManager._instance = None
# Ensure existing ExtensionManager is not used
extensions.PluginAwareExtensionManager._instance = None
# Save the global RESOURCE_ATTRIBUTE_MAP
self.saved_attr_map = {}
for resource, attrs in attributes.RESOURCE_ATTRIBUTE_MAP.iteritems():
self.saved_attr_map[resource] = attrs.copy()
# Create the default configurations
args = ['--config-file', test_api_v2.etcdir('quantum.conf.test')]
config.parse(args=args)
# Update the plugin and extensions path
cfg.CONF.set_override('core_plugin', plugin)
cfg.CONF.set_override('allow_pagination', True)
cfg.CONF.set_override('allow_sorting', True)
self._plugin_patcher = mock.patch(plugin, autospec=True)
self.plugin = self._plugin_patcher.start()
instances = self.plugin.return_value
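        # the attributes below are name-mangled on RouterPluginBase, hence
        # the _RouterPluginBase prefix when setting them on the mock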
instances._RouterPluginBase__native_pagination_support = True
instances._RouterPluginBase__native_sorting_support = True
# Instantiate mock plugin and enable the 'router' extension
QuantumManager.get_plugin().supported_extension_aliases = (
["router"])
ext_mgr = L3TestExtensionManager()
self.ext_mdw = test_extensions.setup_extensions_middleware(ext_mgr)
self.api = webtest.TestApp(self.ext_mdw)
def tearDown(self):
self._plugin_patcher.stop()
self.api = None
self.plugin = None
cfg.CONF.reset()
# Restore the global RESOURCE_ATTRIBUTE_MAP
attributes.RESOURCE_ATTRIBUTE_MAP = self.saved_attr_map
super(L3NatExtensionTestCase, self).tearDown()
def test_router_create(self):
router_id = _uuid()
data = {'router': {'name': 'router1', 'admin_state_up': True,
'tenant_id': _uuid(),
'external_gateway_info': None}}
return_value = copy.deepcopy(data['router'])
return_value.update({'status': "ACTIVE", 'id': router_id})
instance = self.plugin.return_value
instance.create_router.return_value = return_value
instance.get_routers_count.return_value = 0
res = self.api.post(_get_path('routers', fmt=self.fmt),
self.serialize(data),
content_type='application/%s' % self.fmt)
instance.create_router.assert_called_with(mock.ANY,
router=data)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
res = self.deserialize(res)
self.assertTrue('router' in res)
router = res['router']
self.assertEqual(router['id'], router_id)
self.assertEqual(router['status'], "ACTIVE")
self.assertEqual(router['admin_state_up'], True)
def test_router_list(self):
router_id = _uuid()
return_value = [{'name': 'router1', 'admin_state_up': True,
'tenant_id': _uuid(), 'id': router_id}]
instance = self.plugin.return_value
instance.get_routers.return_value = return_value
res = self.api.get(_get_path('routers', fmt=self.fmt))
instance.get_routers.assert_called_with(mock.ANY, fields=mock.ANY,
filters=mock.ANY,
sorts=mock.ANY,
limit=mock.ANY,
marker=mock.ANY,
page_reverse=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
res = self.deserialize(res)
self.assertTrue('routers' in res)
self.assertEqual(1, len(res['routers']))
self.assertEqual(router_id, res['routers'][0]['id'])
def test_router_update(self):
router_id = _uuid()
update_data = {'router': {'admin_state_up': False}}
return_value = {'name': 'router1', 'admin_state_up': False,
'tenant_id': _uuid(),
'status': "ACTIVE", 'id': router_id}
instance = self.plugin.return_value
instance.update_router.return_value = return_value
res = self.api.put(_get_path('routers', id=router_id,
fmt=self.fmt),
self.serialize(update_data))
instance.update_router.assert_called_with(mock.ANY, router_id,
router=update_data)
self.assertEqual(res.status_int, exc.HTTPOk.code)
res = self.deserialize(res)
self.assertTrue('router' in res)
router = res['router']
self.assertEqual(router['id'], router_id)
self.assertEqual(router['status'], "ACTIVE")
self.assertEqual(router['admin_state_up'], False)
def test_router_get(self):
router_id = _uuid()
return_value = {'name': 'router1', 'admin_state_up': False,
'tenant_id': _uuid(),
'status': "ACTIVE", 'id': router_id}
instance = self.plugin.return_value
instance.get_router.return_value = return_value
res = self.api.get(_get_path('routers', id=router_id,
fmt=self.fmt))
instance.get_router.assert_called_with(mock.ANY, router_id,
fields=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
res = self.deserialize(res)
self.assertTrue('router' in res)
router = res['router']
self.assertEqual(router['id'], router_id)
self.assertEqual(router['status'], "ACTIVE")
self.assertEqual(router['admin_state_up'], False)
def test_router_delete(self):
router_id = _uuid()
res = self.api.delete(_get_path('routers', id=router_id))
instance = self.plugin.return_value
instance.delete_router.assert_called_with(mock.ANY, router_id)
self.assertEqual(res.status_int, exc.HTTPNoContent.code)
def test_router_add_interface(self):
router_id = _uuid()
subnet_id = _uuid()
port_id = _uuid()
interface_data = {'subnet_id': subnet_id}
return_value = copy.deepcopy(interface_data)
return_value['port_id'] = port_id
instance = self.plugin.return_value
instance.add_router_interface.return_value = return_value
path = _get_path('routers', id=router_id,
action="add_router_interface",
fmt=self.fmt)
res = self.api.put(path, self.serialize(interface_data))
instance.add_router_interface.assert_called_with(mock.ANY, router_id,
interface_data)
self.assertEqual(res.status_int, exc.HTTPOk.code)
res = self.deserialize(res)
self.assertTrue('port_id' in res)
self.assertEqual(res['port_id'], port_id)
self.assertEqual(res['subnet_id'], subnet_id)
class L3NatExtensionTestCaseXML(L3NatExtensionTestCase):
fmt = 'xml'
# This plugin class is just for testing
class TestL3NatPlugin(db_base_plugin_v2.QuantumDbPluginV2,
l3_db.L3_NAT_db_mixin):
__native_pagination_support = True
__native_sorting_support = True
supported_extension_aliases = ["router"]
def create_network(self, context, network):
session = context.session
with session.begin(subtransactions=True):
net = super(TestL3NatPlugin, self).create_network(context,
network)
self._process_l3_create(context, network['network'], net['id'])
self._extend_network_dict_l3(context, net)
return net
def update_network(self, context, id, network):
session = context.session
with session.begin(subtransactions=True):
net = super(TestL3NatPlugin, self).update_network(context, id,
network)
self._process_l3_update(context, network['network'], id)
self._extend_network_dict_l3(context, net)
return net
def delete_network(self, context, id):
session = context.session
with session.begin(subtransactions=True):
super(TestL3NatPlugin, self).delete_network(context, id)
def get_network(self, context, id, fields=None):
net = super(TestL3NatPlugin, self).get_network(context, id, None)
self._extend_network_dict_l3(context, net)
return self._fields(net, fields)
def get_networks(self, context, filters=None, fields=None,
sorts=[], limit=None, marker=None,
page_reverse=False):
nets = super(TestL3NatPlugin, self).get_networks(
context, filters=filters, fields=fields, sorts=sorts, limit=limit,
marker=marker, page_reverse=page_reverse)
for net in nets:
self._extend_network_dict_l3(context, net)
return [self._fields(net, fields) for net in nets]
def delete_port(self, context, id, l3_port_check=True):
if l3_port_check:
self.prevent_l3_port_deletion(context, id)
self.disassociate_floatingips(context, id)
return super(TestL3NatPlugin, self).delete_port(context, id)
class L3NatTestCaseMixin(object):
def _create_network(self, fmt, name, admin_state_up, **kwargs):
""" Override the routine for allowing the router:external attribute """
# attributes containing a colon should be passed with
# a double underscore
        new_args = dict((k.replace('__', ':'), v)
                        for k, v in kwargs.iteritems())
arg_list = new_args.pop('arg_list', ()) + (l3.EXTERNAL,)
return super(L3NatTestCaseMixin, self)._create_network(
fmt, name, admin_state_up, arg_list=arg_list, **new_args)
def _create_router(self, fmt, tenant_id, name=None,
admin_state_up=None, set_context=False,
arg_list=None, **kwargs):
data = {'router': {'tenant_id': tenant_id}}
if name:
data['router']['name'] = name
if admin_state_up:
data['router']['admin_state_up'] = admin_state_up
for arg in (('admin_state_up', 'tenant_id') + (arg_list or ())):
# Arg must be present and not empty
if arg in kwargs and kwargs[arg]:
data['router'][arg] = kwargs[arg]
router_req = self.new_create_request('routers', data, fmt)
if set_context and tenant_id:
# create a specific auth context for this request
router_req.environ['quantum.context'] = context.Context(
'', tenant_id)
return router_req.get_response(self.ext_api)
def _make_router(self, fmt, tenant_id, name=None,
admin_state_up=None, set_context=False):
res = self._create_router(fmt, tenant_id, name,
admin_state_up, set_context)
return self.deserialize(fmt, res)
def _add_external_gateway_to_router(self, router_id, network_id,
expected_code=exc.HTTPOk.code,
quantum_context=None):
return self._update('routers', router_id,
{'router': {'external_gateway_info':
{'network_id': network_id}}},
expected_code=expected_code,
quantum_context=quantum_context)
def _remove_external_gateway_from_router(self, router_id, network_id,
expected_code=exc.HTTPOk.code):
return self._update('routers', router_id,
{'router': {'external_gateway_info':
{}}},
expected_code=expected_code)
def _router_interface_action(self, action, router_id, subnet_id, port_id,
expected_code=exc.HTTPOk.code):
interface_data = {}
if subnet_id:
interface_data.update({'subnet_id': subnet_id})
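        # for an 'add' action, only send the port when no subnet was given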
if port_id and (action != 'add' or not subnet_id):
interface_data.update({'port_id': port_id})
req = self.new_action_request('routers', interface_data, router_id,
"%s_router_interface" % action)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, expected_code)
return self.deserialize(self.fmt, res)
@contextlib.contextmanager
def router(self, name='router1', admin_state_up=True,
fmt=None, tenant_id=_uuid(), set_context=False):
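        # note: the tenant_id default above is evaluated only once, when the
        # method is defined, so routers created without an explicit
        # tenant_id all share the same tenant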
router = self._make_router(fmt or self.fmt, tenant_id, name,
admin_state_up, set_context)
try:
yield router
finally:
self._delete('routers', router['router']['id'])
def _set_net_external(self, net_id):
self._update('networks', net_id,
{'network': {l3.EXTERNAL: True}})
def _create_floatingip(self, fmt, network_id, port_id=None,
fixed_ip=None, set_context=False):
data = {'floatingip': {'floating_network_id': network_id,
'tenant_id': self._tenant_id}}
if port_id:
data['floatingip']['port_id'] = port_id
if fixed_ip:
data['floatingip']['fixed_ip_address'] = fixed_ip
floatingip_req = self.new_create_request('floatingips', data, fmt)
if set_context and self._tenant_id:
# create a specific auth context for this request
floatingip_req.environ['quantum.context'] = context.Context(
'', self._tenant_id)
return floatingip_req.get_response(self.ext_api)
def _make_floatingip(self, fmt, network_id, port_id=None,
fixed_ip=None, set_context=False):
res = self._create_floatingip(fmt, network_id, port_id,
fixed_ip, set_context)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
return self.deserialize(fmt, res)
def _validate_floating_ip(self, fip):
body = self._list('floatingips')
self.assertEqual(len(body['floatingips']), 1)
self.assertEqual(body['floatingips'][0]['id'],
fip['floatingip']['id'])
body = self._show('floatingips', fip['floatingip']['id'])
self.assertEqual(body['floatingip']['id'],
fip['floatingip']['id'])
@contextlib.contextmanager
def floatingip_with_assoc(self, port_id=None, fmt=None,
set_context=False):
with self.subnet(cidr='11.0.0.0/24') as public_sub:
self._set_net_external(public_sub['subnet']['network_id'])
with self.port() as private_port:
with self.router() as r:
sid = private_port['port']['fixed_ips'][0]['subnet_id']
private_sub = {'subnet': {'id': sid}}
floatingip = None
try:
self._add_external_gateway_to_router(
r['router']['id'],
public_sub['subnet']['network_id'])
self._router_interface_action(
'add', r['router']['id'],
private_sub['subnet']['id'], None)
floatingip = self._make_floatingip(
fmt or self.fmt,
public_sub['subnet']['network_id'],
port_id=private_port['port']['id'],
set_context=False)
yield floatingip
finally:
if floatingip:
self._delete('floatingips',
floatingip['floatingip']['id'])
self._router_interface_action(
'remove', r['router']['id'],
private_sub['subnet']['id'], None)
self._remove_external_gateway_from_router(
r['router']['id'],
public_sub['subnet']['network_id'])
@contextlib.contextmanager
def floatingip_no_assoc(self, private_sub, fmt=None, set_context=False):
with self.subnet(cidr='12.0.0.0/24') as public_sub:
self._set_net_external(public_sub['subnet']['network_id'])
with self.router() as r:
floatingip = None
try:
self._add_external_gateway_to_router(
r['router']['id'],
public_sub['subnet']['network_id'])
self._router_interface_action('add', r['router']['id'],
private_sub['subnet']['id'],
None)
floatingip = self._make_floatingip(
fmt or self.fmt,
public_sub['subnet']['network_id'],
set_context=set_context)
yield floatingip
finally:
if floatingip:
self._delete('floatingips',
floatingip['floatingip']['id'])
self._router_interface_action('remove', r['router']['id'],
private_sub['subnet']['id'],
None)
self._remove_external_gateway_from_router(
r['router']['id'],
public_sub['subnet']['network_id'])
class L3NatTestCaseBase(L3NatTestCaseMixin,
test_db_plugin.QuantumDbPluginV2TestCase):
def setUp(self):
test_config['plugin_name_v2'] = (
'quantum.tests.unit.test_l3_plugin.TestL3NatPlugin')
# for these tests we need to enable overlapping ips
cfg.CONF.set_default('allow_overlapping_ips', True)
ext_mgr = L3TestExtensionManager()
test_config['extension_manager'] = ext_mgr
super(L3NatTestCaseBase, self).setUp()
# Set to None to reload the drivers
notifier_api._drivers = None
cfg.CONF.set_override("notification_driver", [test_notifier.__name__])
def tearDown(self):
test_notifier.NOTIFICATIONS = []
super(L3NatTestCaseBase, self).tearDown()
class L3NatDBTestCase(L3NatTestCaseBase):
def test_router_create(self):
name = 'router1'
tenant_id = _uuid()
expected_value = [('name', name), ('tenant_id', tenant_id),
('admin_state_up', True), ('status', 'ACTIVE'),
('external_gateway_info', None)]
        with self.router(name=name, admin_state_up=True,
tenant_id=tenant_id) as router:
for k, v in expected_value:
self.assertEqual(router['router'][k], v)
def test_router_create_with_gwinfo(self):
with self.subnet() as s:
self._set_net_external(s['subnet']['network_id'])
data = {'router': {'tenant_id': _uuid()}}
data['router']['name'] = 'router1'
data['router']['external_gateway_info'] = {
'network_id': s['subnet']['network_id']}
router_req = self.new_create_request('routers', data, self.fmt)
res = router_req.get_response(self.ext_api)
router = self.deserialize(self.fmt, res)
self.assertEqual(
s['subnet']['network_id'],
router['router']['external_gateway_info']['network_id'])
self._delete('routers', router['router']['id'])
def test_router_list(self):
with contextlib.nested(self.router(),
self.router(),
self.router()
) as routers:
self._test_list_resources('router', routers)
def test_router_list_with_parameters(self):
with contextlib.nested(self.router(name='router1'),
self.router(name='router2'),
) as (router1, router2):
query_params = 'name=router1'
self._test_list_resources('router', [router1],
query_params=query_params)
query_params = 'name=router2'
self._test_list_resources('router', [router2],
query_params=query_params)
query_params = 'name=router3'
self._test_list_resources('router', [],
query_params=query_params)
def test_router_list_with_sort(self):
with contextlib.nested(self.router(name='router1'),
self.router(name='router2'),
self.router(name='router3')
) as (router1, router2, router3):
self._test_list_with_sort('router', (router3, router2, router1),
[('name', 'desc')])
def test_router_list_with_pagination(self):
with contextlib.nested(self.router(name='router1'),
self.router(name='router2'),
self.router(name='router3')
) as (router1, router2, router3):
self._test_list_with_pagination('router',
(router1, router2, router3),
('name', 'asc'), 2, 2)
def test_router_list_with_pagination_reverse(self):
with contextlib.nested(self.router(name='router1'),
self.router(name='router2'),
self.router(name='router3')
) as (router1, router2, router3):
self._test_list_with_pagination_reverse('router',
(router1, router2,
router3),
('name', 'asc'), 2, 2)
def test_router_update(self):
rname1 = "yourrouter"
rname2 = "nachorouter"
with self.router(name=rname1) as r:
body = self._show('routers', r['router']['id'])
self.assertEqual(body['router']['name'], rname1)
body = self._update('routers', r['router']['id'],
{'router': {'name': rname2}})
body = self._show('routers', r['router']['id'])
self.assertEqual(body['router']['name'], rname2)
def test_router_update_gateway(self):
with self.router() as r:
with self.subnet() as s1:
with self.subnet() as s2:
self._set_net_external(s1['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s1['subnet']['network_id'])
body = self._show('routers', r['router']['id'])
net_id = (body['router']
['external_gateway_info']['network_id'])
self.assertEqual(net_id, s1['subnet']['network_id'])
self._set_net_external(s2['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s2['subnet']['network_id'])
body = self._show('routers', r['router']['id'])
net_id = (body['router']
['external_gateway_info']['network_id'])
self.assertEqual(net_id, s2['subnet']['network_id'])
self._remove_external_gateway_from_router(
r['router']['id'],
s2['subnet']['network_id'])
def test_router_update_gateway_with_existed_floatingip(self):
with self.subnet() as subnet:
self._set_net_external(subnet['subnet']['network_id'])
with self.floatingip_with_assoc() as fip:
self._add_external_gateway_to_router(
fip['floatingip']['router_id'],
subnet['subnet']['network_id'],
expected_code=exc.HTTPConflict.code)
def test_router_update_gateway_to_empty_with_existed_floatingip(self):
with self.floatingip_with_assoc() as fip:
self._remove_external_gateway_from_router(
fip['floatingip']['router_id'], None,
expected_code=exc.HTTPConflict.code)
def test_router_add_interface_subnet(self):
exp_notifications = ['router.create.start',
'router.create.end',
'network.create.start',
'network.create.end',
'subnet.create.start',
'subnet.create.end',
'router.interface.create',
'router.interface.delete']
with self.router() as r:
with self.subnet() as s:
body = self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None)
self.assertTrue('port_id' in body)
# fetch port and confirm device_id
r_port_id = body['port_id']
body = self._show('ports', r_port_id)
self.assertEqual(body['port']['device_id'], r['router']['id'])
body = self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None)
body = self._show('ports', r_port_id,
expected_code=exc.HTTPNotFound.code)
self.assertEqual(
set(n['event_type'] for n in test_notifier.NOTIFICATIONS),
set(exp_notifications))
for n in test_notifier.NOTIFICATIONS:
if n['event_type'].startswith('router.interface.'):
payload = n['payload']['router.interface']
self.assertIn('id', payload)
                        self.assertEqual(payload['id'], r['router']['id'])
self.assertIn('tenant_id', payload)
stid = s['subnet']['tenant_id']
                        # tolerate the subnet tenant_id being deliberately
                        # set to '' in the Nicira metadata access case
self.assertTrue(payload['tenant_id'] == stid or
payload['tenant_id'] == '')
def test_router_add_interface_subnet_with_bad_tenant_returns_404(self):
with mock.patch('quantum.context.Context.to_dict') as tdict:
tenant_id = _uuid()
admin_context = {'roles': ['admin']}
tenant_context = {'tenant_id': 'bad_tenant',
'roles': []}
tdict.return_value = admin_context
with self.router(tenant_id=tenant_id) as r:
with self.network(tenant_id=tenant_id) as n:
with self.subnet(network=n) as s:
tdict.return_value = tenant_context
err_code = exc.HTTPNotFound.code
self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None,
err_code)
tdict.return_value = admin_context
body = self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None)
self.assertTrue('port_id' in body)
tdict.return_value = tenant_context
self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None,
err_code)
tdict.return_value = admin_context
body = self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None)
def test_router_add_interface_subnet_with_port_from_other_tenant(self):
tenant_id = _uuid()
other_tenant_id = _uuid()
tenant_context = context.Context(user_id=None, tenant_id=tenant_id)
admin_context = context.get_admin_context()
with mock.patch('quantum.context.Context') as ctx:
ctx.return_value = admin_context
with contextlib.nested(
self.router(tenant_id=tenant_id),
self.network(tenant_id=tenant_id),
self.network(tenant_id=other_tenant_id)) as (r, n1, n2):
with contextlib.nested(
self.subnet(network=n1, cidr='10.0.0.0/24'),
self.subnet(network=n2, cidr='10.1.0.0/24')) as (s1, s2):
ctx.return_value = admin_context
body = self._router_interface_action(
'add',
r['router']['id'],
s2['subnet']['id'],
None)
self.assertTrue('port_id' in body)
ctx.return_value = tenant_context
self._router_interface_action(
'add',
r['router']['id'],
s1['subnet']['id'],
None)
self.assertTrue('port_id' in body)
self._router_interface_action(
'remove',
r['router']['id'],
s1['subnet']['id'],
None)
ctx.return_value = admin_context
body = self._router_interface_action(
'remove',
r['router']['id'],
s2['subnet']['id'],
None)
def test_router_add_interface_port(self):
with self.router() as r:
with self.port(no_delete=True) as p:
body = self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'])
self.assertTrue('port_id' in body)
self.assertEqual(body['port_id'], p['port']['id'])
# fetch port and confirm device_id
body = self._show('ports', p['port']['id'])
self.assertEqual(body['port']['device_id'], r['router']['id'])
# clean-up
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'])
def test_router_add_interface_port_bad_tenant_returns_404(self):
with mock.patch('quantum.context.Context.to_dict') as tdict:
admin_context = {'roles': ['admin']}
tenant_context = {'tenant_id': 'bad_tenant',
'roles': []}
tdict.return_value = admin_context
with self.router() as r:
with self.port(no_delete=True) as p:
tdict.return_value = tenant_context
err_code = exc.HTTPNotFound.code
self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'],
err_code)
tdict.return_value = admin_context
self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'])
tdict.return_value = tenant_context
# clean-up
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'],
err_code)
tdict.return_value = admin_context
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'])
def test_router_add_interface_dup_subnet1_returns_400(self):
with self.router() as r:
with self.subnet() as s:
self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None)
self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None,
expected_code=exc.
HTTPBadRequest.code)
self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None)
def test_router_add_interface_dup_subnet2_returns_400(self):
with self.router() as r:
with self.subnet() as s:
with self.port(subnet=s, no_delete=True) as p1:
with self.port(subnet=s) as p2:
self._router_interface_action('add',
r['router']['id'],
None,
p1['port']['id'])
self._router_interface_action('add',
r['router']['id'],
None,
p2['port']['id'],
expected_code=exc.
HTTPBadRequest.code)
# clean-up
self._router_interface_action('remove',
r['router']['id'],
None,
p1['port']['id'])
def test_router_add_interface_overlapped_cidr_returns_400(self):
with self.router() as r:
with self.subnet(cidr='10.0.1.0/24') as s1:
self._router_interface_action('add',
r['router']['id'],
s1['subnet']['id'],
None)
def try_overlapped_cidr(cidr):
with self.subnet(cidr=cidr) as s2:
self._router_interface_action('add',
r['router']['id'],
s2['subnet']['id'],
None,
expected_code=exc.
HTTPBadRequest.code)
# another subnet with same cidr
try_overlapped_cidr('10.0.1.0/24')
# another subnet with overlapped cidr including s1
try_overlapped_cidr('10.0.0.0/16')
# another subnet with overlapped cidr included by s1
try_overlapped_cidr('10.0.1.1/32')
# clean-up
self._router_interface_action('remove',
r['router']['id'],
s1['subnet']['id'],
None)
def test_router_add_interface_no_data_returns_400(self):
with self.router() as r:
self._router_interface_action('add',
r['router']['id'],
None,
None,
expected_code=exc.
HTTPBadRequest.code)
def test_router_add_gateway_dup_subnet1_returns_400(self):
with self.router() as r:
with self.subnet() as s:
self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None)
self._set_net_external(s['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s['subnet']['network_id'],
expected_code=exc.HTTPBadRequest.code)
self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None)
def test_router_add_gateway_dup_subnet2_returns_400(self):
with self.router() as r:
with self.subnet() as s:
self._set_net_external(s['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s['subnet']['network_id'])
self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None,
expected_code=exc.
HTTPBadRequest.code)
self._remove_external_gateway_from_router(
r['router']['id'],
s['subnet']['network_id'])
def test_router_add_gateway(self):
with self.router() as r:
with self.subnet() as s:
self._set_net_external(s['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s['subnet']['network_id'])
body = self._show('routers', r['router']['id'])
net_id = body['router']['external_gateway_info']['network_id']
self.assertEqual(net_id, s['subnet']['network_id'])
self._remove_external_gateway_from_router(
r['router']['id'],
s['subnet']['network_id'])
body = self._show('routers', r['router']['id'])
gw_info = body['router']['external_gateway_info']
self.assertEqual(gw_info, None)
def test_router_add_gateway_tenant_ctx(self):
with self.router(tenant_id='noadmin',
set_context=True) as r:
with self.subnet() as s:
self._set_net_external(s['subnet']['network_id'])
ctx = context.Context('', 'noadmin')
self._add_external_gateway_to_router(
r['router']['id'],
s['subnet']['network_id'],
quantum_context=ctx)
body = self._show('routers', r['router']['id'])
net_id = body['router']['external_gateway_info']['network_id']
self.assertEqual(net_id, s['subnet']['network_id'])
self._remove_external_gateway_from_router(
r['router']['id'],
s['subnet']['network_id'])
body = self._show('routers', r['router']['id'])
gw_info = body['router']['external_gateway_info']
self.assertEqual(gw_info, None)
def test_router_add_gateway_invalid_network_returns_404(self):
with self.router() as r:
self._add_external_gateway_to_router(
r['router']['id'],
"foobar", expected_code=exc.HTTPNotFound.code)
def test_router_add_gateway_net_not_external_returns_400(self):
with self.router() as r:
with self.subnet() as s:
# intentionally do not set net as external
self._add_external_gateway_to_router(
r['router']['id'],
s['subnet']['network_id'],
expected_code=exc.HTTPBadRequest.code)
def test_router_add_gateway_no_subnet_returns_400(self):
with self.router() as r:
with self.network() as n:
self._set_net_external(n['network']['id'])
self._add_external_gateway_to_router(
r['router']['id'],
n['network']['id'], expected_code=exc.HTTPBadRequest.code)
def test_router_remove_interface_inuse_returns_409(self):
with self.router() as r:
with self.subnet() as s:
self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None)
self._delete('routers', r['router']['id'],
expected_code=exc.HTTPConflict.code)
# remove interface so test can exit without errors
self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None)
def test_router_remove_interface_wrong_subnet_returns_409(self):
with self.router() as r:
with self.subnet() as s:
with self.port(no_delete=True) as p:
self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'])
self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
p['port']['id'],
exc.HTTPConflict.code)
                    # remove properly to clean up
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'])
def test_router_remove_interface_wrong_port_returns_404(self):
with self.router() as r:
with self.subnet():
with self.port(no_delete=True) as p:
self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'])
# create another port for testing failure case
res = self._create_port(self.fmt, p['port']['network_id'])
p2 = self.deserialize(self.fmt, res)
self._router_interface_action('remove',
r['router']['id'],
None,
p2['port']['id'],
exc.HTTPNotFound.code)
# remove correct interface to cleanup
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'])
# remove extra port created
self._delete('ports', p2['port']['id'])
    def test_router_delete(self):
        with self.router() as router:
            router_id = router['router']['id']
        # the router context manager deletes the router on exit, so a
        # subsequent show request must return 404
        req = self.new_show_request('routers', router_id)
        res = req.get_response(self._api_for_resource('routers'))
        self.assertEqual(res.status_int, exc.HTTPNotFound.code)
def test_router_delete_with_port_existed_returns_409(self):
with self.subnet() as subnet:
res = self._create_router(self.fmt, _uuid())
router = self.deserialize(self.fmt, res)
self._router_interface_action('add',
router['router']['id'],
subnet['subnet']['id'],
None)
self._delete('routers', router['router']['id'],
exc.HTTPConflict.code)
self._router_interface_action('remove',
router['router']['id'],
subnet['subnet']['id'],
None)
self._delete('routers', router['router']['id'])
def test_router_delete_with_floatingip_existed_returns_409(self):
with self.port() as p:
private_sub = {'subnet': {'id':
p['port']['fixed_ips'][0]['subnet_id']}}
with self.subnet(cidr='12.0.0.0/24') as public_sub:
self._set_net_external(public_sub['subnet']['network_id'])
res = self._create_router(self.fmt, _uuid())
r = self.deserialize(self.fmt, res)
self._add_external_gateway_to_router(
r['router']['id'],
public_sub['subnet']['network_id'])
self._router_interface_action('add', r['router']['id'],
private_sub['subnet']['id'],
None)
res = self._create_floatingip(
self.fmt, public_sub['subnet']['network_id'],
port_id=p['port']['id'])
self.assertEqual(res.status_int, exc.HTTPCreated.code)
floatingip = self.deserialize(self.fmt, res)
self._delete('routers', r['router']['id'],
expected_code=exc.HTTPConflict.code)
# Cleanup
self._delete('floatingips', floatingip['floatingip']['id'])
self._router_interface_action('remove', r['router']['id'],
private_sub['subnet']['id'],
None)
self._delete('routers', r['router']['id'])
def test_router_show(self):
name = 'router1'
tenant_id = _uuid()
expected_value = [('name', name), ('tenant_id', tenant_id),
('admin_state_up', True), ('status', 'ACTIVE'),
('external_gateway_info', None)]
        with self.router(name=name, admin_state_up=True,
tenant_id=tenant_id) as router:
res = self._show('routers', router['router']['id'])
for k, v in expected_value:
self.assertEqual(res['router'][k], v)
def test_network_update_external_failure(self):
with self.router() as r:
with self.subnet() as s1:
self._set_net_external(s1['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s1['subnet']['network_id'])
self._update('networks', s1['subnet']['network_id'],
{'network': {'router:external': False}},
expected_code=exc.HTTPConflict.code)
self._remove_external_gateway_from_router(
r['router']['id'],
s1['subnet']['network_id'])
def test_network_update_external(self):
with self.router() as r:
with self.network('test_net') as testnet:
self._set_net_external(testnet['network']['id'])
with self.subnet() as s1:
self._set_net_external(s1['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s1['subnet']['network_id'])
self._update('networks', testnet['network']['id'],
{'network': {'router:external': False}})
self._remove_external_gateway_from_router(
r['router']['id'],
s1['subnet']['network_id'])
def test_floatingip_crd_ops(self):
with self.floatingip_with_assoc() as fip:
self._validate_floating_ip(fip)
# post-delete, check that it is really gone
body = self._list('floatingips')
self.assertEqual(len(body['floatingips']), 0)
self._show('floatingips', fip['floatingip']['id'],
expected_code=exc.HTTPNotFound.code)
def _test_floatingip_with_assoc_fails(self, plugin_class):
        with self.subnet(cidr='200.0.0.0/24') as public_sub:
self._set_net_external(public_sub['subnet']['network_id'])
with self.port() as private_port:
with self.router() as r:
sid = private_port['port']['fixed_ips'][0]['subnet_id']
private_sub = {'subnet': {'id': sid}}
self._add_external_gateway_to_router(
r['router']['id'],
public_sub['subnet']['network_id'])
self._router_interface_action('add', r['router']['id'],
private_sub['subnet']['id'],
None)
method = plugin_class + '._update_fip_assoc'
with mock.patch(method) as pl:
pl.side_effect = q_exc.BadRequest(
resource='floatingip',
msg='fake_error')
res = self._create_floatingip(
self.fmt,
public_sub['subnet']['network_id'],
port_id=private_port['port']['id'])
self.assertEqual(res.status_int, 400)
for p in self._list('ports')['ports']:
if p['device_owner'] == 'network:floatingip':
self.fail('garbage port is not deleted')
self._remove_external_gateway_from_router(
r['router']['id'],
public_sub['subnet']['network_id'])
self._router_interface_action('remove',
r['router']['id'],
private_sub['subnet']['id'],
None)
def test_floatingip_with_assoc_fails(self):
self._test_floatingip_with_assoc_fails(
'quantum.db.l3_db.L3_NAT_db_mixin')
def test_floatingip_update(self):
with self.port() as p:
private_sub = {'subnet': {'id':
p['port']['fixed_ips'][0]['subnet_id']}}
with self.floatingip_no_assoc(private_sub) as fip:
body = self._show('floatingips', fip['floatingip']['id'])
self.assertEqual(body['floatingip']['port_id'], None)
self.assertEqual(body['floatingip']['fixed_ip_address'], None)
port_id = p['port']['id']
ip_address = p['port']['fixed_ips'][0]['ip_address']
body = self._update('floatingips', fip['floatingip']['id'],
{'floatingip': {'port_id': port_id}})
self.assertEqual(body['floatingip']['port_id'], port_id)
self.assertEqual(body['floatingip']['fixed_ip_address'],
ip_address)
def test_floatingip_with_assoc(self):
with self.floatingip_with_assoc() as fip:
body = self._show('floatingips', fip['floatingip']['id'])
self.assertEqual(body['floatingip']['id'],
fip['floatingip']['id'])
self.assertEqual(body['floatingip']['port_id'],
fip['floatingip']['port_id'])
self.assertTrue(body['floatingip']['fixed_ip_address'] is not None)
self.assertTrue(body['floatingip']['router_id'] is not None)
def test_floatingip_port_delete(self):
with self.subnet() as private_sub:
with self.floatingip_no_assoc(private_sub) as fip:
with self.port(subnet=private_sub) as p:
body = self._update('floatingips', fip['floatingip']['id'],
{'floatingip':
{'port_id': p['port']['id']}})
# note: once this port goes out of scope, the port will be
# deleted, which is what we want to test. We want to confirm
# that the fields are set back to None
body = self._show('floatingips', fip['floatingip']['id'])
self.assertEqual(body['floatingip']['id'],
fip['floatingip']['id'])
self.assertEqual(body['floatingip']['port_id'], None)
self.assertEqual(body['floatingip']['fixed_ip_address'], None)
self.assertEqual(body['floatingip']['router_id'], None)
def test_two_fips_one_port_invalid_return_409(self):
with self.floatingip_with_assoc() as fip1:
res = self._create_floatingip(
self.fmt,
fip1['floatingip']['floating_network_id'],
fip1['floatingip']['port_id'])
self.assertEqual(res.status_int, exc.HTTPConflict.code)
def test_floating_ip_direct_port_delete_returns_409(self):
found = False
with self.floatingip_with_assoc():
for p in self._list('ports')['ports']:
if p['device_owner'] == 'network:floatingip':
self._delete('ports', p['id'],
expected_code=exc.HTTPConflict.code)
found = True
self.assertTrue(found)
def test_create_floatingip_no_ext_gateway_return_404(self):
with self.subnet() as public_sub:
self._set_net_external(public_sub['subnet']['network_id'])
with self.port() as private_port:
with self.router():
res = self._create_floatingip(
self.fmt,
public_sub['subnet']['network_id'],
port_id=private_port['port']['id'])
                    # the router has no external gateway set, so the
                    # association must fail with 404
self.assertEqual(res.status_int, exc.HTTPNotFound.code)
def test_create_floating_non_ext_network_returns_400(self):
with self.subnet() as public_sub:
# normally we would set the network of public_sub to be
# external, but the point of this test is to handle when
# that is not the case
with self.router():
res = self._create_floatingip(
self.fmt,
public_sub['subnet']['network_id'])
self.assertEqual(res.status_int, exc.HTTPBadRequest.code)
def test_create_floatingip_no_public_subnet_returns_400(self):
with self.network() as public_network:
with self.port() as private_port:
with self.router() as r:
sid = private_port['port']['fixed_ips'][0]['subnet_id']
private_sub = {'subnet': {'id': sid}}
self._router_interface_action('add', r['router']['id'],
private_sub['subnet']['id'],
None)
res = self._create_floatingip(
self.fmt,
public_network['network']['id'],
port_id=private_port['port']['id'])
self.assertEqual(res.status_int, exc.HTTPBadRequest.code)
# cleanup
self._router_interface_action('remove',
r['router']['id'],
private_sub['subnet']['id'],
None)
def test_create_floatingip_invalid_floating_network_id_returns_400(self):
# API-level test - no need to create all objects for l3 plugin
res = self._create_floatingip(self.fmt, 'iamnotanuuid',
uuidutils.generate_uuid(), '192.168.0.1')
self.assertEqual(res.status_int, 400)
def test_create_floatingip_invalid_floating_port_id_returns_400(self):
# API-level test - no need to create all objects for l3 plugin
res = self._create_floatingip(self.fmt, uuidutils.generate_uuid(),
'iamnotanuuid', '192.168.0.1')
self.assertEqual(res.status_int, 400)
def test_create_floatingip_invalid_fixed_ip_address_returns_400(self):
# API-level test - no need to create all objects for l3 plugin
res = self._create_floatingip(self.fmt, uuidutils.generate_uuid(),
uuidutils.generate_uuid(), 'iamnotnanip')
self.assertEqual(res.status_int, 400)
def test_floatingip_list_with_sort(self):
with contextlib.nested(self.subnet(cidr="10.0.0.0/24"),
self.subnet(cidr="11.0.0.0/24"),
self.subnet(cidr="12.0.0.0/24")
) as (s1, s2, s3):
network_id1 = s1['subnet']['network_id']
network_id2 = s2['subnet']['network_id']
network_id3 = s3['subnet']['network_id']
self._set_net_external(network_id1)
self._set_net_external(network_id2)
self._set_net_external(network_id3)
fp1 = self._make_floatingip(self.fmt, network_id1)
fp2 = self._make_floatingip(self.fmt, network_id2)
fp3 = self._make_floatingip(self.fmt, network_id3)
try:
self._test_list_with_sort('floatingip', (fp3, fp2, fp1),
[('floating_ip_address', 'desc')])
finally:
self._delete('floatingips', fp1['floatingip']['id'])
self._delete('floatingips', fp2['floatingip']['id'])
self._delete('floatingips', fp3['floatingip']['id'])
def test_floatingip_list_with_port_id(self):
with self.floatingip_with_assoc() as fip:
port_id = fip['floatingip']['port_id']
res = self._list('floatingips',
query_params="port_id=%s" % port_id)
self.assertEqual(len(res['floatingips']), 1)
res = self._list('floatingips', query_params="port_id=aaa")
self.assertEqual(len(res['floatingips']), 0)
def test_floatingip_list_with_pagination(self):
with contextlib.nested(self.subnet(cidr="10.0.0.0/24"),
self.subnet(cidr="11.0.0.0/24"),
self.subnet(cidr="12.0.0.0/24")
) as (s1, s2, s3):
network_id1 = s1['subnet']['network_id']
network_id2 = s2['subnet']['network_id']
network_id3 = s3['subnet']['network_id']
self._set_net_external(network_id1)
self._set_net_external(network_id2)
self._set_net_external(network_id3)
fp1 = self._make_floatingip(self.fmt, network_id1)
fp2 = self._make_floatingip(self.fmt, network_id2)
fp3 = self._make_floatingip(self.fmt, network_id3)
try:
self._test_list_with_pagination(
'floatingip', (fp1, fp2, fp3),
('floating_ip_address', 'asc'), 2, 2)
finally:
self._delete('floatingips', fp1['floatingip']['id'])
self._delete('floatingips', fp2['floatingip']['id'])
self._delete('floatingips', fp3['floatingip']['id'])
def test_floatingip_list_with_pagination_reverse(self):
with contextlib.nested(self.subnet(cidr="10.0.0.0/24"),
self.subnet(cidr="11.0.0.0/24"),
self.subnet(cidr="12.0.0.0/24")
) as (s1, s2, s3):
network_id1 = s1['subnet']['network_id']
network_id2 = s2['subnet']['network_id']
network_id3 = s3['subnet']['network_id']
self._set_net_external(network_id1)
self._set_net_external(network_id2)
self._set_net_external(network_id3)
fp1 = self._make_floatingip(self.fmt, network_id1)
fp2 = self._make_floatingip(self.fmt, network_id2)
fp3 = self._make_floatingip(self.fmt, network_id3)
try:
self._test_list_with_pagination_reverse(
'floatingip', (fp1, fp2, fp3),
('floating_ip_address', 'asc'), 2, 2)
finally:
self._delete('floatingips', fp1['floatingip']['id'])
self._delete('floatingips', fp2['floatingip']['id'])
self._delete('floatingips', fp3['floatingip']['id'])
def test_floatingip_delete_router_intf_with_subnet_id_returns_409(self):
found = False
with self.floatingip_with_assoc():
for p in self._list('ports')['ports']:
if p['device_owner'] == 'network:router_interface':
subnet_id = p['fixed_ips'][0]['subnet_id']
router_id = p['device_id']
self._router_interface_action(
'remove', router_id, subnet_id, None,
expected_code=exc.HTTPConflict.code)
found = True
break
self.assertTrue(found)
def test_floatingip_delete_router_intf_with_port_id_returns_409(self):
found = False
with self.floatingip_with_assoc():
for p in self._list('ports')['ports']:
if p['device_owner'] == 'network:router_interface':
router_id = p['device_id']
self._router_interface_action(
'remove', router_id, None, p['id'],
expected_code=exc.HTTPConflict.code)
found = True
break
self.assertTrue(found)
def test_list_nets_external(self):
with self.network() as n1:
self._set_net_external(n1['network']['id'])
with self.network():
body = self._list('networks')
self.assertEqual(len(body['networks']), 2)
body = self._list('networks',
query_params="%s=True" % l3.EXTERNAL)
self.assertEqual(len(body['networks']), 1)
body = self._list('networks',
query_params="%s=False" % l3.EXTERNAL)
self.assertEqual(len(body['networks']), 1)
def test_list_nets_external_pagination(self):
if self._skip_native_pagination:
self.skipTest("Skip test for not implemented pagination feature")
with contextlib.nested(self.network(name='net1'),
self.network(name='net3')) as (n1, n3):
self._set_net_external(n1['network']['id'])
self._set_net_external(n3['network']['id'])
with self.network(name='net2') as n2:
self._test_list_with_pagination(
'network', (n1, n3), ('name', 'asc'), 1, 3,
query_params='router:external=True')
self._test_list_with_pagination(
'network', (n2, ), ('name', 'asc'), 1, 2,
query_params='router:external=False')
def test_get_network_succeeds_without_filter(self):
plugin = QuantumManager.get_plugin()
ctx = context.Context(None, None, is_admin=True)
result = plugin.get_networks(ctx, filters=None)
self.assertEqual(result, [])
def test_network_filter_hook_admin_context(self):
plugin = QuantumManager.get_plugin()
ctx = context.Context(None, None, is_admin=True)
model = models_v2.Network
conditions = plugin._network_filter_hook(ctx, model, [])
self.assertEqual(conditions, [])
def test_network_filter_hook_nonadmin_context(self):
plugin = QuantumManager.get_plugin()
ctx = context.Context('edinson', 'cavani')
model = models_v2.Network
txt = "externalnetworks.network_id IS NOT NULL"
conditions = plugin._network_filter_hook(ctx, model, [])
self.assertEqual(conditions.__str__(), txt)
        # Try to concatenate conditions
conditions = plugin._network_filter_hook(ctx, model, conditions)
self.assertEqual(conditions.__str__(), "%s OR %s" % (txt, txt))
def test_create_port_external_network_non_admin_fails(self):
with self.network(router__external=True) as ext_net:
with self.subnet(network=ext_net) as ext_subnet:
with testtools.ExpectedException(
exc.HTTPClientError) as ctx_manager:
with self.port(subnet=ext_subnet,
set_context='True',
tenant_id='noadmin'):
pass
self.assertEqual(ctx_manager.exception.code, 403)
    def test_create_port_external_network_admin_succeeds(self):
with self.network(router__external=True) as ext_net:
with self.subnet(network=ext_net) as ext_subnet:
with self.port(subnet=ext_subnet) as port:
self.assertEqual(port['port']['network_id'],
ext_net['network']['id'])
def test_create_external_network_non_admin_fails(self):
with testtools.ExpectedException(exc.HTTPClientError) as ctx_manager:
with self.network(router__external=True,
set_context='True',
tenant_id='noadmin'):
pass
self.assertEqual(ctx_manager.exception.code, 403)
    def test_create_external_network_admin_succeeds(self):
with self.network(router__external=True) as ext_net:
self.assertEqual(ext_net['network'][l3.EXTERNAL],
True)
    def _test_notify_op_agent(self, target_func, *args):
        l3_rpc_agent_api_str = (
            'quantum.api.rpc.agentnotifiers.l3_rpc_agent_api.L3AgentNotifyAPI')
        oldNotify = l3_rpc_agent_api.L3AgentNotify
        try:
            with mock.patch(l3_rpc_agent_api_str) as notifyApi:
                l3_rpc_agent_api.L3AgentNotify = notifyApi
                kargs = list(args)
                kargs.append(notifyApi)
                target_func(*kargs)
        finally:
            # always restore the original notifier, even if the test fails
            l3_rpc_agent_api.L3AgentNotify = oldNotify
def _test_router_gateway_op_agent(self, notifyApi):
with self.router() as r:
with self.subnet() as s:
self._set_net_external(s['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s['subnet']['network_id'])
self._remove_external_gateway_from_router(
r['router']['id'],
s['subnet']['network_id'])
self.assertEqual(
2, notifyApi.routers_updated.call_count)
def test_router_gateway_op_agent(self):
self._test_notify_op_agent(self._test_router_gateway_op_agent)
def _test_interfaces_op_agent(self, r, notifyApi):
with self.port(no_delete=True) as p:
self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'])
# clean-up
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'])
self.assertEqual(2, notifyApi.routers_updated.call_count)
def test_interfaces_op_agent(self):
with self.router() as r:
self._test_notify_op_agent(
self._test_interfaces_op_agent, r)
def _test_floatingips_op_agent(self, notifyApi):
with self.floatingip_with_assoc():
pass
# add gateway, add interface, associate, deletion of floatingip,
# delete gateway, delete interface
self.assertEqual(6, notifyApi.routers_updated.call_count)
def test_floatingips_op_agent(self):
self._test_notify_op_agent(self._test_floatingips_op_agent)
def test_l3_agent_routers_query_interfaces(self):
with self.router() as r:
with self.port(no_delete=True) as p:
self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'])
plugin = TestL3NatPlugin()
routers = plugin.get_sync_data(context.get_admin_context(),
None)
self.assertEqual(1, len(routers))
interfaces = routers[0][l3_constants.INTERFACE_KEY]
self.assertEqual(1, len(interfaces))
subnet_id = interfaces[0]['subnet']['id']
wanted_subnetid = p['port']['fixed_ips'][0]['subnet_id']
self.assertEqual(wanted_subnetid, subnet_id)
# clean-up
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'])
    def test_l3_agent_routers_query_ignore_interfaces_with_more_than_one_ip(self):
with self.router() as r:
with self.subnet(cidr='9.0.1.0/24') as subnet:
with self.port(subnet=subnet,
no_delete=True,
fixed_ips=[{'ip_address': '9.0.1.3'}]) as p:
self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'])
port = {'port': {'fixed_ips':
[{'ip_address': '9.0.1.4',
'subnet_id': subnet['subnet']['id']},
{'ip_address': '9.0.1.5',
'subnet_id': subnet['subnet']['id']}]}}
plugin = TestL3NatPlugin()
ctx = context.get_admin_context()
plugin.update_port(ctx, p['port']['id'], port)
routers = plugin.get_sync_data(ctx, None)
self.assertEqual(1, len(routers))
interfaces = routers[0].get(l3_constants.INTERFACE_KEY, [])
self.assertEqual(1, len(interfaces))
# clean-up
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'])
def test_l3_agent_routers_query_gateway(self):
with self.router() as r:
with self.subnet() as s:
self._set_net_external(s['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s['subnet']['network_id'])
plugin = TestL3NatPlugin()
routers = plugin.get_sync_data(context.get_admin_context(),
[r['router']['id']])
self.assertEqual(1, len(routers))
gw_port = routers[0]['gw_port']
self.assertEqual(s['subnet']['id'], gw_port['subnet']['id'])
self._remove_external_gateway_from_router(
r['router']['id'],
s['subnet']['network_id'])
def test_l3_agent_routers_query_floatingips(self):
with self.floatingip_with_assoc() as fip:
plugin = TestL3NatPlugin()
routers = plugin.get_sync_data(context.get_admin_context(),
[fip['floatingip']['router_id']])
self.assertEqual(1, len(routers))
floatingips = routers[0][l3_constants.FLOATINGIP_KEY]
self.assertEqual(1, len(floatingips))
self.assertEqual(floatingips[0]['id'],
fip['floatingip']['id'])
self.assertEqual(floatingips[0]['port_id'],
fip['floatingip']['port_id'])
self.assertTrue(floatingips[0]['fixed_ip_address'] is not None)
self.assertTrue(floatingips[0]['router_id'] is not None)
def test_router_delete_subnet_inuse_returns_409(self):
with self.router() as r:
with self.subnet() as s:
self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None)
                # the subnet cannot be deleted while it is attached to a router
self._delete('subnets', s['subnet']['id'],
expected_code=exc.HTTPConflict.code)
# remove interface so test can exit without errors
self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None)
class L3NatDBTestCaseXML(L3NatDBTestCase):
fmt = 'xml'
| wallnerryan/quantum_migrate | quantum/tests/unit/test_l3_plugin.py | Python | apache-2.0 | 80,012 | 0.000087 |
from gevent.core import loop
count = 0
def incr():
global count
count += 1
# run_callback schedules incr to run on the next loop iteration
event_loop = loop()
event_loop.run_callback(incr)
event_loop.run()
assert count == 1, count
| mgadi/naemonbox | sources/psdash/gevent-1.0.1/greentest/test__loop_callback.py | Python | gpl-2.0 | 161 | 0.006211 |
#!/usr/bin/env python
# Copyright 2021 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Analytics Data API sample application demonstrating the usage of
property quota metadata.
See https://developers.google.com/analytics/devguides/reporting/data/v1/rest/v1beta/properties/runReport#body.request_body.FIELDS.return_property_quota
for more information.
"""
# [START analyticsdata_run_report_with_property_quota]
from google.analytics.data_v1beta import BetaAnalyticsDataClient
from google.analytics.data_v1beta.types import DateRange
from google.analytics.data_v1beta.types import Dimension
from google.analytics.data_v1beta.types import Metric
from google.analytics.data_v1beta.types import RunReportRequest
def run_sample():
"""Runs the sample."""
# TODO(developer): Replace this variable with your Google Analytics 4
# property ID before running the sample.
property_id = "YOUR-GA4-PROPERTY-ID"
run_report_with_property_quota(property_id)
def run_report_with_property_quota(property_id="YOUR-GA4-PROPERTY-ID"):
"""Runs a report and prints property quota information."""
client = BetaAnalyticsDataClient()
request = RunReportRequest(
property=f"properties/{property_id}",
return_property_quota=True,
dimensions=[Dimension(name="country")],
metrics=[Metric(name="activeUsers")],
date_ranges=[DateRange(start_date="7daysAgo", end_date="today")],
)
response = client.run_report(request)
# [START analyticsdata_run_report_with_property_quota_print_response]
if response.property_quota:
print(
f"Tokens per day quota consumed: {response.property_quota.tokens_per_day.consumed}, "
f"remaining: {response.property_quota.tokens_per_day.remaining}."
)
print(
f"Tokens per hour quota consumed: {response.property_quota.tokens_per_hour.consumed}, "
f"remaining: {response.property_quota.tokens_per_hour.remaining}."
)
print(
f"Concurrent requests quota consumed: {response.property_quota.concurrent_requests.consumed}, "
f"remaining: {response.property_quota.concurrent_requests.remaining}."
)
print(
f"Server errors per project per hour quota consumed: {response.property_quota.server_errors_per_project_per_hour.consumed}, "
f"remaining: {response.property_quota.server_errors_per_project_per_hour.remaining}."
)
print(
f"Potentially thresholded requests per hour quota consumed: {response.property_quota.potentially_thresholded_requests_per_hour.consumed}, "
f"remaining: {response.property_quota.potentially_thresholded_requests_per_hour.remaining}."
)
# [END analyticsdata_run_report_with_property_quota_print_response]
# [END analyticsdata_run_report_with_property_quota]
if __name__ == "__main__":
run_sample()
| googleapis/python-analytics-data | samples/snippets/run_report_with_property_quota.py | Python | apache-2.0 | 3,462 | 0.0026 |
__author__ = 'Xsank'
from handlers import IndexHandler
from handlers import WSHandler
handlers=[
(r"/",IndexHandler),
(r"/ws",WSHandler),
] | hanmichael/Pyrumpetroll | urls.py | Python | mit | 153 | 0.026144 |
# -*- coding: utf-8 -*-
'''
Nose wrapper that runs the test files using robot and then compare the
screenshots using perceptualdiff.
'''
import glob
import os
import subprocess
import string
import sys
import unittest
from robot.running.builder import TestSuiteBuilder
class TestRobotS2LScreenshot(unittest.TestCase):
def robot_launcher(self, test_file, test_case):
# run robot test
p = subprocess.Popen(['pybot', '-t', test_case, test_file],
close_fds=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
sys.stdout.write(p.communicate()[0])
self.assertEqual(
p.returncode, 0,
msg='Robot test case failed to complete successfully: %r' % p.returncode)
# diff screenshots with baseline
curr_img = test_case + '.png'
base_img = 'baseline-' + curr_img
diff_img = 'diff-' + curr_img
d = subprocess.Popen(['perceptualdiff', base_img, curr_img,
'-output', diff_img],
close_fds=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
sys.stdout.write(d.communicate()[0])
self.assertEqual(
d.returncode, 0,
msg='Screenshots are different: %r' % d.returncode)
# Dynamically attach one unittest method per robot test case found in the
# local test*.txt files, so each case is reported individually. This must
# run at module level: referencing TestRobotS2LScreenshot inside its own
# class body would raise NameError.
robot_files = glob.glob(os.path.join(os.path.dirname(__file__), 'test*.txt'))
for robot_file in robot_files:
    robot_suite = TestSuiteBuilder().build(robot_file)
    for robot_case in robot_suite.tests:
        def maker(tf, tc):
            def test_run(self):
                self.robot_launcher(tf, tc)
            return test_run
        test_name = string.replace(robot_suite.name + '.' + robot_case.name, ' ', '_')
        setattr(TestRobotS2LScreenshot, test_name, maker(robot_file, robot_case.name))
| canaryhealth/RobotS2LScreenshot | test/test_robots2lscreenshot.py | Python | mit | 1,787 | 0.007834 |
# -*- coding: utf-8 -*-
import pytest
from cfme.configure import about
from utils import version
@pytest.mark.tier(3)
@pytest.mark.sauce
def test_version():
"""Check version presented in UI against version retrieved directly from the machine.
Version retrieved from appliance is in this format: 1.2.3.4
Version in the UI is always: 1.2.3.4.20140505xyzblabla
So we check whether the UI version starts with SSH version
"""
ssh_version = str(version.current_version())
ui_version = about.get_detail(about.VERSION)
assert ui_version.startswith(ssh_version), "UI: {}, SSH: {}".format(ui_version, ssh_version)
| dajohnso/cfme_tests | cfme/tests/configure/test_version.py | Python | gpl-2.0 | 641 | 0.00312 |
# -*- coding: utf-8 -*-
import os, io
from setuptools import setup
from SVNOnline.SVNOnline import __version__
here = os.path.abspath(os.path.dirname(__file__))
README = io.open(os.path.join(here, 'README.rst'), encoding='UTF-8').read()
CHANGES = io.open(os.path.join(here, 'CHANGES.rst'), encoding='UTF-8').read()
setup(name='SVNOnline',
version=__version__,
description='A svn online client.',
keywords=('svn', 'svn client', 'svn online'),
long_description=README + '\n\n\n' + CHANGES,
url='https://github.com/sintrb/SVNOnline',
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
author='sintrb',
author_email='[email protected]',
license='Apache',
packages=['SVNOnline'],
scripts=['SVNOnline/SVNOnline', 'SVNOnline/SVNOnline.bat'],
include_package_data=True,
install_requires=['svn==0.3.36'],
zip_safe=False)
| sintrb/SVNOnline | setup.py | Python | apache-2.0 | 1,012 | 0.000988 |
#!/usr/bin/env python
import sys
def parse_map_file(path):
map_grid = []
# Create a two-dimensional list based on the input data
with open(path, 'r') as f:
width, height = map(int, f.readline().split())
for line in f:
row = map(int, line.split())
map_grid.append(row)
# Input checking
if height < 1 or width < 1:
raise ValueError('grid height and width should be >= 1')
elif height != len(map_grid) or width != len(map_grid[0]):
raise ValueError('actual map does not match declared map dimensions')
return width, height, map_grid
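# Expected map file layout (the numbers below are made up for illustration):
# the first line holds "<width> <height>", followed by <height> rows of
# <width> space-separated integer elevations, e.g.
#
#   4 2
#   4 8 7 3
#   2 5 9 3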
def make_grid(width, height, initial_value):
return [width*[initial_value] for i in range(height)]
def get_length_and_elevation(x, y, map_grid, path_lengths, final_elevations):
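    # Memoized recursion: path_lengths[y][x] caches the longest strictly
    # descending path (counted in cells) starting at (x, y), and
    # final_elevations[y][x] the elevation where that path ends; -1 means
    # "not computed yet".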
path_length = path_lengths[y][x]
if path_length != -1:
return path_length, final_elevations[y][x]
current_elevation = map_grid[y][x]
longest_path = 0
lowest_elevation = current_elevation
neighbors = [
(x, y - 1), # up
(x, y + 1), # down
(x - 1, y), # left
(x + 1, y), # right
]
for xn, yn in neighbors:
try:
neighbor = map_grid[yn][xn]
except IndexError:
continue
if neighbor < current_elevation:
path_length, final_elevation = get_length_and_elevation(xn, yn, map_grid, path_lengths, final_elevations)
if path_length > longest_path or (path_length == longest_path and final_elevation < lowest_elevation):
longest_path = path_length
lowest_elevation = final_elevation
path_length = longest_path + 1
path_lengths[y][x] = path_length
final_elevations[y][x] = lowest_elevation
return path_length, lowest_elevation
def main():
if len(sys.argv) != 2:
sys.exit('Usage: {} <map file>'.format(sys.argv[0]))
print 'Parsing map data...'
try:
width, height, map_grid = parse_map_file(sys.argv[1])
except IOError as e:
sys.exit('Unable to read map file: {}'.format(e))
except ValueError as e:
sys.exit('Invalid map file: {}: {}'.format(sys.argv[1], e))
# Initialize corresponding grids for path lengths and final elevations
path_lengths = make_grid(width, height, -1)
final_elevations = make_grid(width, height, -1)
print 'Finding the best path...'
longest_path = -1
steepest_drop = -1
for y, row in enumerate(map_grid):
for x, initial_elevation in enumerate(row):
path_length, final_elevation = get_length_and_elevation(x, y, map_grid, path_lengths, final_elevations)
drop = initial_elevation - final_elevation
if path_length > longest_path or (path_length == longest_path and drop > steepest_drop):
longest_path = path_length
steepest_drop = drop
print '\nlength = {}, drop = {}\n'.format(longest_path, steepest_drop)
if __name__ == '__main__':
main()
| baudm/sg-ski | sg-ski.py | Python | mit | 3,012 | 0.002988 |
#
# The Python Imaging Library
# $Id: ImagePath.py,v 1.2 2007/06/17 14:12:15 robertoconnor Exp $
#
# path interface
#
# History:
# 1996-11-04 fl Created
# 2002-04-14 fl Added documentation stub class
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1996.
#
# See the README file for information on usage and redistribution.
#
import Image
##
# Path wrapper.
class Path:
##
# Creates a path object.
#
# @param xy Sequence. The sequence can contain 2-tuples [(x, y), ...]
# or a flat list of numbers [x, y, ...].
def __init__(self, xy):
pass
##
# Compacts the path, by removing points that are close to each
# other. This method modifies the path in place.
def compact(self, distance=2):
pass
##
# Gets the bounding box.
def getbbox(self):
pass
##
# Maps the path through a function.
def map(self, function):
pass
##
# Converts the path to Python list.
#
# @param flat By default, this function returns a list of 2-tuples
# [(x, y), ...]. If this argument is true, it returns a flat
# list [x, y, ...] instead.
# @return A list of coordinates.
def tolist(self, flat=0):
pass
##
# Transforms the path.
def transform(self, matrix):
pass
# override with C implementation
Path = Image.core.path
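# Hedged usage sketch (assumes PIL's C path implementation is available;
# the coordinates are arbitrary):
#
#   import ImagePath
#   p = ImagePath.Path([(0, 0), (10, 0), (10, 10)])
#   print p.getbbox()                  # bounding box as a 4-tuple
#   p.transform((1, 0, 5, 0, 1, 5))    # affine transform: translate by (5, 5)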
| arpruss/plucker | plucker_desktop/installer/osx/application_bundle_files/Resources/parser/python/vm/PIL/ImagePath.py | Python | gpl-2.0 | 1,472 | 0.000679 |
from itertools import chain
import jellyfish # type: ignore
import pytest # type: ignore
import pytz
import us
# attribute
def test_attribute():
for state in us.STATES_AND_TERRITORIES:
assert state == getattr(us.states, state.abbr)
def test_valid_timezones():
for state in us.STATES_AND_TERRITORIES:
if state.capital:
assert pytz.timezone(state.capital_tz)
for tz in state.time_zones:
assert pytz.timezone(tz)
# During migration from SQLite to Python classes, a duplicate
# time zone had been found
assert len(state.time_zones) == len(set(state.time_zones))
# maryland lookup
def test_fips():
assert us.states.lookup("24") == us.states.MD
assert us.states.lookup("51") != us.states.MD
def test_abbr():
assert us.states.lookup("MD") == us.states.MD
assert us.states.lookup("md") == us.states.MD
assert us.states.lookup("VA") != us.states.MD
assert us.states.lookup("va") != us.states.MD
def test_name():
assert us.states.lookup("Maryland") == us.states.MD
assert us.states.lookup("maryland") == us.states.MD
assert us.states.lookup("Maryland", field="name") == us.states.MD
assert us.states.lookup("maryland", field="name") is None
assert us.states.lookup("murryland") == us.states.MD
assert us.states.lookup("Virginia") != us.states.MD
# lookups
def test_abbr_lookup():
for state in us.STATES:
assert us.states.lookup(state.abbr) == state
def test_fips_lookup():
for state in us.STATES:
assert us.states.lookup(state.fips) == state
def test_name_lookup():
for state in us.STATES:
assert us.states.lookup(state.name) == state
def test_obsolete_lookup():
for state in us.OBSOLETE:
assert us.states.lookup(state.name) is None
# test metaphone
def test_jellyfish_metaphone():
for state in chain(us.STATES_AND_TERRITORIES, us.OBSOLETE):
assert state.name_metaphone == jellyfish.metaphone(state.name)
# mappings
def test_mapping():
states = us.STATES[:5]
assert us.states.mapping("abbr", "fips", states=states) == dict(
(s.abbr, s.fips) for s in states
)
def test_obsolete_mapping():
mapping = us.states.mapping("abbr", "fips")
for state in us.states.OBSOLETE:
assert state.abbr not in mapping
def test_custom_mapping():
mapping = us.states.mapping("abbr", "fips", states=[us.states.DC, us.states.MD])
assert len(mapping) == 2
assert "DC" in mapping
assert "MD" in mapping
# known bugs
def test_kentucky_uppercase():
assert us.states.lookup("kentucky") == us.states.KY
assert us.states.lookup("KENTUCKY") == us.states.KY
def test_wayoming():
assert us.states.lookup("Wyoming") == us.states.WY
assert us.states.lookup("Wayoming") is None
def test_dc():
assert us.states.DC not in us.STATES
assert us.states.lookup("DC") == us.states.DC
assert us.states.lookup("District of Columbia") == us.states.DC
assert "DC" in us.states.mapping("abbr", "name")
# shapefiles
@pytest.mark.skip
def test_head():
import requests
for state in us.STATES_AND_TERRITORIES:
for url in state.shapefile_urls().values():
resp = requests.head(url)
assert resp.status_code == 200
# counts
def test_obsolete():
assert len(us.OBSOLETE) == 3
def test_states():
assert len(us.STATES) == 50
def test_territories():
assert len(us.TERRITORIES) == 5
def test_contiguous():
# Lower 48
assert len(us.STATES_CONTIGUOUS) == 48
def test_continental():
# Lower 48 + Alaska
assert len(us.STATES_CONTINENTAL) == 49
| unitedstates/python-us | us/tests/test_us.py | Python | bsd-3-clause | 3,729 | 0.000268 |
import unittest
import os
import sys
import shutil
import string
from optparse import OptionParser
import ConfigParser
import logging
sys.dont_write_bytecode = True
sys.path.insert(0, os.path.abspath(".."))
sys.path.insert(0, os.path.abspath(os.path.join("..", "coshsh")))
import coshsh
from coshsh.generator import Generator
from coshsh.datasource import Datasource
from coshsh.application import Application
from coshsh.util import setup_logging
class CoshshTest(unittest.TestCase):
def print_header(self):
print "#" * 80 + "\n" + "#" + " " * 78 + "#"
print "#" + string.center(self.id(), 78) + "#"
print "#" + " " * 78 + "#\n" + "#" * 80 + "\n"
def setUp(self):
shutil.rmtree("./var/objects/test1", True)
os.makedirs("./var/objects/test1")
shutil.rmtree("./var/log", True)
os.makedirs("./var/log")
self.config = ConfigParser.ConfigParser()
self.config.read('etc/coshsh.cfg')
self.generator = coshsh.generator.Generator()
setup_logging(logfile="zishsh.log", logdir="./var/log", scrnloglevel=logging.DEBUG, txtloglevel=logging.INFO)
        # default, as in coshsh-cook
setup_logging(logdir="./var/log", scrnloglevel=logging.INFO)
def tearDown(self):
#shutil.rmtree("./var/objects/test1", True)
print
def test_log(self):
logger = logging.getLogger('zishsh')
print logger.__dict__
print
for h in logger.handlers:
print h.__dict__
print
logger.warn("i warn you")
logger.info("i inform you")
logger.debug("i spam you")
self.assert_(os.path.exists("./var/log/zishsh.log"))
with open('./var/log/zishsh.log') as x: zishshlog = x.read()
self.assert_("WARNING" in zishshlog)
self.assert_("INFO" in zishshlog)
self.assert_("DEBUG" not in zishshlog)
def test_write(self):
        # internally, the code calls logging.getLogger('coshsh')
self.print_header()
self.generator.add_recipe(name='test4', **dict(self.config.items('recipe_TEST4')))
self.config.set("datasource_SIMPLESAMPLE", "name", "simplesample")
cfg = self.config.items("datasource_SIMPLESAMPLE")
self.generator.recipes['test4'].add_datasource(**dict(cfg))
# remove target dir / create empty
self.generator.recipes['test4'].count_before_objects()
self.generator.recipes['test4'].cleanup_target_dir()
self.generator.recipes['test4'].prepare_target_dir()
# check target
# read the datasources
self.generator.recipes['test4'].collect()
self.generator.recipes['test4'].assemble()
# for each host, application get the corresponding template files
# get the template files and cache them in a struct owned by the recipe
# resolve the templates and attach the result as config_files to host/app
self.generator.recipes['test4'].render()
self.assert_(hasattr(self.generator.recipes['test4'].objects['hosts']['test_host_0'], 'config_files'))
self.assert_('host.cfg' in self.generator.recipes['test4'].objects['hosts']['test_host_0'].config_files['nagios'])
# write hosts/apps to the filesystem
self.generator.recipes['test4'].output()
self.assert_(os.path.exists("var/objects/test1/dynamic/hosts"))
self.assert_(os.path.exists("var/objects/test1/dynamic/hosts/test_host_0"))
self.assert_(os.path.exists("var/objects/test1/dynamic/hosts/test_host_0/os_linux_default.cfg"))
self.assert_(os.path.exists("var/objects/test1/dynamic/hosts/test_host_0/os_windows_default.cfg"))
os_windows_default_cfg = open("var/objects/test1/dynamic/hosts/test_host_0/os_windows_default.cfg").read()
self.assert_('os_windows_default_check_unittest' in os_windows_default_cfg)
self.assert_(os.path.exists("./var/log/coshsh.log"))
with open('./var/log/coshsh.log') as x: coshshlog = x.read()
self.assert_("test_host_0" in coshshlog)
if __name__ == '__main__':
unittest.main()
| lausser/coshsh | tests/test_logging.py | Python | agpl-3.0 | 4,032 | 0.010665 |
# jhbuild - a tool to ease building collections of source packages
# Copyright (C) 2001-2006 James Henstridge
# Copyright (C) 2008 Frederic Peters
#
# bot.py: buildbot control commands
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
#
# Some methods are derived from Buildbot own methods (when it was not possible
# to override just some parts of them). Buildbot is also licensed under the
# GNU General Public License.
import os
import signal
import sys
import urllib
import warnings
from optparse import make_option
import socket
import __builtin__
import csv
import logging
try:
import elementtree.ElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
import jhbuild.moduleset
import jhbuild.frontends
from jhbuild.commands import Command, register_command
from jhbuild.commands.base import cmd_build
from jhbuild.config import addpath
from jhbuild.errors import UsageError, FatalError, CommandError
try:
import buildbot
except ImportError:
buildbot = None
class cmd_bot(Command):
doc = N_('Control buildbot')
name = 'bot'
usage_args = N_('[ options ... ]')
def __init__(self):
Command.__init__(self, [
make_option('--setup',
action='store_true', dest='setup', default=False,
help=_('setup a buildbot environment')),
make_option('--start',
action='store_true', dest='start', default=False,
help=_('start a buildbot slave server')),
make_option('--stop',
action='store_true', dest='stop', default=False,
help=_('stop a buildbot slave server')),
make_option('--start-server',
action='store_true', dest='start_server', default=False,
help=_('start a buildbot master server')),
make_option('--reload-server-config',
action='store_true', dest='reload_server_config', default=False,
help=_('reload a buildbot master server configuration')),
make_option('--stop-server',
action='store_true', dest='stop_server', default=False,
help=_('stop a buildbot master server')),
make_option('--daemon',
action='store_true', dest='daemon', default=False,
help=_('start as daemon')),
make_option('--pidfile', metavar='PIDFILE',
action='store', dest='pidfile', default=None,
help=_('PID file location')),
make_option('--logfile', metavar='LOGFILE',
action='store', dest='logfile', default=None,
help=_('log file location')),
make_option('--slaves-dir', metavar='SLAVESDIR',
action='store', dest='slaves_dir', default=None,
help=_('directory with slave files (only with --start-server)')),
make_option('--buildbot-dir', metavar='BUILDBOTDIR',
action='store', dest='buildbot_dir', default=None,
help=_('directory with buildbot work files (only with --start-server)')),
make_option('--mastercfg', metavar='CFGFILE',
action='store', dest='mastercfgfile', default=None,
help=_('master cfg file location (only with --start-server)')),
make_option('--step',
action='store_true', dest='step', default=False,
help=_('exec a buildbot step (internal use only)')),
])
def run(self, config, options, args, help=None):
if options.setup:
return self.setup(config)
global buildbot
if buildbot is None:
import site
pythonversion = 'python' + str(sys.version_info[0]) + '.' + str(sys.version_info[1])
pythonpath = os.path.join(config.prefix, 'lib', pythonversion, 'site-packages')
site.addsitedir(pythonpath)
try:
import buildbot
except ImportError:
raise FatalError(_('buildbot and twisted not found, run jhbuild bot --setup'))
# make jhbuild config file accessible to buildbot files
# (master.cfg , steps.py, etc.)
__builtin__.__dict__['jhbuild_config'] = config
daemonize = False
pidfile = None
logfile = None
slaves_dir = config.jhbuildbot_slaves_dir
mastercfgfile = config.jhbuildbot_mastercfg
buildbot_dir = config.jhbuildbot_dir
if options.daemon:
daemonize = True
if options.pidfile:
pidfile = options.pidfile
if options.logfile:
logfile = options.logfile
if options.slaves_dir:
slaves_dir = options.slaves_dir
if options.mastercfgfile:
mastercfgfile = options.mastercfgfile
if options.buildbot_dir:
buildbot_dir = os.path.abspath(options.buildbot_dir)
if options.start:
return self.start(config, daemonize, pidfile, logfile)
if options.step:
os.environ['JHBUILDRC'] = config.filename
os.environ['LC_ALL'] = 'C'
os.environ['LANGUAGE'] = 'C'
os.environ['LANG'] = 'C'
__builtin__.__dict__['_'] = lambda x: x
config.interact = False
config.nonetwork = True
os.environ['TERM'] = 'dumb'
if args[0] in ('update', 'build', 'check', 'clean'):
module_set = jhbuild.moduleset.load(config)
buildscript = jhbuild.frontends.get_buildscript(config,
[module_set.get_module(x, ignore_case=True) for x in args[1:]],
module_set=module_set)
phases = None
if args[0] == 'update':
config.nonetwork = False
phases = ['checkout']
elif args[0] == 'build':
config.alwaysautogen = True
# make check will be run in another step
config.makecheck = False
config.build_targets = ['install']
elif args[0] == 'check':
config.makecheck = True
config.build_targets = ['check']
phases = ['check']
elif args[0] == 'clean':
phases = ['clean']
rc = buildscript.build(phases=phases)
else:
command = args[0]
rc = jhbuild.commands.run(command, config, args[1:], help=None)
sys.exit(rc)
if options.start_server:
return self.start_server(config, daemonize, pidfile, logfile,
slaves_dir, mastercfgfile, buildbot_dir)
if options.stop or options.stop_server:
return self.stop(config, pidfile)
if options.reload_server_config:
return self.reload_server_config(config, pidfile)
def setup(self, config):
module_set = jhbuild.moduleset.load(config, 'buildbot')
module_list = module_set.get_module_list('all', config.skip)
build = jhbuild.frontends.get_buildscript(config, module_list, module_set=module_set)
return build.build()
def start(self, config, daemonize, pidfile, logfile):
from twisted.application import service
application = service.Application('buildslave')
if ':' in config.jhbuildbot_master:
master_host, master_port = config.jhbuildbot_master.split(':')
master_port = int(master_port)
else:
master_host, master_port = config.jhbuildbot_master, 9070
slave_name = config.jhbuildbot_slavename or socket.gethostname()
keepalive = 600
usepty = 0
umask = None
basedir = os.path.join(config.checkoutroot, 'jhbuildbot')
if not os.path.exists(os.path.join(basedir, 'builddir')):
os.makedirs(os.path.join(basedir, 'builddir'))
os.chdir(basedir)
from buildbot.slave.bot import BuildSlave
s = BuildSlave(master_host, master_port,
slave_name, config.jhbuildbot_password, basedir,
keepalive, usepty, umask=umask)
s.setServiceParent(application)
from twisted.scripts._twistd_unix import UnixApplicationRunner, ServerOptions
opts = ['--no_save']
if not daemonize:
opts.append('--nodaemon')
if pidfile:
opts.extend(['--pidfile', pidfile])
if logfile:
opts.extend(['--logfile', logfile])
options = ServerOptions()
options.parseOptions(opts)
class JhBuildbotApplicationRunner(UnixApplicationRunner):
application = None
def createOrGetApplication(self):
return self.application
JhBuildbotApplicationRunner.application = application
JhBuildbotApplicationRunner(options).run()
def start_server(self, config, daemonize, pidfile, logfile, slaves_dir,
mastercfgfile, buildbot_dir):
from twisted.scripts._twistd_unix import UnixApplicationRunner, ServerOptions
opts = ['--no_save']
if not daemonize:
opts.append('--nodaemon')
if pidfile:
opts.extend(['--pidfile', pidfile])
        if logfile:
opts.extend(['--logfile', logfile])
options = ServerOptions()
options.parseOptions(opts)
class JhBuildbotApplicationRunner(UnixApplicationRunner):
application = None
def createOrGetApplication(self):
return self.application
from twisted.application import service, strports
from buildbot.master import BuildMaster
application = service.Application('buildmaster')
from buildbot.buildslave import BuildSlave
from twisted.python import log
from twisted.internet import defer
        from buildbot import interfaces, locks
from buildbot.process.properties import Properties
class JhBuildSlave(BuildSlave):
contact_name = None
contact_email = None
url = None
distribution = None
architecture = None
version = None
max_builds = 2
scheduler = None
run_checks = True
run_coverage_report = False
run_clean_afterwards = False
def load_extra_configuration(self, slaves_dir):
from twisted.python import log
slave_xml_file = os.path.join(slaves_dir, self.slavename + '.xml')
if not os.path.exists(slave_xml_file):
log.msg(_('No description for slave %s.') % self.slavename)
return
try:
cfg = ET.parse(slave_xml_file)
except: # parse error
log.msg(_('Failed to parse slave config for %s.') % self.slavename)
return
for attribute in ('config/max_builds', 'config/missing_timeout',
'config/run_checks', 'config/run_coverage_report',
'config/run_clean_afterwards',
'config/scheduler',
'nightly_scheduler/minute',
'nightly_scheduler/hour',
'nightly_scheduler/dayOfMonth',
'nightly_scheduler/month',
'nightly_scheduler/dayOfWeek',
'info/contact_name', 'info/contact_email',
'info/url', 'info/distribution', 'info/architecture',
'info/version'):
attr_name = attribute.split('/')[-1]
try:
value = cfg.find(attribute).text
except AttributeError:
continue
if attr_name in ('max_builds', 'missing_timeout'): # int value
try:
value = int(value)
except ValueError:
continue
if attr_name in ('run_checks', 'run_coverage_report', 'run_clean_afterwards'):
value = (value == 'yes')
if attr_name in ('minute', 'hour', 'dayOfMonth', 'month', 'dayOfWeek'):
try:
value = int(value)
except ValueError:
value = '*'
setattr(self, attr_name, value)
if self.scheduler == 'nightly':
self.nightly_kwargs = {}
for attr_name in ('minute', 'hour', 'dayOfMonth', 'month', 'dayOfWeek'):
if hasattr(self, attr_name):
self.nightly_kwargs[attr_name] = getattr(self, attr_name)
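            # Hypothetical sketch of a <slavename>.xml file read above; the
            # element paths are taken from the attribute list, the values
            # are made up:
            #
            #   <slave>
            #     <config>
            #       <max_builds>2</max_builds>
            #       <scheduler>nightly</scheduler>
            #     </config>
            #     <nightly_scheduler>
            #       <hour>3</hour>
            #     </nightly_scheduler>
            #     <info>
            #       <contact_name>Jane Doe</contact_name>
            #     </info>
            #   </slave>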
class JhBuildMaster(BuildMaster):
jhbuild_config = config
def loadConfig(self, f):
# modified from parent method to get slaves, projects, change
# sources, schedulers, builders and web status ouf of
# master.cfg [it would have been cleaner if jhbuild didn't
# have to copy all that code.]
localDict = {'basedir': os.path.expanduser(self.basedir)}
try:
exec f in localDict
except:
log.msg("error while parsing config file")
raise
jhbuild_config.load()
try:
config = localDict['BuildmasterConfig']
except KeyError:
log.err("missing config dictionary")
log.err("config file must define BuildmasterConfig")
raise
known_keys = ("bots", "slaves",
"sources", "change_source",
"schedulers", "builders", "mergeRequests",
"slavePortnum", "debugPassword", "logCompressionLimit",
"manhole", "status", "projectName", "projectURL",
"buildbotURL", "properties", "prioritizeBuilders",
"eventHorizon", "buildCacheSize", "logHorizon", "buildHorizon",
"changeHorizon", "logMaxSize", "logMaxTailSize",
"logCompressionMethod",
)
for k in config.keys():
if k not in known_keys:
log.msg("unknown key '%s' defined in config dictionary" % k)
# the 'slaves' list is read from the 'slaves.csv' file in the
# current directory (unless instructed different from command line)
# it is a CSV file structured like this:
# slavename,password
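                # e.g. a hypothetical entry:
                #   bender,sekrit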
config['slaves'] = []
slaves_csv_file = os.path.join(slaves_dir, 'slaves.csv')
if os.path.exists(slaves_csv_file):
for x in csv.reader(file(slaves_csv_file)):
if not x or x[0].startswith('#'):
continue
kw = {}
build_slave = JhBuildSlave(x[0], x[1])
build_slave.load_extra_configuration(slaves_dir)
config['slaves'].append(build_slave)
if len(config['slaves']) == 0:
log.msg('you must fill slaves.csv with slaves')
module_set = jhbuild.moduleset.load(self.jhbuild_config)
module_list = module_set.get_module_list(
self.jhbuild_config.modules,
self.jhbuild_config.skip,
include_afters=True)
config['projects'] = [x.name for x in module_list \
if not x.name.startswith('meta-')]
if self.jhbuild_config.jhbuildbot_svn_commits_box:
# trigger builds from mails to svn-commit-list
# (note Maildir must be correct, or everything will fail)
from jhbuild.buildbot.changes import GnomeMaildirSource
config['change_source'] = GnomeMaildirSource(
self.jhbuild_config.jhbuildbot_svn_commits_box,
modules=module_list,
prefix=None)
else:
# support injection (use 'buildbot sendchange')
from buildbot.changes.pb import PBChangeSource
config['change_source'] = PBChangeSource()
# Schedulers
from jhbuild.buildbot.scheduler import SerialScheduler, NightlySerialScheduler, OnCommitScheduler
config['schedulers'] = []
for slave in config['slaves']:
s = None
for project in config['projects']:
buildername = str('%s-%s' % (project, slave.slavename))
scheduler_kwargs = {}
if slave.scheduler == 'nightly':
scheduler_class = NightlySerialScheduler
scheduler_kwargs = slave.nightly_kwargs
else:
scheduler_class = SerialScheduler
s = scheduler_class(buildername, project, upstream=s,
builderNames=[buildername],
**scheduler_kwargs)
config['schedulers'].append(s)
if self.jhbuild_config.jhbuildbot_svn_commits_box:
# schedulers that will launch job when receiving
# change notifications
s2 = OnCommitScheduler('oc-' + buildername,
project, builderNames=[buildername])
config['schedulers'].append(s2)
# Builders
from jhbuild.buildbot.factory import JHBuildFactory
config['builders'] = []
for project in config['projects']:
for slave in config['slaves']:
f = JHBuildFactory(project, slave)
config['builders'].append({
'name' : "%s-%s" % (project, slave.slavename),
'slavename' : slave.slavename,
'builddir' : 'builddir/%s.%s' % (project, slave.slavename),
'factory' : f,
'category' : project
})
# Status targets
if not config.has_key('status'):
# let it be possible to define additional status in
# master.cfg
config['status'] = []
from jhbuild.buildbot.status.web import JHBuildWebStatus
config['status'].append(
JHBuildWebStatus(
self.jhbuild_config.moduleset,
config['projects'],
[x.slavename for x in config['slaves']],
http_port=8080, allowForce=True)
)
# remaining of the method is a straight copy from buildbot
# ...
try:
# required
schedulers = config['schedulers']
builders = config['builders']
slavePortnum = config['slavePortnum']
#slaves = config['slaves']
#change_source = config['change_source']
# optional
debugPassword = config.get('debugPassword')
manhole = config.get('manhole')
status = config.get('status', [])
projectName = config.get('projectName')
projectURL = config.get('projectURL')
buildbotURL = config.get('buildbotURL')
properties = config.get('properties', {})
buildCacheSize = config.get('buildCacheSize', None)
eventHorizon = config.get('eventHorizon', None)
logHorizon = config.get('logHorizon', None)
buildHorizon = config.get('buildHorizon', None)
logCompressionLimit = config.get('logCompressionLimit', 4*1024)
if logCompressionLimit is not None and not \
isinstance(logCompressionLimit, int):
raise ValueError("logCompressionLimit needs to be bool or int")
logCompressionMethod = config.get('logCompressionMethod', "bz2")
if logCompressionMethod not in ('bz2', 'gz'):
raise ValueError("logCompressionMethod needs to be 'bz2', or 'gz'")
logMaxSize = config.get('logMaxSize')
if logMaxSize is not None and not \
isinstance(logMaxSize, int):
raise ValueError("logMaxSize needs to be None or int")
logMaxTailSize = config.get('logMaxTailSize')
if logMaxTailSize is not None and not \
isinstance(logMaxTailSize, int):
raise ValueError("logMaxTailSize needs to be None or int")
mergeRequests = config.get('mergeRequests')
if mergeRequests is not None and not callable(mergeRequests):
raise ValueError("mergeRequests must be a callable")
prioritizeBuilders = config.get('prioritizeBuilders')
if prioritizeBuilders is not None and not callable(prioritizeBuilders):
raise ValueError("prioritizeBuilders must be callable")
changeHorizon = config.get("changeHorizon")
if changeHorizon is not None and not isinstance(changeHorizon, int):
raise ValueError("changeHorizon needs to be an int")
except KeyError, e:
log.msg("config dictionary is missing a required parameter")
log.msg("leaving old configuration in place")
raise
#if "bots" in config:
# raise KeyError("c['bots'] is no longer accepted")
slaves = config.get('slaves', [])
if "bots" in config:
m = ("c['bots'] is deprecated as of 0.7.6 and will be "
"removed by 0.8.0 . Please use c['slaves'] instead.")
log.msg(m)
warnings.warn(m, DeprecationWarning)
for name, passwd in config['bots']:
slaves.append(JhBuildSlave(name, passwd))
if "bots" not in config and "slaves" not in config:
log.msg("config dictionary must have either 'bots' or 'slaves'")
log.msg("leaving old configuration in place")
raise KeyError("must have either 'bots' or 'slaves'")
#if "sources" in config:
# raise KeyError("c['sources'] is no longer accepted")
if changeHorizon is not None:
self.change_svc.changeHorizon = changeHorizon
change_source = config.get('change_source', [])
if isinstance(change_source, (list, tuple)):
change_sources = change_source
else:
change_sources = [change_source]
if "sources" in config:
m = ("c['sources'] is deprecated as of 0.7.6 and will be "
"removed by 0.8.0 . Please use c['change_source'] instead.")
log.msg(m)
warnings.warn(m, DeprecationWarning)
for s in config['sources']:
change_sources.append(s)
# do some validation first
for s in slaves:
assert interfaces.IBuildSlave.providedBy(s)
if s.slavename in ("debug", "change", "status"):
raise KeyError(
"reserved name '%s' used for a bot" % s.slavename)
if config.has_key('interlocks'):
raise KeyError("c['interlocks'] is no longer accepted")
assert isinstance(change_sources, (list, tuple))
for s in change_sources:
assert interfaces.IChangeSource(s, None)
# this assertion catches c['schedulers'] = Scheduler(), since
# Schedulers are service.MultiServices and thus iterable.
errmsg = "c['schedulers'] must be a list of Scheduler instances"
assert isinstance(schedulers, (list, tuple)), errmsg
for s in schedulers:
assert interfaces.IScheduler(s, None), errmsg
assert isinstance(status, (list, tuple))
for s in status:
assert interfaces.IStatusReceiver(s, None)
slavenames = [s.slavename for s in slaves]
buildernames = []
dirnames = []
# convert builders from objects to config dictionaries
builders_dicts = []
for b in builders:
if isinstance(b, buildbot.config.BuilderConfig):
builders_dicts.append(b.getConfigDict())
elif type(b) is dict:
builders_dicts.append(b)
else:
raise ValueError("builder %s is not a BuilderConfig object (or a dict)" % b)
builders = builders_dicts
for b in builders:
if b.has_key('slavename') and b['slavename'] not in slavenames:
raise ValueError("builder %s uses undefined slave %s" \
% (b['name'], b['slavename']))
for n in b.get('slavenames', []):
if n not in slavenames:
raise ValueError("builder %s uses undefined slave %s" \
% (b['name'], n))
if b['name'] in buildernames:
raise ValueError("duplicate builder name %s"
% b['name'])
buildernames.append(b['name'])
# sanity check name (BuilderConfig does this too)
if b['name'].startswith("_"):
errmsg = ("builder names must not start with an "
"underscore: " + b['name'])
log.err(errmsg)
raise ValueError(errmsg)
                # Fix the dictionary with default values, in case this wasn't
# specified with a BuilderConfig object (which sets the same defaults)
b.setdefault('builddir', buildbot.util.safeTranslate(b['name']))
b.setdefault('slavebuilddir', b['builddir'])
if b['builddir'] in dirnames:
raise ValueError("builder %s reuses builddir %s"
% (b['name'], b['builddir']))
dirnames.append(b['builddir'])
unscheduled_buildernames = buildernames[:]
schedulernames = []
for s in schedulers:
for b in s.listBuilderNames():
assert b in buildernames, \
"%s uses unknown builder %s" % (s, b)
if b in unscheduled_buildernames:
unscheduled_buildernames.remove(b)
if s.name in schedulernames:
# TODO: schedulers share a namespace with other Service
# children of the BuildMaster node, like status plugins, the
# Manhole, the ChangeMaster, and the BotMaster (although most
# of these don't have names)
msg = ("Schedulers must have unique names, but "
"'%s' was a duplicate" % (s.name,))
raise ValueError(msg)
schedulernames.append(s.name)
if unscheduled_buildernames:
log.msg("Warning: some Builders have no Schedulers to drive them:"
" %s" % (unscheduled_buildernames,))
# assert that all locks used by the Builds and their Steps are
# uniquely named.
lock_dict = {}
for b in builders:
for l in b.get('locks', []):
if isinstance(l, locks.LockAccess): # User specified access to the lock
l = l.lockid
if lock_dict.has_key(l.name):
if lock_dict[l.name] is not l:
raise ValueError("Two different locks (%s and %s) "
"share the name %s"
% (l, lock_dict[l.name], l.name))
else:
lock_dict[l.name] = l
# TODO: this will break with any BuildFactory that doesn't use a
# .steps list, but I think the verification step is more
# important.
for s in b['factory'].steps:
for l in s[1].get('locks', []):
if isinstance(l, locks.LockAccess): # User specified access to the lock
l = l.lockid
if lock_dict.has_key(l.name):
if lock_dict[l.name] is not l:
raise ValueError("Two different locks (%s and %s)"
" share the name %s"
% (l, lock_dict[l.name], l.name))
else:
lock_dict[l.name] = l
if not isinstance(properties, dict):
raise ValueError("c['properties'] must be a dictionary")
# slavePortnum supposed to be a strports specification
if type(slavePortnum) is int:
slavePortnum = "tcp:%d" % slavePortnum
# now we're committed to implementing the new configuration, so do
# it atomically
# TODO: actually, this is spread across a couple of Deferreds, so it
# really isn't atomic.
d = defer.succeed(None)
self.projectName = projectName
self.projectURL = projectURL
self.buildbotURL = buildbotURL
self.properties = Properties()
self.properties.update(properties, self.configFileName)
self.status.logCompressionLimit = logCompressionLimit
self.status.logCompressionMethod = logCompressionMethod
self.status.logMaxSize = logMaxSize
self.status.logMaxTailSize = logMaxTailSize
# Update any of our existing builders with the current log parameters.
# This is required so that the new value is picked up after a
# reconfig.
for builder in self.botmaster.builders.values():
builder.builder_status.setLogCompressionLimit(logCompressionLimit)
builder.builder_status.setLogCompressionMethod(logCompressionMethod)
builder.builder_status.setLogMaxSize(logMaxSize)
builder.builder_status.setLogMaxTailSize(logMaxTailSize)
if mergeRequests is not None:
self.botmaster.mergeRequests = mergeRequests
if prioritizeBuilders is not None:
self.botmaster.prioritizeBuilders = prioritizeBuilders
self.buildCacheSize = buildCacheSize
self.eventHorizon = eventHorizon
self.logHorizon = logHorizon
self.buildHorizon = buildHorizon
# self.slaves: Disconnect any that were attached and removed from the
# list. Update self.checker with the new list of passwords, including
# debug/change/status.
d.addCallback(lambda res: self.loadConfig_Slaves(slaves))
# self.debugPassword
if debugPassword:
self.checker.addUser("debug", debugPassword)
self.debugPassword = debugPassword
# self.manhole
if manhole != self.manhole:
# changing
if self.manhole:
# disownServiceParent may return a Deferred
d.addCallback(lambda res: self.manhole.disownServiceParent())
def _remove(res):
self.manhole = None
return res
d.addCallback(_remove)
if manhole:
def _add(res):
self.manhole = manhole
manhole.setServiceParent(self)
d.addCallback(_add)
# add/remove self.botmaster.builders to match builders. The
# botmaster will handle startup/shutdown issues.
d.addCallback(lambda res: self.loadConfig_Builders(builders))
d.addCallback(lambda res: self.loadConfig_status(status))
# Schedulers are added after Builders in case they start right away
d.addCallback(lambda res: self.loadConfig_Schedulers(schedulers))
# and Sources go after Schedulers for the same reason
d.addCallback(lambda res: self.loadConfig_Sources(change_sources))
# self.slavePort
if self.slavePortnum != slavePortnum:
if self.slavePort:
def closeSlavePort(res):
d1 = self.slavePort.disownServiceParent()
self.slavePort = None
return d1
d.addCallback(closeSlavePort)
if slavePortnum is not None:
def openSlavePort(res):
self.slavePort = strports.service(slavePortnum,
self.slaveFactory)
self.slavePort.setServiceParent(self)
d.addCallback(openSlavePort)
log.msg("BuildMaster listening on port %s" % slavePortnum)
self.slavePortnum = slavePortnum
log.msg("configuration update started")
def _done(res):
self.readConfig = True
log.msg("configuration update complete")
d.addCallback(_done)
d.addCallback(lambda res: self.botmaster.maybeStartAllBuilds())
return d
if buildbot_dir:
basedir = buildbot_dir
else:
if PKGDATADIR:
basedir = os.path.join(PKGDATADIR, 'buildbot')
else:
basedir = os.path.join(SRCDIR, 'buildbot')
os.chdir(basedir)
if not os.path.exists(os.path.join(basedir, 'builddir')):
os.makedirs(os.path.join(basedir, 'builddir'))
master_cfg_path = mastercfgfile
JhBuildMaster(basedir, master_cfg_path).setServiceParent(application)
JhBuildbotApplicationRunner.application = application
JhBuildbotApplicationRunner(options).run()
def stop(self, config, pidfile):
try:
pid = int(file(pidfile).read())
except:
raise FatalError(_('failed to get buildbot PID'))
os.kill(pid, signal.SIGTERM)
def reload_server_config(self, config, pidfile):
try:
pid = int(file(pidfile).read())
except:
raise FatalError(_('failed to get buildbot PID'))
os.kill(pid, signal.SIGHUP)
register_command(cmd_bot)
| bboozzoo/jhbuild | jhbuild/commands/bot.py | Python | gpl-2.0 | 37,964 | 0.003293 |
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import Permission, Group
from rest_framework import serializers
from .models import User, Company
# Django models
class ContentTypeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ContentType
fields = ("id", "name", "app_label", "model")
class PermissionSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Permission
fields = ("id", "name", "content_type", "codename")
class GroupSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Group
fields = ("id", "name", "permissions")
# Our models
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ("id", "password", "last_login", "is_superuser", "username",
"first_name", "last_name", "email", "is_staff", "is_active",
"date_joined", "url", "gravatar", "activation_token",
"reset_password_token", "groups", "user_permissions")
class CompanySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Company
fields = ("id", "name", "url", "description", "logo", "company_user")
| sharestack/sharestack-api | sharestackapi/members/serializers.py | Python | mit | 1,298 | 0 |
import models
from django.contrib import admin
class ThumbnailInline(admin.StackedInline):
model = models.Thumbnail
fk_name = 'video'
extra = 0
class VideoAdmin(admin.ModelAdmin):
readonly_fields = ('video_id', 'youtube_url', 'swf_url',)
inlines = [ThumbnailInline]
list_filter = ('title', 'user__username',)
search_fields = ['title', 'user__first_name', 'user__email',
'user__username', 'keywords', ]
list_display = ('title', 'video_id', 'swf',)
def swf(self, instance):
return '<a href="%s">Swf link</a>' % (instance.get_absolute_url())
swf.allow_tags = True
admin.site.register(models.Video, VideoAdmin)
| laplacesdemon/django-youtube | django_youtube/admin.py | Python | bsd-3-clause | 684 | 0 |
"""Utility functions"""
import os
import difflib
def get_diff(str1, str2):
"""Returns git-diff-like diff between two strings"""
expected = str1.splitlines(1)
actual = str2.splitlines(1)
    diff = difflib.unified_diff(expected, actual, lineterm='\n', n=0)
return ''.join(diff)
def ensure_directory(path):
"""Creates the given directory, if not existing"""
os.makedirs(path, exist_ok=True)
def ensure_directory_of_file(file_path):
"""Creates the parent directory of a given file path, if not existing"""
ensure_directory(os.path.dirname(file_path))
def check_service_name(service_name):
"""Raises an exception if service_name is not valid"""
service_name_errors = get_service_name_errors(service_name)
if service_name_errors:
raise Exception('errors: %s' % str(service_name_errors))
def get_service_name_errors(service_name):
"""Checks if service_name is valid and returns errors if it is not.
Returns None if service_name is valid"""
errors = []
legal_characters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789\\'
for index in range(len(service_name)):
if not service_name[index] in legal_characters:
errors.append('Illegal character in service name: %s at position %s'
% (service_name[index], index))
return errors
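if __name__ == '__main__':
    # Tiny illustrative self-check of get_diff (inputs are made up).
    print(get_diff('a\nb\n', 'a\nc\n'))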
| perfalle/smartbox | common/utils.py | Python | gpl-3.0 | 1,360 | 0.005147 |
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2018.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Cities visualization
"""
from string import Template
import sys
import time
import re
from qiskit.visualization.utils import _validate_input_state
if ('ipykernel' in sys.modules) and ('spyder' not in sys.modules):
try:
from IPython.core.display import display, HTML
except ImportError:
print("Error importing IPython.core.display")
def iplot_state_city(rho, figsize=None):
""" Create a cities representation.
Graphical representation of the input array using a city style graph.
Args:
rho (array): State vector or density matrix.
figsize (tuple): The figure size in pixels.
"""
# HTML
html_template = Template("""
<p>
<div id="content_$divNumber" style="position: absolute; z-index: 1;">
<div id="cities_$divNumber"></div>
</div>
</p>
""")
# JavaScript
javascript_template = Template("""
<script>
requirejs.config({
paths: {
qVisualization: "https://qvisualization.mybluemix.net/q-visualizations"
}
});
require(["qVisualization"], function(qVisualizations) {
data = {
real: $real,
titleReal: "Real.[rho]",
imaginary: $imag,
titleImaginary: "Im.[rho]",
qbits: $qbits
};
qVisualizations.plotState("cities_$divNumber",
"cities",
data,
$options);
});
</script>
""")
rho = _validate_input_state(rho)
if figsize is None:
options = {}
else:
options = {'width': figsize[0], 'height': figsize[1]}
# Process data and execute
real = []
imag = []
for xvalue in rho:
row_real = []
col_imag = []
for value_real in xvalue.real:
row_real.append(float(value_real))
real.append(row_real)
for value_imag in xvalue.imag:
col_imag.append(float(value_imag))
imag.append(col_imag)
div_number = str(time.time())
div_number = re.sub('[.]', '', div_number)
html = html_template.substitute({
'divNumber': div_number
})
javascript = javascript_template.substitute({
'real': real,
'imag': imag,
'qbits': len(real),
'divNumber': div_number,
'options': options
})
display(HTML(html + javascript))
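# Hedged usage sketch (Jupyter only; the 2x2 matrix is the density matrix of
# the |+> state and serves purely as an illustration):
#
#   import numpy as np
#   rho = np.array([[0.5, 0.5], [0.5, 0.5]])
#   iplot_state_city(rho, figsize=(700, 500))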
| QISKit/qiskit-sdk-py | qiskit/visualization/interactive/iplot_cities.py | Python | apache-2.0 | 3,026 | 0.00033 |
# DOENDO - A Python-Based Fortran Refactoring Tool
# Copyright (C) 2011 Felipe H. da Jornada <[email protected]>
import re
import sys
import xml.dom.minidom
import analyze
import common
def rename(lines, doc, ren_dict, block=None):
'''
Rename a variable in a particular block
lines: line-oriented buffer to be altered
doc: DOM object containing info about code
    ren_dict: dictionary of renames. This can be a map of string->string
    or node->string.
    block: routine/module/sub to work in. None means the root node.
    This parameter only makes sense if ren_dict is a map of string->string;
    otherwise the routine knows the block it should work in automatically.
Example:
fname = 'file.f90'
lines = open(fname).readlines()
    doc = analyze.analyze(fname, ''.join(lines))
rename(lines, doc, {'i':'ii'})
'''
if block is None:
el = doc.childNodes[0]
else:
els = doc.getElementsByTagName('block')
el=None
for el_ in els:
if el_.getAttribute('name')==block:
el=el_
break
if el is None:
print 'Could not find block '+block
return
for var0, new_var in ren_dict.iteritems():
if isinstance(var0, str):
orig_var = var0
_vars = el.getElementsByTagName('var')
var=None
for var_ in _vars:
if var_.getAttribute('name')==orig_var:
var=var_
break
else:
var = var0
el = var.parentNode
orig_var = var.getAttribute('name')
if var is None:
print 'Could not find variable '+orig_var+' in block '+block
sys.exit(1)
#get the initial and final lines
start = int(el.getAttribute('start'))
end = int(el.getAttribute('end'))
#this will match only variables
cmp_obj = re.compile(r'^([^!]*[^a-zA-Z0-9_!%%])%s([^a-zA-Z0-9_!%%])'%(orig_var))
subs_str=r'\1%s\2'%(new_var)
for i in range(start, end+1):
old_line = ''
new_line = ' '+lines[i]
            #hack to do multiple substitutions on the same line
            #I probably need to learn more regexp..
while old_line != new_line:
old_line = new_line
new_line = cmp_obj.sub(subs_str, old_line)
lines[i] = new_line[1:]
    #re-analyze the file and return the updated DOM to the caller
    fname = doc.childNodes[0].nodeName
    data = ''.join(lines)
    return analyze.analyze(fname, data)
| jornada/DOENDO | rename.py | Python | gpl-3.0 | 2,180 | 0.037156 |
# EF-SMT solver implementation
#
# This example shows:
# 1. How to combine 2 different solvers
# 2. How to extract information from a model
#
from pysmt.shortcuts import Solver, get_model
from pysmt.shortcuts import Symbol, Bool, Real, Implies, And, Not, Equals
from pysmt.shortcuts import GT, LT, LE, Minus, Times
from pysmt.logics import AUTO, QF_LRA
from pysmt.typing import REAL
from pysmt.exceptions import SolverReturnedUnknownResultError
def efsmt(y, phi, logic=AUTO, maxloops=None,
esolver_name=None, fsolver_name=None,
verbose=False):
"""Solves exists x. forall y. phi(x, y)"""
y = set(y)
x = phi.get_free_variables() - y
with Solver(logic=logic, name=esolver_name) as esolver:
esolver.add_assertion(Bool(True))
loops = 0
while maxloops is None or loops <= maxloops:
loops += 1
eres = esolver.solve()
if not eres:
return False
else:
tau = {v: esolver.get_value(v) for v in x}
sub_phi = phi.substitute(tau).simplify()
if verbose: print("%d: Tau = %s" % (loops, tau))
fmodel = get_model(Not(sub_phi),
logic=logic, solver_name=fsolver_name)
if fmodel is None:
return tau
else:
sigma = {v: fmodel[v] for v in y}
sub_phi = phi.substitute(sigma).simplify()
if verbose: print("%d: Sigma = %s" % (loops, sigma))
esolver.add_assertion(sub_phi)
raise SolverReturnedUnknownResultError
def run_test(y, f):
print("Testing " + str(f))
try:
res = efsmt(y, f, logic=QF_LRA, maxloops=20, verbose=True)
if res == False:
print("unsat")
else:
print("sat : %s" % str(res))
except SolverReturnedUnknownResultError:
print("unknown")
print("\n\n")
def main():
x,y = [Symbol(n, REAL) for n in "xy"]
f_sat = Implies(And(GT(y, Real(0)), LT(y, Real(10))),
LT(Minus(y, Times(x, Real(2))), Real(7)))
f_incomplete = And(GT(x, Real(0)), LE(x, Real(10)),
Implies(And(GT(y, Real(0)), LE(y, Real(10)),
Not(Equals(x, y))),
GT(y, x)))
run_test([y], f_sat)
run_test([y], f_incomplete)
if __name__ == "__main__":
main()
| idkwim/pysmt | examples/efsmt.py | Python | apache-2.0 | 2,475 | 0.00202 |
class Animal(object):
def __init__(self, name): # Constructor of the class
self.name = name
def talk(self): # Abstract method, defined by convention only
raise NotImplementedError("Subclass must implement abstract method")
class Cat(Animal):
def talk(self):
return self.meow()
def meow(self):
return 'Meow!'
class Dog(Animal):
def talk(self):
return self.bark()
def bark(self):
return 'Woof! Woof!'
class Fish(Animal):
def swim(self):
pass
def __str__(self):
return "I am a fish!"
animals = [Cat('Foo'),
Dog('Bar'),
Fish('nemo')]
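# Note: Fish inherits Animal.talk() unchanged, so the loop below raises
# NotImplementedError when it reaches the Fish instance.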
for animal in animals:
print animal.name + ': ' + animal.talk()
# f = Fish("foo")
# print "Hi, " + str(f) | mcdickenson/python-washu-2014 | day2/polymorphism.py | Python | mit | 771 | 0.035019 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Operations that generate constants.
See the @{$python/constant_op$constants guide}.
@@zeros
@@zeros_like
@@ones
@@ones_like
@@fill
@@constant
@@linspace
@@range
@@random_normal
@@truncated_normal
@@random_uniform
@@random_shuffle
@@random_crop
@@multinomial
@@random_gamma
@@random_poisson
@@set_random_seed
"""
# Must be separate from array_ops to avoid a cyclic dependency.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.core.framework import attr_value_pb2
from tensorflow.python.eager import context
from tensorflow.python.eager import execute
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
def _eager_reshape(tensor, shape, ctx):
"""Eager-only version of Reshape op; requires tensor is an eager Tensor."""
attr_t = tensor.dtype.as_datatype_enum
attr_tshape, (shape,) = execute.args_to_matching_eager(
[shape], ctx, dtypes.int32)
attr_tshape = attr_tshape.as_datatype_enum
inputs_flat = [tensor, shape]
attrs = ("T", attr_t, "Tshape", attr_tshape)
result, = execute.execute(
b"Reshape", 1, inputs=inputs_flat, attrs=attrs, ctx=ctx)
return result
def _eager_fill(dims, value, ctx):
"""Eager-only version of Fill op; requires value is an eager Tensor."""
attr_t = value.dtype.as_datatype_enum
dims = convert_to_eager_tensor(dims, ctx, dtypes.int32)
inputs_flat = [dims, value]
attrs = ("T", attr_t)
result, = execute.execute(
b"Fill", 1, inputs=inputs_flat, attrs=attrs, ctx=ctx)
return result
def _eager_identity(tensor, ctx):
"""Eager-only version of Identity op; requires tensor is an eager Tensor."""
attrs = ("T", tensor.dtype.as_datatype_enum)
result, = execute.execute(
b"Identity", 1, inputs=[tensor], attrs=attrs, ctx=ctx)
return result
def convert_to_eager_tensor(t, ctx, dtype=None):
"""Converts the given `value` to an `EagerTensor`."""
if isinstance(t, ops.EagerTensor):
if dtype is not None and t.dtype != dtype:
raise TypeError("Expected tensor with type %r not %r" % (dtype, t.dtype))
return t
if isinstance(t, (int, float)):
# Use a scalar cache. This will put each scalar of each type only once on
# each device. Scalars don't use much device memory but copying scalars can
# trigger memcpys which are slow.
device = ctx.device_name
cache_key = device, t, dtype, type(t)
scalar_cache = ctx.scalar_cache()
tensor = scalar_cache.get(cache_key, None)
if tensor is not None:
return tensor
value = ops.EagerTensor(t, ctx, dtype=dtype)
scalar_cache[cache_key] = value
return value
return ops.EagerTensor(t, ctx, dtype=dtype)
def constant(value, dtype=None, shape=None, name="Const", verify_shape=False):
"""Creates a constant tensor.
The resulting tensor is populated with values of type `dtype`, as
specified by arguments `value` and (optionally) `shape` (see examples
below).
The argument `value` can be a constant value, or a list of values of type
`dtype`. If `value` is a list, then the length of the list must be less
than or equal to the number of elements implied by the `shape` argument (if
specified). In the case where the list length is less than the number of
elements specified by `shape`, the last element in the list will be used
to fill the remaining entries.
The argument `shape` is optional. If present, it specifies the dimensions of
the resulting tensor. If not present, the shape of `value` is used.
If the argument `dtype` is not specified, then the type is inferred from
the type of `value`.
For example:
```python
# Constant 1-D Tensor populated with value list.
tensor = tf.constant([1, 2, 3, 4, 5, 6, 7]) => [1 2 3 4 5 6 7]
# Constant 2-D tensor populated with scalar value -1.
tensor = tf.constant(-1.0, shape=[2, 3]) => [[-1. -1. -1.]
[-1. -1. -1.]]
```
Args:
value: A constant value (or list) of output type `dtype`.
dtype: The type of the elements of the resulting tensor.
shape: Optional dimensions of resulting tensor.
name: Optional name for the tensor.
verify_shape: Boolean that enables verification of a shape of values.
Returns:
A Constant Tensor.
Raises:
TypeError if shape is incorrectly specified or unsupported.
"""
ctx = context.context()
if not ctx.in_graph_mode():
if shape is None:
return convert_to_eager_tensor(value, ctx, dtype)
t = convert_to_eager_tensor(value, ctx, dtype)
shape = tensor_shape.as_shape(shape)
if shape == t.shape:
return t
if verify_shape:
raise TypeError("Expected Tensor's shape: %s, got %s." % (tuple(shape),
tuple(t.shape)))
num_t = t.shape.num_elements()
# TODO(josh11b): Implement shape -> eager tensor conversion.
if num_t == shape.num_elements():
return _eager_reshape(t, shape.as_list(), ctx)
if num_t == 1:
if t.dtype == dtypes.bool:
# We don't have a Fill kernel for bool dtype on GPU. So we first run
# Fill on CPU and then copy to GPU if needed.
with ops.device("/device:CPU:0"):
x = _eager_fill(shape.as_list(), t.as_cpu_tensor(), ctx)
return _eager_identity(x, ctx)
else:
return _eager_fill(shape.as_list(), t, ctx)
raise TypeError("Eager execution of tf.constant with unsupported shape "
"(value has %d elements, shape is %s with %d elements)." %
(num_t, shape, shape.num_elements()))
g = ops.get_default_graph()
tensor_value = attr_value_pb2.AttrValue()
tensor_value.tensor.CopyFrom(
tensor_util.make_tensor_proto(
value, dtype=dtype, shape=shape, verify_shape=verify_shape))
dtype_value = attr_value_pb2.AttrValue(type=tensor_value.tensor.dtype)
const_tensor = g.create_op(
"Const", [], [dtype_value.type],
attrs={"value": tensor_value,
"dtype": dtype_value},
name=name).outputs[0]
return const_tensor
def is_constant(tensor_or_op):
if isinstance(tensor_or_op, ops.Tensor):
op = tensor_or_op.op
else:
op = tensor_or_op
return op.type == "Const"
def _constant_tensor_conversion_function(v, dtype=None, name=None,
as_ref=False):
_ = as_ref
return constant(v, dtype=dtype, name=name)
ops.register_tensor_conversion_function(
(list, tuple), _constant_tensor_conversion_function, 100)
ops.register_tensor_conversion_function(
np.ndarray, _constant_tensor_conversion_function, 100)
ops.register_tensor_conversion_function(
np.generic, _constant_tensor_conversion_function, 100)
ops.register_tensor_conversion_function(
object, _constant_tensor_conversion_function, 200)
def _tensor_shape_tensor_conversion_function(s,
dtype=None,
name=None,
as_ref=False):
"""Function to convert TensorShape to Tensor."""
_ = as_ref
if not s.is_fully_defined():
raise ValueError(
"Cannot convert a partially known TensorShape to a Tensor: %s" % s)
s_list = s.as_list()
int64_value = 0
for dim in s_list:
if dim >= 2**31:
int64_value = dim
break
if dtype is not None:
if dtype not in (dtypes.int32, dtypes.int64):
raise TypeError("Cannot convert a TensorShape to dtype: %s" % dtype)
if dtype == dtypes.int32 and int64_value:
raise ValueError("Cannot convert a TensorShape to dtype int32; "
"a dimension is too large (%s)" % int64_value)
else:
dtype = dtypes.int64 if int64_value else dtypes.int32
if name is None:
name = "shape_as_tensor"
return constant(s_list, dtype=dtype, name=name)
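# Illustrative dtype selection (inferred from the loop above, not part of the
# original module): int32 is used unless a dimension needs 64 bits.
#   TensorShape([2, 3])      -> int32 tensor [2, 3]
#   TensorShape([2, 2**31])  -> int64 tensor (a dimension exceeds int32 range)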
ops.register_tensor_conversion_function(
tensor_shape.TensorShape, _tensor_shape_tensor_conversion_function, 100)
def _dimension_tensor_conversion_function(d,
dtype=None,
name=None,
as_ref=False):
"""Function to convert Dimension to Tensor."""
_ = as_ref
if d.value is None:
raise ValueError("Cannot convert an unknown Dimension to a Tensor: %s" % d)
if dtype is not None:
if dtype not in (dtypes.int32, dtypes.int64):
raise TypeError("Cannot convert a TensorShape to dtype: %s" % dtype)
else:
dtype = dtypes.int32
if name is None:
name = "shape_as_tensor"
return constant(d.value, dtype=dtype, name=name)
ops.register_tensor_conversion_function(
tensor_shape.Dimension, _dimension_tensor_conversion_function, 100)
| tillahoffmann/tensorflow | tensorflow/python/framework/constant_op.py | Python | apache-2.0 | 9,632 | 0.007164 |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
# old (pre-0.8.4) location for ChangeFilter
from buildbot.changes.filter import ChangeFilter
_hush_pyflakes = ChangeFilter # keep pyflakes happy
| eunchong/build | third_party/buildbot_8_4p1/buildbot/schedulers/filter.py | Python | bsd-3-clause | 851 | 0.001175 |
import os
from werkzeug.utils import secure_filename
from flask import url_for
from .utils import convert_to_snake_case
from . import AbstractStorage, StorageExists, StorageNotExists, StorageNotAllowed
class LocalStorage(AbstractStorage):
def __init__(self, app):
super(LocalStorage, self).__init__(app)
self.img_folder = app.config.get("FILEUPLOAD_LOCALSTORAGE_IMG_FOLDER", "upload")
self.img_folder = self.img_folder if self.img_folder.endswith("/") else self.img_folder + "/"
self.abs_img_folder = os.path.join(app.root_path, "static", self.img_folder)
if not os.path.exists(self.abs_img_folder):
os.makedirs(self.abs_img_folder)
def get_existing_files(self):
        return os.listdir(self.abs_img_folder)
def get_base_path(self):
return url_for("static", filename=self.img_folder)
def store(self, filename, file_data):
filename = secure_filename(filename)
if self.snake_case:
filename = convert_to_snake_case(filename)
if filename in self.get_existing_files():
raise StorageExists()
if self.all_allowed or any(filename.endswith('.' + x) for x in self.allowed):
file_data.save(os.path.join(self.abs_img_folder, filename))
else:
raise StorageNotAllowed()
return filename
def delete(self, filename):
existing_files = self.get_existing_files()
if filename not in existing_files:
raise StorageNotExists()
else:
os.remove(os.path.join(self.abs_img_folder, filename))
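# Illustrative wiring (a sketch assuming a configured Flask app and a werkzeug
# FileStorage object named file_data; names are examples, not part of the
# original module):
#
#   app.config["FILEUPLOAD_LOCALSTORAGE_IMG_FOLDER"] = "upload"
#   storage = LocalStorage(app)
#   saved_name = storage.store(file_data.filename, file_data)
#   storage.delete(saved_name)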
| Speedy1991/Flask-FileUpload | flask_fileupload/storage/storage.py | Python | mit | 1,614 | 0.003098 |
import numpy as np
import pickle
import glob
from matplotlib import rc
from lamost import load_spectra, load_labels
from TheCannon import continuum_normalization
from TheCannon import dataset
from TheCannon import model
rc('text', usetex=True)
rc('font', family='serif')
with np.load("test_data_raw.npz") as data:
test_IDs = data['arr_0']
wl = data['arr_1']
test_flux = data['arr_2']
test_ivar = data['arr_3']
data = dataset.Dataset(
wl, test_IDs[0:10], test_flux[0:10,:], test_ivar[0:10,:], [1], test_IDs, test_flux, test_ivar)
data.set_label_names(['T_{eff}', '\log g', '[M/H]', '[\\alpha/Fe]'])
data.continuum_normalize_gaussian_smoothing(L=50)
np.savez("./test_norm", test_IDs, wl, data.test_flux, data.test_ivar)
| annayqho/TheCannon | code/lamost/xcalib_5labels/cont_norm_test_obj.py | Python | mit | 747 | 0.005355 |
"""Utilities to help convert mp4s to fmp4s."""
import io
from typing import Generator


def find_box(
    segment: io.BytesIO, target_type: bytes, box_start: int = 0
) -> Generator[int, None, None]:
    """Yield the location of each box of the given type.

    If box_start is provided, only sub-boxes of the box at box_start are
    scanned. Yields byte offsets within the segment.
    """
if box_start == 0:
box_end = segment.seek(0, io.SEEK_END)
segment.seek(0)
index = 0
else:
segment.seek(box_start)
box_end = box_start + int.from_bytes(segment.read(4), byteorder="big")
index = box_start + 8
    while True:
if index > box_end - 8: # End of box, not found
break
segment.seek(index)
box_header = segment.read(8)
if box_header[4:8] == target_type:
yield index
segment.seek(index)
index += int.from_bytes(box_header[0:4], byteorder="big")
def get_init(segment: io.BytesIO) -> bytes:
"""Get init section from fragmented mp4."""
moof_location = next(find_box(segment, b"moof"))
segment.seek(0)
return segment.read(moof_location)
def get_m4s(segment: io.BytesIO, sequence: int) -> bytes:
"""Get m4s section from fragmented mp4."""
moof_location = next(find_box(segment, b"moof"))
mfra_location = next(find_box(segment, b"mfra"))
segment.seek(moof_location)
return segment.read(mfra_location - moof_location)
def get_codec_string(segment: io.BytesIO) -> str:
"""Get RFC 6381 codec string."""
codecs = []
# Find moov
moov_location = next(find_box(segment, b"moov"))
# Find tracks
for trak_location in find_box(segment, b"trak", moov_location):
# Drill down to media info
mdia_location = next(find_box(segment, b"mdia", trak_location))
minf_location = next(find_box(segment, b"minf", mdia_location))
stbl_location = next(find_box(segment, b"stbl", minf_location))
stsd_location = next(find_box(segment, b"stsd", stbl_location))
# Get stsd box
segment.seek(stsd_location)
stsd_length = int.from_bytes(segment.read(4), byteorder="big")
segment.seek(stsd_location)
stsd_box = segment.read(stsd_length)
# Base Codec
codec = stsd_box[20:24].decode("utf-8")
# Handle H264
if (
codec in ("avc1", "avc2", "avc3", "avc4")
and stsd_length > 110
and stsd_box[106:110] == b"avcC"
):
profile = stsd_box[111:112].hex()
compatibility = stsd_box[112:113].hex()
# Cap level at 4.1 for compatibility with some Google Cast devices
level = hex(min(stsd_box[113], 41))[2:]
codec += "." + profile + compatibility + level
# Handle H265
elif (
codec in ("hev1", "hvc1")
and stsd_length > 110
and stsd_box[106:110] == b"hvcC"
):
tmp_byte = int.from_bytes(stsd_box[111:112], byteorder="big")
# Profile Space
codec += "."
profile_space_map = {0: "", 1: "A", 2: "B", 3: "C"}
profile_space = tmp_byte >> 6
codec += profile_space_map[profile_space]
general_profile_idc = tmp_byte & 31
codec += str(general_profile_idc)
# Compatibility
codec += "."
general_profile_compatibility = int.from_bytes(
stsd_box[112:116], byteorder="big"
)
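            # The codec string encodes the 32 compatibility flags in reverse
            # bit order (per ISO/IEC 14496-15), hence the bit-reversal below.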
reverse = 0
for i in range(0, 32):
reverse |= general_profile_compatibility & 1
if i == 31:
break
reverse <<= 1
general_profile_compatibility >>= 1
codec += hex(reverse)[2:]
# Tier Flag
if (tmp_byte & 32) >> 5 == 0:
codec += ".L"
else:
codec += ".H"
codec += str(int.from_bytes(stsd_box[122:123], byteorder="big"))
# Constraint String
has_byte = False
constraint_string = ""
for i in range(121, 115, -1):
gci = int.from_bytes(stsd_box[i : i + 1], byteorder="big")
if gci or has_byte:
constraint_string = "." + hex(gci)[2:] + constraint_string
has_byte = True
codec += constraint_string
# Handle Audio
elif codec == "mp4a":
oti = None
dsi = None
# Parse ES Descriptors
oti_loc = stsd_box.find(b"\x04\x80\x80\x80")
if oti_loc > 0:
oti = stsd_box[oti_loc + 5 : oti_loc + 6].hex()
codec += f".{oti}"
dsi_loc = stsd_box.find(b"\x05\x80\x80\x80")
if dsi_loc > 0:
dsi_length = int.from_bytes(
stsd_box[dsi_loc + 4 : dsi_loc + 5], byteorder="big"
)
dsi_data = stsd_box[dsi_loc + 5 : dsi_loc + 5 + dsi_length]
dsi0 = int.from_bytes(dsi_data[0:1], byteorder="big")
dsi = (dsi0 & 248) >> 3
if dsi == 31 and len(dsi_data) >= 2:
dsi1 = int.from_bytes(dsi_data[1:2], byteorder="big")
dsi = 32 + ((dsi0 & 7) << 3) + ((dsi1 & 224) >> 5)
codec += f".{dsi}"
codecs.append(codec)
return ",".join(codecs)
| tboyce021/home-assistant | homeassistant/components/stream/fmp4utils.py | Python | apache-2.0 | 5,317 | 0.001128 |
EQUALS = 'equals'
GT = 'gt'
LT = 'lt'
IN = 'in'
OPERATOR_SEPARATOR = '__'
REVERSE_ORDER = '-'
ALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1, IN: 1}
def split_to_field_and_filter_type(filter_name):
filter_split = filter_name.split(OPERATOR_SEPARATOR)
filter_type = filter_split[-1] if len(filter_split) > 0 else None
if filter_type in ALL_OPERATORS:
return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type
else:
return filter_name, None
def split_to_field_and_order_type(field_name_with_operator):
if field_name_with_operator.startswith(REVERSE_ORDER):
return field_name_with_operator[1:], REVERSE_ORDER
else:
return field_name_with_operator, None
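# Illustrative behavior (inferred from the parsing above, not part of the
# original module):
#   split_to_field_and_filter_type('created_at__gt')  -> ('created_at', 'gt')
#   split_to_field_and_filter_type('name')            -> ('name', None)
#   split_to_field_and_order_type('-created_at')      -> ('created_at', '-')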
def transform_to_list(val):
if isinstance(val, (list, tuple)):
return val
else:
        return [val]
| Aplopio/rip | rip/filter_operators.py | Python | mit | 831 | 0.001203 |
# Giles: tanbo.py
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from giles.games.seated_game import SeatedGame
from giles.games.piece import Piece
from giles.games.seat import Seat
from giles.games.square_grid_layout import SquareGridLayout, COLS
from giles.state import State
from giles.utils import demangle_move, get_plural_str
# Orthogonal neighbor deltas: a point connects to its four neighbors.
CONNECTION_DELTAS = ((-1, 0), (1, 0), (0, -1), (0, 1))
TAGS = ["abstract", "capture", "square", "2p"]
class Tanbo(SeatedGame):
"""A Tanbo game table implementation. Invented in 1993 by Mark Steere.
This only implements the 2p version, although it does have the 9x9, 13x13,
and 19x19 sizes. There's also a 21x21 size that came from discussion with
Mark, and 5x5 and 7x7 sizes that came naturally from the piece layouts.
"""
def __init__(self, server, table_name):
super(Tanbo, self).__init__(server, table_name)
self.game_display_name = "Tanbo"
self.game_name = "tanbo"
self.seats = [
Seat("Black"),
Seat("White"),
]
self.min_players = 2
self.max_players = 2
self.state = State("need_players")
self.prefix = "(^RTanbo^~): "
self.log_prefix = "%s/%s: " % (self.table_display_name, self.game_display_name)
# Tanbo-specific stuff.
self.size = 19
self.turn = None
self.black = self.seats[0]
self.black.data.seat_str = "^KBlack^~"
self.black.data.root_list = []
self.white = self.seats[1]
self.white.data.seat_str = "^WWhite^~"
self.white.data.root_list = []
self.resigner = None
self.layout = None
# Initialize the starting layout.
self.init_layout()
def get_root_piece(self, seat, num):
if seat == self.black:
p = Piece("^K", "x", "X")
else:
p = Piece("^W", "o", "O")
p.data.owner = seat
p.data.num = num
return p
def init_layout(self):
# Create the layout and fill it with pieces.
self.layout = SquareGridLayout(highlight_color="^I")
self.layout.resize(self.size)
black_count = 0
white_count = 0
self.black.data.root_list = []
self.white.data.root_list = []
# There are different layouts depending on the size. All of them
# alternate between black and white pieces; the smallest sizes have
# 4 roots, the standard sizes 16, and the 21x21 board has 36.
if self.size == 5:
jump_delta = 4
extent = 2
offset = 0
elif self.size == 7:
jump_delta = 6
extent = 2
offset = 0
elif self.size == 9:
jump_delta = 6
extent = 2
offset = 1
elif self.size == 13:
jump_delta = 4
extent = 4
offset = 0
elif self.size == 19:
jump_delta = 6
extent = 4
offset = 0
else: # size == 21
jump_delta = 4
extent = 6
offset = 0
for i in range(extent):
for j in range(extent):
if (i + j) % 2:
p = self.get_root_piece(self.black, black_count)
self.black.data.root_list.append(p)
black_count += 1
else:
p = self.get_root_piece(self.white, white_count)
self.white.data.root_list.append(p)
white_count += 1
row = offset + i * jump_delta
col = offset + j * jump_delta
p.data.start = (row, col)
self.layout.place(p, row, col, update=False)
self.layout.update()
def get_sp_str(self, seat):
return "^C%s^~ (%s)" % (seat.player_name, seat.data.seat_str)
def get_turn_str(self):
if not self.turn:
return "The game has not yet started.\n"
return "It is ^C%s^~'s turn (%s).\n" % (self.turn.player_name, self.turn.data.seat_str)
def show(self, player):
player.tell_cc(self.layout)
player.tell_cc(self.get_turn_str())
def send_board(self):
for player in self.channel.listeners:
self.show(player)
def set_size(self, player, size_str):
if not size_str.isdigit():
self.tell_pre(player, "You didn't even send a number!\n")
return False
size = int(size_str)
if size not in (5, 7, 9, 13, 19, 21,):
self.tell_pre(player, "Size must be 5, 7, 9, 13, 19, or 21.\n")
return False
        # Valid!
        self.size = size
        self.bc_pre("^R%s^~ has set the board size to ^C%d^~.\n" % (player, size))
        self.init_layout()
        return True
def can_place_at(self, seat, row, col):
# You can place a piece in Tanbo iff it is adjacent to exactly one of
# your own pieces. If a location is valid, we'll return the piece that
# is adjacent; otherwise we return nothing.
if self.layout.grid[row][col]:
# Occupied; clearly can't place here.
return None
adj_count = 0
for r_delta, c_delta in CONNECTION_DELTAS:
new_r = row + r_delta
new_c = col + c_delta
if self.layout.is_valid(new_r, new_c):
loc = self.layout.grid[new_r][new_c]
if loc and loc.data.owner == seat:
piece = loc
adj_count += 1
if adj_count == 1:
return piece
else:
return None
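    # Illustrative placement rule (assumed notation: X = friendly piece,
    # . = empty, * = candidate point). The left placement is legal (exactly
    # one adjacent X); the right one is not (two adjacent X):
    #
    #   . X .        . X .
    #   . * .   ok   X * .   bad
    #   . . .        . . .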
def recurse_is_bound(self, piece, row, col, prev_row, prev_col):
# Thanks to the way Tanbo placement works, we don't have to worry about
# loops, so we can get away with not using an adjacenty map and instead
# just track the direction we came from.
#
# Bail if this isn't a valid location.
if not self.layout.is_valid(row, col):
return True
loc = self.layout.grid[row][col]
# If there's no piece here, see if we can place one.
if not loc:
if self.can_place_at(piece.data.owner, row, col):
# Yup. This root isn't bound.
return False
else:
# No, this location is binding.
return True
elif loc != piece:
# Some other root. Definitely binding.
return True
else:
# Okay, it's another part of this root. Recurse, but don't double
# back.
for r_delta, c_delta in CONNECTION_DELTAS:
new_r = row + r_delta
new_c = col + c_delta
if new_r != prev_row or new_c != prev_col:
if not self.recurse_is_bound(piece, new_r, new_c, row, col):
# A recursive call found a liberty. Awesome!
return False
# All of the recursive calls returned that they were bound. This
# (sub)root is bound.
return True
def root_is_bound(self, piece):
# We'll just start recursing at the root's starting location and find
# whether it's bound or not.
row, col = piece.data.start
return self.recurse_is_bound(piece, row, col, None, None)
def kill_root(self, piece):
# We could do this recursively, but we're lazy.
for r in range(self.size):
for c in range(self.size):
loc = self.layout.grid[r][c]
if loc == piece:
self.layout.remove(r, c, update=False)
self.layout.update()
# Remove this root from the owner's root list.
piece.data.owner.data.root_list.remove(piece)
def update_roots(self, row, col):
# If the piece at row, col is part of a bounded root, that root is killed.
piece = self.layout.grid[row][col]
if self.root_is_bound(piece):
self.kill_root(piece)
# -1 indicates a suicide.
return -1
# Not a suicide; loop through all roots, finding bound ones.
bound_root_list = []
all_roots = self.black.data.root_list[:]
all_roots.extend(self.white.data.root_list)
for root in all_roots:
if self.root_is_bound(root):
bound_root_list.append(root)
bound_count = 0
for bound_root in bound_root_list:
self.kill_root(bound_root)
bound_count += 1
# Return the number of roots we killed.
return bound_count
def move(self, player, move_bits):
seat = self.get_seat_of_player(player)
if not seat:
self.tell_pre(player, "You can't move; you're not playing!\n")
return False
if seat != self.turn:
self.tell_pre(player, "You must wait for your turn to move.\n")
return False
col, row = move_bits
# Is the move on the board?
if not self.layout.is_valid(row, col):
self.tell_pre(player, "Your move is out of bounds.\n")
return False
# Is it a valid Tanbo play?
piece = self.can_place_at(seat, row, col)
if not piece:
self.tell_pre(player, "That location is not adjacent to exactly one of your pieces.\n")
return False
# Valid. Put the piece there.
move_str = "%s%s" % (COLS[col], row + 1)
self.layout.place(piece, row, col, True)
# Update the root statuses.
root_kill_str = ""
root_kill = self.update_roots(row, col)
if root_kill < 0:
root_kill_str = ", ^ysuiciding the root^~"
elif root_kill > 0:
root_kill_str = ", ^Ykilling %s^~" % (get_plural_str(root_kill, "root"))
self.bc_pre("%s grows a root to ^C%s^~%s.\n" % (self.get_sp_str(seat), move_str, root_kill_str))
return True
def resign(self, player):
seat = self.get_seat_of_player(player)
if not seat:
self.tell_pre(player, "You can't resign; you're not playing!\n")
return False
if seat != self.turn:
self.tell_pre(player, "You must wait for your turn to resign.\n")
return False
self.resigner = seat
self.bc_pre("%s is resigning from the game.\n" % self.get_sp_str(seat))
return True
def tick(self):
# If both seats are occupied and the game is active, start.
if (self.state.get() == "need_players" and self.black.player and
self.white.player and self.active):
self.bc_pre("%s: ^C%s^~; %s: ^C%s^~\n" % (self.black.data.seat_str, self.black.player_name, self.white.data.seat_str, self.white.player_name))
self.state.set("playing")
self.turn = self.black
self.send_board()
def handle(self, player, command_str):
# Handle common commands.
handled = self.handle_common_commands(player, command_str)
if not handled:
state = self.state.get()
command_bits = command_str.lower().split()
primary = command_bits[0]
if state == "setup":
if primary in ("size", "sz"):
if len(command_bits) == 2:
self.set_size(player, command_bits[1])
else:
self.tell_pre(player, "Invalid size command.\n")
handled = True
elif primary in ("done", "ready", "d", "r",):
self.bc_pre("The game is now looking for players.\n")
self.state.set("need_players")
handled = True
elif state == "need_players":
if primary in ("config", "setup", "conf",):
self.bc_pre("^R%s^~ has switched the game to setup mode.\n" % player)
self.state.set("setup")
handled = True
elif state == "playing":
made_move = False
if primary in ("move", "play", "mv", "pl",):
move_bits = demangle_move(command_bits[1:])
if move_bits and len(move_bits) == 1:
made_move = self.move(player, move_bits[0])
else:
self.tell_pre(player, "Invalid move command.\n")
handled = True
elif primary in ("resign",):
made_move = self.resign(player)
handled = True
if made_move:
# Did someone win?
winner = self.find_winner()
if winner:
# Yup!
self.resolve(winner)
self.finish()
else:
# No. Change turns and send the board to listeners.
self.turn = self.next_seat(self.turn)
self.send_board()
if not handled:
self.tell_pre(player, "Invalid command.\n")
def find_winner(self):
# Did someone resign?
if self.resigner == self.white:
return self.black
elif self.resigner == self.black:
return self.white
# If one player has no pieces left, the other player won.
if not len(self.white.data.root_list):
return self.black
elif not len(self.black.data.root_list):
return self.white
# No winner.
return None
def resolve(self, winner):
self.send_board()
self.bc_pre("%s wins!\n" % self.get_sp_str(winner))
def show_help(self, player):
super(Tanbo, self).show_help(player)
player.tell_cc("\nTANBO SETUP PHASE:\n\n")
player.tell_cc(" ^!setup^., ^!config^., ^!conf^. Enter setup phase.\n")
player.tell_cc(" ^!size^. 5|7|9|13|19|21, ^!sz^. Set board to <size>.\n")
player.tell_cc(" ^!ready^., ^!done^., ^!r^., ^!d^. End setup phase.\n")
player.tell_cc("\nTANBO PLAY:\n\n")
player.tell_cc(" ^!move^. <ln>, ^!play^., ^!mv^., ^!pl^. Make move <ln> (letter number).\n")
player.tell_cc(" ^!resign^. Resign.\n")
| sunfall/giles | giles/games/tanbo/tanbo.py | Python | agpl-3.0 | 15,089 | 0.00106 |
#!/usr/bin/env python
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provisions Android devices with settings required for bots.
Usage:
./provision_devices.py [-d <device serial number>]
"""
import argparse
import datetime
import json
import logging
import os
import posixpath
import re
import sys
import time
# Import _strptime before threaded code. datetime.datetime.strptime is
# threadsafe except for the initial import of the _strptime module.
# See crbug.com/584730 and https://bugs.python.org/issue7980.
import _strptime # pylint: disable=unused-import
if __name__ == '__main__':
sys.path.append(
os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', '..', '..')))
from devil.android import battery_utils
from devil.android import device_blacklist
from devil.android import device_errors
from devil.android import device_temp_file
from devil.android import device_utils
from devil.android import settings
from devil.android.sdk import adb_wrapper
from devil.android.sdk import intent
from devil.android.sdk import keyevent
from devil.android.sdk import version_codes
from devil.android.tools import script_common
from devil.constants import exit_codes
from devil.utils import run_tests_helper
from devil.utils import timeout_retry
logger = logging.getLogger(__name__)
_SYSTEM_APP_DIRECTORIES = ['/system/app/', '/system/priv-app/']
_SYSTEM_WEBVIEW_NAMES = ['webview', 'WebViewGoogle']
_CHROME_PACKAGE_REGEX = re.compile('.*chrom.*')
_TOMBSTONE_REGEX = re.compile('tombstone.*')
class _DEFAULT_TIMEOUTS(object):
# L can take a while to reboot after a wipe.
LOLLIPOP = 600
PRE_LOLLIPOP = 180
HELP_TEXT = '{}s on L, {}s on pre-L'.format(LOLLIPOP, PRE_LOLLIPOP)
class ProvisionStep(object):
def __init__(self, cmd, reboot=False):
self.cmd = cmd
self.reboot = reboot
def ProvisionDevices(
devices,
blacklist_file,
adb_key_files=None,
disable_location=False,
disable_mock_location=False,
disable_network=False,
disable_system_chrome=False,
emulators=False,
enable_java_debug=False,
max_battery_temp=None,
min_battery_level=None,
output_device_blacklist=None,
reboot_timeout=None,
remove_system_webview=False,
system_app_remove_list=None,
system_package_remove_list=None,
wipe=True):
blacklist = (device_blacklist.Blacklist(blacklist_file)
if blacklist_file
else None)
system_app_remove_list = system_app_remove_list or []
system_package_remove_list = system_package_remove_list or []
try:
devices = script_common.GetDevices(devices, blacklist)
except device_errors.NoDevicesError:
logging.error('No available devices to provision.')
if blacklist:
logging.error('Local device blacklist: %s', blacklist.Read())
raise
devices = [d for d in devices
if not emulators or d.adb.is_emulator]
parallel_devices = device_utils.DeviceUtils.parallel(devices)
steps = []
if wipe:
steps += [ProvisionStep(lambda d: Wipe(d, adb_key_files), reboot=True)]
steps += [ProvisionStep(
lambda d: SetProperties(d, enable_java_debug, disable_location,
disable_mock_location),
reboot=not emulators)]
if disable_network:
steps.append(ProvisionStep(DisableNetwork))
if disable_system_chrome:
steps.append(ProvisionStep(DisableSystemChrome))
if max_battery_temp:
steps.append(ProvisionStep(
lambda d: WaitForBatteryTemperature(d, max_battery_temp)))
if min_battery_level:
steps.append(ProvisionStep(
lambda d: WaitForCharge(d, min_battery_level)))
if remove_system_webview:
system_app_remove_list.extend(_SYSTEM_WEBVIEW_NAMES)
if system_app_remove_list or system_package_remove_list:
steps.append(ProvisionStep(
lambda d: RemoveSystemApps(
d, system_app_remove_list, system_package_remove_list)))
steps.append(ProvisionStep(SetDate))
steps.append(ProvisionStep(CheckExternalStorage))
parallel_devices.pMap(ProvisionDevice, steps, blacklist, reboot_timeout)
blacklisted_devices = blacklist.Read() if blacklist else []
if output_device_blacklist:
with open(output_device_blacklist, 'w') as f:
json.dump(blacklisted_devices, f)
if all(d in blacklisted_devices for d in devices):
raise device_errors.NoDevicesError
return 0
def ProvisionDevice(device, steps, blacklist, reboot_timeout=None):
try:
if not reboot_timeout:
if device.build_version_sdk >= version_codes.LOLLIPOP:
reboot_timeout = _DEFAULT_TIMEOUTS.LOLLIPOP
else:
reboot_timeout = _DEFAULT_TIMEOUTS.PRE_LOLLIPOP
for step in steps:
try:
device.WaitUntilFullyBooted(timeout=reboot_timeout, retries=0)
except device_errors.CommandTimeoutError:
logger.error('Device did not finish booting. Will try to reboot.')
device.Reboot(timeout=reboot_timeout)
step.cmd(device)
if step.reboot:
device.Reboot(False, retries=0)
device.adb.WaitForDevice()
except device_errors.CommandTimeoutError:
logger.exception('Timed out waiting for device %s. Adding to blacklist.',
str(device))
if blacklist:
blacklist.Extend([str(device)], reason='provision_timeout')
except (device_errors.CommandFailedError,
device_errors.DeviceUnreachableError):
logger.exception('Failed to provision device %s. Adding to blacklist.',
str(device))
if blacklist:
blacklist.Extend([str(device)], reason='provision_failure')
def Wipe(device, adb_key_files=None):
if (device.IsUserBuild() or
device.build_version_sdk >= version_codes.MARSHMALLOW):
WipeChromeData(device)
package = 'com.google.android.gms'
if device.GetApplicationPaths(package):
version_name = device.GetApplicationVersion(package)
logger.info('Version name for %s is %s', package, version_name)
else:
logger.info('Package %s is not installed', package)
else:
WipeDevice(device, adb_key_files)
def WipeChromeData(device):
"""Wipes chrome specific data from device
(1) uninstall any app whose name matches *chrom*, except
com.android.chrome, which is the chrome stable package. Doing so also
removes the corresponding dirs under /data/data/ and /data/app/
(2) remove any dir under /data/app-lib/ whose name matches *chrom*
(3) remove any files under /data/tombstones/ whose name matches "tombstone*"
(4) remove /data/local.prop if there is any
(5) remove /data/local/chrome-command-line if there is any
(6) remove anything under /data/local/.config/ if the dir exists
(this is telemetry related)
(7) remove anything under /data/local/tmp/
Arguments:
device: the device to wipe
"""
try:
if device.IsUserBuild():
_UninstallIfMatch(device, _CHROME_PACKAGE_REGEX)
device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(),
shell=True, check_return=True)
device.RunShellCommand('rm -rf /data/local/tmp/*',
shell=True, check_return=True)
else:
device.EnableRoot()
_UninstallIfMatch(device, _CHROME_PACKAGE_REGEX)
_WipeUnderDirIfMatch(device, '/data/app-lib/', _CHROME_PACKAGE_REGEX)
_WipeUnderDirIfMatch(device, '/data/tombstones/', _TOMBSTONE_REGEX)
_WipeFileOrDir(device, '/data/local.prop')
_WipeFileOrDir(device, '/data/local/chrome-command-line')
_WipeFileOrDir(device, '/data/local/.config/')
_WipeFileOrDir(device, '/data/local/tmp/')
device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(),
shell=True, check_return=True)
except device_errors.CommandFailedError:
logger.exception('Possible failure while wiping the device. '
'Attempting to continue.')
def _UninstallIfMatch(device, pattern):
installed_packages = device.RunShellCommand(
['pm', 'list', 'packages'], check_return=True)
installed_system_packages = [
pkg.split(':')[1] for pkg in device.RunShellCommand(
['pm', 'list', 'packages', '-s'], check_return=True)]
for package_output in installed_packages:
package = package_output.split(":")[1]
if pattern.match(package) and package not in installed_system_packages:
device.Uninstall(package)
def _WipeUnderDirIfMatch(device, path, pattern):
for filename in device.ListDirectory(path):
if pattern.match(filename):
_WipeFileOrDir(device, posixpath.join(path, filename))
def _WipeFileOrDir(device, path):
if device.PathExists(path):
device.RunShellCommand(['rm', '-rf', path], check_return=True)
def WipeDevice(device, adb_key_files):
"""Wipes data from device, keeping only the adb_keys for authorization.
After wiping data on a device that has been authorized, adb can still
communicate with the device, but after reboot the device will need to be
re-authorized because the adb keys file is stored in /data/misc/adb/.
Thus, adb_keys file is rewritten so the device does not need to be
re-authorized.
Arguments:
device: the device to wipe
"""
try:
device.EnableRoot()
device_authorized = device.FileExists(adb_wrapper.ADB_KEYS_FILE)
if device_authorized:
adb_keys = device.ReadFile(adb_wrapper.ADB_KEYS_FILE,
as_root=True).splitlines()
device.RunShellCommand(['wipe', 'data'],
as_root=True, check_return=True)
device.adb.WaitForDevice()
if device_authorized:
adb_keys_set = set(adb_keys)
for adb_key_file in adb_key_files or []:
try:
with open(adb_key_file, 'r') as f:
adb_public_keys = f.readlines()
adb_keys_set.update(adb_public_keys)
except IOError:
logger.warning('Unable to find adb keys file %s.', adb_key_file)
_WriteAdbKeysFile(device, '\n'.join(adb_keys_set))
except device_errors.CommandFailedError:
logger.exception('Possible failure while wiping the device. '
'Attempting to continue.')
def _WriteAdbKeysFile(device, adb_keys_string):
dir_path = posixpath.dirname(adb_wrapper.ADB_KEYS_FILE)
device.RunShellCommand(['mkdir', '-p', dir_path],
as_root=True, check_return=True)
device.RunShellCommand(['restorecon', dir_path],
as_root=True, check_return=True)
device.WriteFile(adb_wrapper.ADB_KEYS_FILE, adb_keys_string, as_root=True)
device.RunShellCommand(['restorecon', adb_wrapper.ADB_KEYS_FILE],
as_root=True, check_return=True)
def SetProperties(device, enable_java_debug, disable_location,
disable_mock_location):
try:
device.EnableRoot()
except device_errors.CommandFailedError as e:
logger.warning(str(e))
if not device.IsUserBuild():
_ConfigureLocalProperties(device, enable_java_debug)
else:
logger.warning('Cannot configure properties in user builds.')
settings.ConfigureContentSettings(
device, settings.DETERMINISTIC_DEVICE_SETTINGS)
if disable_location:
settings.ConfigureContentSettings(
device, settings.DISABLE_LOCATION_SETTINGS)
else:
settings.ConfigureContentSettings(
device, settings.ENABLE_LOCATION_SETTINGS)
if disable_mock_location:
settings.ConfigureContentSettings(
device, settings.DISABLE_MOCK_LOCATION_SETTINGS)
else:
settings.ConfigureContentSettings(
device, settings.ENABLE_MOCK_LOCATION_SETTINGS)
settings.SetLockScreenSettings(device)
# Some device types can momentarily disappear after setting properties.
device.adb.WaitForDevice()
def DisableNetwork(device):
settings.ConfigureContentSettings(
device, settings.NETWORK_DISABLED_SETTINGS)
if device.build_version_sdk >= version_codes.MARSHMALLOW:
# Ensure that NFC is also switched off.
device.RunShellCommand(['svc', 'nfc', 'disable'],
as_root=True, check_return=True)
def DisableSystemChrome(device):
# The system chrome version on the device interferes with some tests.
device.RunShellCommand(['pm', 'disable', 'com.android.chrome'],
as_root=True, check_return=True)
def _FindSystemPackagePaths(device, system_package_list):
found_paths = []
for system_package in system_package_list:
found_paths.extend(device.GetApplicationPaths(system_package))
return [p for p in found_paths if p.startswith('/system/')]
def _FindSystemAppPaths(device, system_app_list):
found_paths = []
for system_app in system_app_list:
for directory in _SYSTEM_APP_DIRECTORIES:
path = os.path.join(directory, system_app)
if device.PathExists(path):
found_paths.append(path)
return found_paths
def RemoveSystemApps(
device, system_app_remove_list, system_package_remove_list):
"""Attempts to remove the provided system apps from the given device.
Arguments:
device: The device to remove the system apps from.
system_app_remove_list: A list of app names to remove, e.g.
['WebViewGoogle', 'GoogleVrCore']
system_package_remove_list: A list of app packages to remove, e.g.
['com.google.android.webview']
"""
device.EnableRoot()
if device.HasRoot():
system_app_paths = (
_FindSystemAppPaths(device, system_app_remove_list) +
_FindSystemPackagePaths(device, system_package_remove_list))
if system_app_paths:
# Disable Marshmallow's Verity security feature
if device.build_version_sdk >= version_codes.MARSHMALLOW:
logger.info('Disabling Verity on %s', device.serial)
device.adb.DisableVerity()
device.Reboot()
device.WaitUntilFullyBooted()
device.EnableRoot()
device.adb.Remount()
device.RunShellCommand(['stop'], check_return=True)
device.RemovePath(system_app_paths, force=True, recursive=True)
device.RunShellCommand(['start'], check_return=True)
else:
raise device_errors.CommandFailedError(
'Failed to remove system apps from non-rooted device', str(device))
def _ConfigureLocalProperties(device, java_debug=True):
"""Set standard readonly testing device properties prior to reboot."""
local_props = [
'persist.sys.usb.config=adb',
'ro.monkey=1',
'ro.test_harness=1',
'ro.audio.silent=1',
'ro.setupwizard.mode=DISABLED',
]
if java_debug:
local_props.append(
'%s=all' % device_utils.DeviceUtils.JAVA_ASSERT_PROPERTY)
local_props.append('debug.checkjni=1')
try:
device.WriteFile(
device.LOCAL_PROPERTIES_PATH,
'\n'.join(local_props), as_root=True)
# Android will not respect the local props file if it is world writable.
device.RunShellCommand(
['chmod', '644', device.LOCAL_PROPERTIES_PATH],
as_root=True, check_return=True)
except device_errors.CommandFailedError:
logger.exception('Failed to configure local properties.')
def FinishProvisioning(device):
# The lockscreen can't be disabled on user builds, so send a keyevent
# to unlock it.
if device.IsUserBuild():
device.SendKeyEvent(keyevent.KEYCODE_MENU)
def WaitForCharge(device, min_battery_level):
battery = battery_utils.BatteryUtils(device)
try:
battery.ChargeDeviceToLevel(min_battery_level)
except device_errors.DeviceChargingError:
device.Reboot()
battery.ChargeDeviceToLevel(min_battery_level)
def WaitForBatteryTemperature(device, max_battery_temp):
try:
battery = battery_utils.BatteryUtils(device)
battery.LetBatteryCoolToTemperature(max_battery_temp)
except device_errors.CommandFailedError:
logger.exception('Unable to let battery cool to specified temperature.')
def SetDate(device):
def _set_and_verify_date():
if device.build_version_sdk >= version_codes.MARSHMALLOW:
date_format = '%m%d%H%M%Y.%S'
set_date_command = ['date', '-u']
get_date_command = ['date', '-u']
else:
date_format = '%Y%m%d.%H%M%S'
set_date_command = ['date', '-s']
get_date_command = ['date']
# TODO(jbudorick): This is wrong on pre-M devices -- get/set are
# dealing in local time, but we're setting based on GMT.
strgmtime = time.strftime(date_format, time.gmtime())
set_date_command.append(strgmtime)
device.RunShellCommand(set_date_command, as_root=True, check_return=True)
get_date_command.append('+"%Y%m%d.%H%M%S"')
device_time = device.RunShellCommand(
get_date_command, check_return=True,
as_root=True, single_line=True).replace('"', '')
device_time = datetime.datetime.strptime(device_time, "%Y%m%d.%H%M%S")
correct_time = datetime.datetime.strptime(strgmtime, date_format)
tdelta = (correct_time - device_time).seconds
if tdelta <= 1:
logger.info('Date/time successfully set on %s', device)
return True
else:
logger.error('Date mismatch. Device: %s Correct: %s',
device_time.isoformat(), correct_time.isoformat())
return False
# Sometimes the date is not set correctly on the devices. Retry on failure.
if device.IsUserBuild():
# TODO(bpastene): Figure out how to set the date & time on user builds.
pass
else:
if not timeout_retry.WaitFor(
_set_and_verify_date, wait_period=1, max_tries=2):
raise device_errors.CommandFailedError(
'Failed to set date & time.', device_serial=str(device))
device.EnableRoot()
# The following intent can take a bit to complete when ran shortly after
# device boot-up.
device.BroadcastIntent(
intent.Intent(action='android.intent.action.TIME_SET'),
timeout=180)
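# Illustrative date strings for the two formats above (assumed sample time of
# 2017-01-02 15:04:30 UTC; not part of the original module):
#   M and later: date -u 010215042017.30   (%m%d%H%M%Y.%S)
#   pre-M:       date -s 20170102.150430   (%Y%m%d.%H%M%S)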
def LogDeviceProperties(device):
props = device.RunShellCommand(['getprop'], check_return=True)
for prop in props:
logger.info(' %s', prop)
def CheckExternalStorage(device):
"""Checks that storage is writable and if not makes it writable.
Arguments:
device: The device to check.
"""
try:
with device_temp_file.DeviceTempFile(
device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f:
device.WriteFile(f.name, 'test')
except device_errors.CommandFailedError:
logger.info('External storage not writable. Remounting / as RW')
device.RunShellCommand(['mount', '-o', 'remount,rw', '/'],
check_return=True, as_root=True)
device.EnableRoot()
with device_temp_file.DeviceTempFile(
device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f:
device.WriteFile(f.name, 'test')
def main(raw_args):
# Recommended options on perf bots:
# --disable-network
# TODO(tonyg): We eventually want network on. However, currently radios
# can cause perfbots to drain faster than they charge.
# --min-battery-level 95
# Some perf bots run benchmarks with USB charging disabled which leads
# to gradual draining of the battery. We must wait for a full charge
# before starting a run in order to keep the devices online.
parser = argparse.ArgumentParser(
description='Provision Android devices with settings required for bots.')
script_common.AddDeviceArguments(parser)
script_common.AddEnvironmentArguments(parser)
parser.add_argument(
'--adb-key-files', type=str, nargs='+',
help='list of adb keys to push to device')
parser.add_argument(
'--disable-location', action='store_true',
help='disable Google location services on devices')
parser.add_argument(
'--disable-mock-location', action='store_true', default=False,
help='Set ALLOW_MOCK_LOCATION to false')
parser.add_argument(
'--disable-network', action='store_true',
help='disable network access on devices')
parser.add_argument(
'--disable-java-debug', action='store_false',
dest='enable_java_debug', default=True,
help='disable Java property asserts and JNI checking')
parser.add_argument(
'--disable-system-chrome', action='store_true',
help='DEPRECATED: use --remove-system-packages com.android.google '
'Disable the system chrome from devices.')
parser.add_argument(
'--emulators', action='store_true',
help='provision only emulators and ignore usb devices '
'(this will not wipe emulators)')
parser.add_argument(
'--max-battery-temp', type=int, metavar='NUM',
help='Wait for the battery to have this temp or lower.')
parser.add_argument(
'--min-battery-level', type=int, metavar='NUM',
help='wait for the device to reach this minimum battery'
' level before trying to continue')
parser.add_argument(
'--output-device-blacklist',
help='Json file to output the device blacklist.')
parser.add_argument(
'--reboot-timeout', metavar='SECS', type=int,
help='when wiping the device, max number of seconds to'
' wait after each reboot '
'(default: %s)' % _DEFAULT_TIMEOUTS.HELP_TEXT)
parser.add_argument(
'--remove-system-apps', nargs='*', dest='system_app_remove_list',
help='DEPRECATED: use --remove-system-packages instead. '
'The names of system apps to remove. ')
parser.add_argument(
'--remove-system-packages', nargs='*', dest='system_package_remove_list',
help='The names of system packages to remove.')
parser.add_argument(
'--remove-system-webview', action='store_true',
help='DEPRECATED: use --remove-system-packages '
'com.google.android.webview com.android.webview '
'Remove the system webview from devices.')
parser.add_argument(
'--skip-wipe', action='store_true', default=False,
help='do not wipe device data during provisioning')
parser.add_argument(
'-v', '--verbose', action='count', default=1,
help='Log more information.')
# No-op arguments for compatibility with build/android/provision_devices.py.
# TODO(jbudorick): Remove these once all callers have stopped using them.
parser.add_argument(
'--chrome-specific-wipe', action='store_true',
help=argparse.SUPPRESS)
parser.add_argument(
'--phase', action='append',
help=argparse.SUPPRESS)
parser.add_argument(
'-r', '--auto-reconnect', action='store_true',
help=argparse.SUPPRESS)
parser.add_argument(
'-t', '--target',
help=argparse.SUPPRESS)
args = parser.parse_args(raw_args)
run_tests_helper.SetLogLevel(args.verbose)
script_common.InitializeEnvironment(args)
try:
return ProvisionDevices(
args.devices,
args.blacklist_file,
adb_key_files=args.adb_key_files,
disable_location=args.disable_location,
disable_mock_location=args.disable_mock_location,
disable_network=args.disable_network,
disable_system_chrome=args.disable_system_chrome,
emulators=args.emulators,
enable_java_debug=args.enable_java_debug,
max_battery_temp=args.max_battery_temp,
min_battery_level=args.min_battery_level,
output_device_blacklist=args.output_device_blacklist,
reboot_timeout=args.reboot_timeout,
remove_system_webview=args.remove_system_webview,
system_app_remove_list=args.system_app_remove_list,
system_package_remove_list=args.system_package_remove_list,
wipe=not args.skip_wipe and not args.emulators)
except (device_errors.DeviceUnreachableError, device_errors.NoDevicesError):
logging.exception('Unable to provision local devices.')
return exit_codes.INFRA
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| benschmaus/catapult | devil/devil/android/tools/provision_devices.py | Python | bsd-3-clause | 23,798 | 0.00832 |
# coding=utf-8
# Author: Mr_Orange <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import json
from base64 import b64encode
from requests.exceptions import RequestException
import sickbeard
from sickbeard import logger
from sickbeard.clients.generic import GenericClient
class DelugeAPI(GenericClient):
def __init__(self, host=None, username=None, password=None):
super(DelugeAPI, self).__init__('Deluge', host, username, password)
self.url = '{host}json'.format(host=self.host)
def _get_auth(self):
post_data = json.dumps({
'method': 'auth.login',
'params': [
self.password,
],
'id': 1,
})
try:
self.response = self.session.post(self.url, data=post_data.encode('utf-8'),
verify=sickbeard.TORRENT_VERIFY_CERT)
except RequestException:
return None
self.auth = self.response.json()['result']
post_data = json.dumps({
'method': 'web.connected',
'params': [],
'id': 10,
})
try:
self.response = self.session.post(self.url, data=post_data.encode('utf-8'),
verify=sickbeard.TORRENT_VERIFY_CERT)
except RequestException:
return None
connected = self.response.json()['result']
if not connected:
post_data = json.dumps({
'method': 'web.get_hosts',
'params': [],
'id': 11,
})
try:
self.response = self.session.post(self.url, data=post_data.encode('utf-8'),
verify=sickbeard.TORRENT_VERIFY_CERT)
except RequestException:
return None
hosts = self.response.json()['result']
if not hosts:
logger.log('{name}: WebUI does not contain daemons'.format(name=self.name), logger.ERROR)
return None
post_data = json.dumps({
'method': 'web.connect',
'params': [
hosts[0][0],
],
'id': 11,
})
try:
self.response = self.session.post(self.url, data=post_data.encode('utf-8'),
verify=sickbeard.TORRENT_VERIFY_CERT)
except RequestException:
return None
post_data = json.dumps({
'method': 'web.connected',
'params': [],
'id': 10,
})
try:
self.response = self.session.post(self.url, data=post_data.encode('utf-8'),
verify=sickbeard.TORRENT_VERIFY_CERT)
except RequestException:
return None
connected = self.response.json()['result']
if not connected:
logger.log('{name}: WebUI could not connect to daemon'.format(name=self.name), logger.ERROR)
return None
return self.auth
def _add_torrent_uri(self, result):
post_data = json.dumps({
'method': 'core.add_torrent_magnet',
'params': [
result.url,
{},
],
'id': 2,
})
self._request(method='post', data=post_data)
result.hash = self.response.json()['result']
return self.response.json()['result']
def _add_torrent_file(self, result):
post_data = json.dumps({
'method': 'core.add_torrent_file',
'params': [
'{name}.torrent'.format(name=result.name),
b64encode(result.content),
{},
],
'id': 2,
})
self._request(method='post', data=post_data)
result.hash = self.response.json()['result']
return self.response.json()['result']
def _set_torrent_label(self, result):
label = sickbeard.TORRENT_LABEL.lower()
if result.show.is_anime:
label = sickbeard.TORRENT_LABEL_ANIME.lower()
if ' ' in label:
logger.log('{name}: Invalid label. Label must not contain a space'.format
(name=self.name), logger.ERROR)
return False
if label:
# check if label already exists and create it if not
post_data = json.dumps({
'method': 'label.get_labels',
'params': [],
'id': 3,
})
self._request(method='post', data=post_data)
labels = self.response.json()['result']
if labels is not None:
if label not in labels:
logger.log('{name}: {label} label does not exist in Deluge we must add it'.format
(name=self.name, label=label), logger.DEBUG)
post_data = json.dumps({
'method': 'label.add',
'params': [
label,
],
'id': 4,
})
self._request(method='post', data=post_data)
logger.log('{name}: {label} label added to Deluge'.format
(name=self.name, label=label), logger.DEBUG)
# add label to torrent
post_data = json.dumps({
'method': 'label.set_torrent',
'params': [
result.hash,
label,
],
'id': 5,
})
self._request(method='post', data=post_data)
logger.log('{name}: {label} label added to torrent'.format
(name=self.name, label=label), logger.DEBUG)
else:
logger.log('{name}: label plugin not detected'.format
(name=self.name), logger.DEBUG)
return False
return not self.response.json()['error']
def _set_torrent_ratio(self, result):
ratio = None
if result.ratio:
ratio = result.ratio
# blank is default client ratio, so we also shouldn't set ratio
if ratio and float(ratio) >= 0:
post_data = json.dumps({
'method': 'core.set_torrent_stop_at_ratio',
'params': [
result.hash,
True,
],
'id': 5,
})
self._request(method='post', data=post_data)
# Return false if we couldn't enable setting set_torrent_stop_at_ratio. No reason to set ratio.
if self.response.json()['error']:
return False
post_data = json.dumps({
'method': 'core.set_torrent_stop_ratio',
'params': [
result.hash,
float(ratio),
],
'id': 6,
})
self._request(method='post', data=post_data)
return not self.response.json()['error']
elif ratio and float(ratio) == -1:
# Disable stop at ratio to seed forever
post_data = json.dumps({"method": "core.set_torrent_stop_at_ratio",
"params": [result.hash, False],
"id": 5})
self._request(method='post', data=post_data)
return not self.response.json()['error']
return True
def _set_torrent_path(self, result):
if sickbeard.TORRENT_PATH:
post_data = json.dumps({
'method': 'core.set_torrent_move_completed',
'params': [
result.hash,
True,
],
'id': 7,
})
self._request(method='post', data=post_data)
post_data = json.dumps({
'method': 'core.set_torrent_move_completed_path',
'params': [
result.hash,
sickbeard.TORRENT_PATH,
],
'id': 8,
})
self._request(method='post', data=post_data)
return not self.response.json()['error']
return True
def _set_torrent_pause(self, result):
if sickbeard.TORRENT_PAUSED:
post_data = json.dumps({
'method': 'core.pause_torrent',
'params': [
[result.hash],
],
'id': 9,
})
self._request(method='post', data=post_data)
return not self.response.json()['error']
return True
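# Illustrative JSON-RPC exchange with the Deluge WebUI (assumed wire format,
# shown for clarity; not part of the original module):
#   -> {"method": "auth.login", "params": ["<password>"], "id": 1}
#   <- {"id": 1, "result": true, "error": null}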
api = DelugeAPI()
| Thraxis/pymedusa | sickbeard/clients/deluge_client.py | Python | gpl-3.0 | 9,733 | 0.001952 |
# -*- coding: utf-8 -*-
from datetime import date
from django import forms
from django.forms.util import ErrorList
from openedx.core.djangoapps.ga_operation.ga_operation_base_form import (GaOperationEmailField,
FIELD_NOT_INPUT, INVALID_EMAIL)
class SearchByEmailAndPeriodDateForm(forms.Form):
start_date = forms.DateField(input_formats=['%Y%m%d'], required=False)
end_date = forms.DateField(input_formats=['%Y%m%d'], required=False)
email = GaOperationEmailField(required=True,
error_messages={'required': FIELD_NOT_INPUT, 'invalid': INVALID_EMAIL})
def clean_start_date(self):
val = self.cleaned_data['start_date']
if not val:
return date(2014, 1, 9)
if val < date(2014, 1, 9):
raise forms.ValidationError(u"集計開始日は20140109以降の日付を入力してください。")
return val
def clean_end_date(self):
val = self.cleaned_data['end_date']
if not val:
return date.today()
if val > date.today():
raise forms.ValidationError(u"集計終了日は本日以前の日付を入力してください。")
return val
def clean(self):
"""
Checks multi fields correlation
:return: clean data
"""
cleaned_data = self.cleaned_data
# check inputs
start_date = cleaned_data['start_date']
end_date = cleaned_data['end_date']
if start_date > end_date:
self.errors["end_date"] = ErrorList([u"終了日は開始日以降の日付を入力してください。"])
return cleaned_data
| nttks/edx-platform | lms/djangoapps/ga_operation/forms/search_by_email_and_period_date_form.py | Python | agpl-3.0 | 1,730 | 0.001877 |
from dipy.align.transforms import regtransforms, Transform
import numpy as np
from numpy.testing import (assert_array_equal,
                           assert_array_almost_equal,
                           assert_equal,
                           assert_raises)
def test_number_of_parameters():
expected_params = {('TRANSLATION', 2): 2,
('TRANSLATION', 3): 3,
('ROTATION', 2): 1,
('ROTATION', 3): 3,
('RIGID', 2): 3,
('RIGID', 3): 6,
('SCALING', 2): 1,
('SCALING', 3): 1,
('AFFINE', 2): 6,
('AFFINE', 3): 12}
for ttype, transform in regtransforms.items():
assert_equal(
transform.get_number_of_parameters(),
expected_params[ttype])
def test_param_to_matrix_2d():
rng = np.random.RandomState()
# Test translation matrix 2D
transform = regtransforms[('TRANSLATION', 2)]
dx, dy = rng.uniform(size=(2,))
theta = np.array([dx, dy])
expected = np.array([[1, 0, dx], [0, 1, dy], [0, 0, 1]])
actual = transform.param_to_matrix(theta)
assert_array_equal(actual, expected)
# Test rotation matrix 2D
transform = regtransforms[('ROTATION', 2)]
angle = rng.uniform()
theta = np.array([angle])
ct = np.cos(angle)
st = np.sin(angle)
expected = np.array([[ct, -st, 0], [st, ct, 0], [0, 0, 1]])
actual = transform.param_to_matrix(theta)
assert_array_almost_equal(actual, expected)
# Test rigid matrix 2D
transform = regtransforms[('RIGID', 2)]
angle, dx, dy = rng.uniform(size=(3,))
theta = np.array([angle, dx, dy])
ct = np.cos(angle)
st = np.sin(angle)
expected = np.array([[ct, -st, dx], [st, ct, dy], [0, 0, 1]])
actual = transform.param_to_matrix(theta)
assert_array_almost_equal(actual, expected)
# Test rigid matrix 2D
transform = regtransforms[('SCALING', 2)]
factor = rng.uniform()
theta = np.array([factor])
expected = np.array([[factor, 0, 0], [0, factor, 0], [0, 0, 1]])
actual = transform.param_to_matrix(theta)
assert_array_almost_equal(actual, expected)
# Test affine 2D
transform = regtransforms[('AFFINE', 2)]
theta = rng.uniform(size=(6,))
expected = np.eye(3)
expected[0, :] = theta[:3]
expected[1, :] = theta[3:6]
actual = transform.param_to_matrix(theta)
assert_array_almost_equal(actual, expected)
# Verify that ValueError is raised if incorrect number of parameters
for transform in regtransforms.values():
n = transform.get_number_of_parameters()
# Set incorrect number of parameters
theta = np.zeros(n + 1, dtype=np.float64)
assert_raises(ValueError, transform.param_to_matrix, theta)
def test_param_to_matrix_3d():
rng = np.random.RandomState()
# Test translation matrix 3D
transform = regtransforms[('TRANSLATION', 3)]
dx, dy, dz = rng.uniform(size=(3,))
theta = np.array([dx, dy, dz])
expected = np.array([[1, 0, 0, dx],
[0, 1, 0, dy],
[0, 0, 1, dz],
[0, 0, 0, 1]])
actual = transform.param_to_matrix(theta)
assert_array_equal(actual, expected)
# Test rotation matrix 3D
transform = regtransforms[('ROTATION', 3)]
theta = rng.uniform(size=(3,))
ca = np.cos(theta[0])
sa = np.sin(theta[0])
cb = np.cos(theta[1])
sb = np.sin(theta[1])
cc = np.cos(theta[2])
sc = np.sin(theta[2])
X = np.array([[1, 0, 0],
[0, ca, -sa],
[0, sa, ca]])
Y = np.array([[cb, 0, sb],
[0, 1, 0],
[-sb, 0, cb]])
Z = np.array([[cc, -sc, 0],
[sc, cc, 0],
[0, 0, 1]])
R = Z.dot(X.dot(Y)) # Apply in order: Y, X, Z (Y goes to the right)
expected = np.eye(4)
expected[:3, :3] = R[:3, :3]
actual = transform.param_to_matrix(theta)
assert_array_almost_equal(actual, expected)
# Test rigid matrix 3D
transform = regtransforms[('RIGID', 3)]
theta = rng.uniform(size=(6,))
ca = np.cos(theta[0])
sa = np.sin(theta[0])
cb = np.cos(theta[1])
sb = np.sin(theta[1])
cc = np.cos(theta[2])
sc = np.sin(theta[2])
X = np.array([[1, 0, 0],
[0, ca, -sa],
[0, sa, ca]])
Y = np.array([[cb, 0, sb],
[0, 1, 0],
[-sb, 0, cb]])
Z = np.array([[cc, -sc, 0],
[sc, cc, 0],
[0, 0, 1]])
R = Z.dot(X.dot(Y)) # Apply in order: Y, X, Z (Y goes to the right)
expected = np.eye(4)
expected[:3, :3] = R[:3, :3]
expected[:3, 3] = theta[3:6]
actual = transform.param_to_matrix(theta)
assert_array_almost_equal(actual, expected)
# Test scaling matrix 3D
transform = regtransforms[('SCALING', 3)]
factor = rng.uniform()
theta = np.array([factor])
expected = np.array([[factor, 0, 0, 0],
[0, factor, 0, 0],
[0, 0, factor, 0],
[0, 0, 0, 1]])
actual = transform.param_to_matrix(theta)
assert_array_almost_equal(actual, expected)
# Test affine 3D
transform = regtransforms[('AFFINE', 3)]
theta = rng.uniform(size=(12,))
expected = np.eye(4)
expected[0, :] = theta[:4]
expected[1, :] = theta[4:8]
expected[2, :] = theta[8:12]
actual = transform.param_to_matrix(theta)
assert_array_almost_equal(actual, expected)
# Verify that ValueError is raised if incorrect number of parameters
for transform in regtransforms.values():
n = transform.get_number_of_parameters()
# Set incorrect number of parameters
theta = np.zeros(n + 1, dtype=np.float64)
assert_raises(ValueError, transform.param_to_matrix, theta)
def test_identity_parameters():
for transform in regtransforms.values():
dim = transform.get_dim()
theta = transform.get_identity_parameters()
expected = np.eye(dim + 1)
actual = transform.param_to_matrix(theta)
assert_array_almost_equal(actual, expected)
def test_jacobian_functions():
rng = np.random.RandomState()
# Compare the analytical Jacobians with their numerical approximations
h = 1e-8
nsamples = 50
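    # Forward-difference approximation used below (for reference):
    #   J[:, i] ~= ((T(theta + h * e_i) - T(theta)).dot(x_hom) / h)[:dim]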
for transform in regtransforms.values():
n = transform.get_number_of_parameters()
dim = transform.get_dim()
expected = np.empty((dim, n))
theta = rng.uniform(size=(n,))
T = transform.param_to_matrix(theta)
for j in range(nsamples):
x = 255 * (rng.uniform(size=(dim,)) - 0.5)
actual = transform.jacobian(theta, x)
# Approximate with finite differences
x_hom = np.ones(dim + 1)
x_hom[:dim] = x[:]
for i in range(n):
dtheta = theta.copy()
dtheta[i] += h
dT = np.array(transform.param_to_matrix(dtheta))
g = (dT - T).dot(x_hom) / h
expected[:, i] = g[:dim]
assert_array_almost_equal(actual, expected, decimal=5)
# Test ValueError is raised when theta parameter doesn't have the right
# length
    for transform in regtransforms.values():
        n = transform.get_number_of_parameters()
        dim = transform.get_dim()
        # Wrong number of parameters
        theta = np.zeros(n + 1)
        x = np.zeros(dim)
assert_raises(ValueError, transform.jacobian, theta, x)
def test_invalid_transform():
# Note: users should not attempt to use the base class Transform:
# they should get an instance of one of its derived classes from the
# regtransforms dictionary (the base class is not contained there)
# If for some reason the user instantiates it and attempts to use it,
# however, it will raise exceptions when attempting to retrieve its
# Jacobian, identity parameters or its matrix representation. It will
# return -1 if queried about its dimension or number of parameters
transform = Transform()
theta = np.ndarray(3)
x = np.ndarray(3)
assert_raises(ValueError, transform.jacobian, theta, x)
assert_raises(ValueError, transform.get_identity_parameters)
assert_raises(ValueError, transform.param_to_matrix, theta)
expected = -1
actual = transform.get_number_of_parameters()
assert_equal(actual, expected)
actual = transform.get_dim()
assert_equal(actual, expected)
if __name__ == '__main__':
test_number_of_parameters()
test_jacobian_functions()
test_param_to_matrix_2d()
test_param_to_matrix_3d()
test_identity_parameters()
test_invalid_transform()
| FrancoisRheaultUS/dipy | dipy/align/tests/test_transforms.py | Python | bsd-3-clause | 8,834 | 0 |
N = 46340
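# 46340 ~ sqrt(2**31 - 1): sieving primes up to here is enough to trial-divide
# any 32-bit signed integer.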
table = list(range(N))
for i in range(2, int(N ** 0.5) + 1):
    if table[i]:
        for mult in range(i ** 2, N, i):
            table[mult] = False
primetable = [p for p in table if p][1:]
p = 0
a = 196
if a < 0:
    p = 1
    a *= -1
b = ''
while True:
    if a in primetable:
        b += str(a)
        break
    for i in primetable:
        if a % i == 0:
            a //= i  # floor division keeps `a` an int on Python 3 as well
            b += str(i) + ' x '
            break
if p:
    b = '-1 x ' + b
print(b)
| ytlai4851/Uva | Python/Q538.py | Python | gpl-2.0 | 392 | 0.076531 |
import datetime
import json
import mock
from socorro.cron.crontabber_app import CronTabberApp
from socorro.lib.datetimeutil import utc_now
from socorro.unittest.cron.jobs.base import IntegrationTestBase
from socorro.external.postgresql.dbapi2_util import (
execute_no_results,
execute_query_fetchall,
)
class Response(object):
def __init__(self, content, status_code=200):
if not isinstance(content, basestring):
content = json.dumps(content)
self.content = content.strip()
self.status_code = status_code
def json(self):
return json.loads(self.content)
class IntegrationTestFeaturedVersionsAutomatic(IntegrationTestBase):
def setUp(self):
super(IntegrationTestFeaturedVersionsAutomatic, self).setUp()
self.__truncate()
now = utc_now()
build_date = now - datetime.timedelta(days=30)
sunset_date = now + datetime.timedelta(days=30)
execute_no_results(
self.conn,
"""
INSERT INTO products
(product_name, sort, release_name)
VALUES
('Firefox', 1, 'firefox'),
('Fennec', 1, 'mobile')
"""
)
execute_no_results(
self.conn,
"""
INSERT INTO product_versions
(product_version_id, product_name, major_version, release_version,
version_string, version_sort, build_date, sunset_date,
featured_version, build_type)
VALUES
(
1,
'Firefox',
'15.0',
'15.0',
'15.0a1',
'000000150a1',
%(build_date)s,
%(sunset_date)s,
true,
'release'
),
(
2,
'Firefox',
'24.5',
'24.5.0',
'24.5.0',
'024005000x000',
%(build_date)s,
%(sunset_date)s,
true,
'nightly'
),
(
3,
'Firefox',
'49.0.1',
'49.0.1',
'49.0.1',
'000000150a1',
%(build_date)s,
%(sunset_date)s,
false,
'release'
),
(
4,
'Firefox',
'50.0b',
'50.0b',
'50.0b',
'024005000x000',
%(build_date)s,
%(sunset_date)s,
false,
'beta'
),
(
5,
'Firefox',
'51.0a2',
'51.0a2',
'51.0a2',
'000000150a1',
%(build_date)s,
%(sunset_date)s,
false,
'aurora'
),
(
6,
'Firefox',
'52.0a1',
'52.0a1',
'52.0a1',
'024005000x000',
%(build_date)s,
%(sunset_date)s,
false,
'nightly'
)
""",
{
'build_date': build_date,
'sunset_date': sunset_date
}
)
execute_no_results(
self.conn,
"""
INSERT INTO release_channels
(release_channel, sort)
VALUES
('nightly', 1),
('aurora', 2),
('beta', 3),
('release', 4)
"""
)
execute_no_results(
self.conn,
"""
INSERT INTO product_release_channels
(product_name, release_channel, throttle)
VALUES
('Firefox', 'nightly', 1),
('Firefox', 'aurora', 1),
('Firefox', 'beta', 1),
('Firefox', 'release', 1),
('Fennec', 'release', 1),
('Fennec', 'beta', 1)
"""
)
def tearDown(self):
self.__truncate()
super(IntegrationTestFeaturedVersionsAutomatic, self).tearDown()
def __truncate(self):
"""Named like this because the parent class has a _truncate()
        which won't be executed by super(IntegrationTestFeaturedVersionsAutomatic)
in its setUp()."""
self.conn.cursor().execute("""
TRUNCATE
products,
product_versions,
release_channels,
product_release_channels
CASCADE
""")
self.conn.commit()
def _setup_config_manager(self):
return super(IntegrationTestFeaturedVersionsAutomatic, self)._setup_config_manager(
jobs_string=(
'socorro.cron.jobs.featured_versions_automatic.FeaturedVersionsAutomaticCronApp|1d'
),
extra_value_source={
'crontabber.class-FeaturedVersionsAutomaticCronApp.api_endpoint_url': (
'https://example.com/{product}_versions.json'
),
}
)
@mock.patch('requests.get')
def test_basic_run_job(self, rget):
config_manager = self._setup_config_manager()
def mocked_get(url):
if 'firefox_versions.json' in url:
return Response({
'FIREFOX_NIGHTLY': '52.0a1',
# Kept for legacy and smooth transition.
# We USED to consider the latest AURORA version a
# featured version but we no longer build aurora
# so Socorro shouldn't pick this up any more
# even if product-details.mozilla.org supplies it.
'FIREFOX_AURORA': '51.0a2',
'FIREFOX_ESR': '45.4.0esr',
'FIREFOX_ESR_NEXT': '',
'LATEST_FIREFOX_DEVEL_VERSION': '50.0b7',
'LATEST_FIREFOX_OLDER_VERSION': '3.6.28',
'LATEST_FIREFOX_RELEASED_DEVEL_VERSION': '50.0b7',
'LATEST_FIREFOX_VERSION': '49.0.1'
})
elif 'mobile_versions.json' in url:
return Response({
'nightly_version': '52.0a1',
'alpha_version': '51.0a2',
'beta_version': '50.0b6',
'version': '49.0',
'ios_beta_version': '6.0',
'ios_version': '5.0'
})
elif 'thunderbird_versions.json' in url:
return Response({
'LATEST_THUNDERBIRD_VERSION': '45.4.0',
'LATEST_THUNDERBIRD_DEVEL_VERSION': '50.0b1',
'LATEST_THUNDERBIRD_ALPHA_VERSION': '51.0a2',
'LATEST_THUNDERBIRD_NIGHTLY_VERSION': '52.0a1',
})
else:
raise NotImplementedError(url)
rget.side_effect = mocked_get
# Check what's set up in the fixture
rows = execute_query_fetchall(
self.conn,
'select product_name, version_string, featured_version '
'from product_versions order by version_string'
)
assert sorted(rows) == [
('Firefox', '15.0a1', True),
('Firefox', '24.5.0', True),
('Firefox', '49.0.1', False),
('Firefox', '50.0b', False),
('Firefox', '51.0a2', False),
('Firefox', '52.0a1', False),
]
# This is necessary so we get a new cursor when we do other
# selects after the crontabber app has run.
self.conn.commit()
with config_manager.context() as config:
tab = CronTabberApp(config)
tab.run_all()
information = self._load_structure()
assert information['featured-versions-automatic']
assert not information['featured-versions-automatic']['last_error']
assert information['featured-versions-automatic']['last_success']
config.logger.info.assert_called_with(
'Set featured versions for Thunderbird to: '
'45.4.0, 50.0b1, 52.0a1'
)
rows = execute_query_fetchall(
self.conn,
'select product_name, version_string, featured_version '
'from product_versions'
)
expected = [
('Firefox', '15.0a1', False),
('Firefox', '24.5.0', False),
('Firefox', '49.0.1', True),
('Firefox', '50.0b', True),
# Note that the 'Aurora' branch is still mentioned but
# note that it's NOT featured (hence 'False').
('Firefox', '51.0a2', False),
('Firefox', '52.0a1', True),
]
assert sorted(rows) == expected
@mock.patch('requests.get')
def test_download_error(self, rget):
config_manager = self._setup_config_manager()
def mocked_get(url):
return Response('not here', status_code=404)
rget.side_effect = mocked_get
with config_manager.context() as config:
tab = CronTabberApp(config)
tab.run_all()
information = self._load_structure()
assert information['featured-versions-automatic']
assert information['featured-versions-automatic']['last_error']
error = information['featured-versions-automatic']['last_error']
assert 'DownloadError' in error['type']
assert '404' in error['value']
| Tayamarn/socorro | socorro/unittest/cron/jobs/test_featured_versions_automatic.py | Python | mpl-2.0 | 9,704 | 0.000309 |
"""Angles and anomalies.
"""
from astropy import units as u
from poliastro.core.angles import (
D_to_M as D_to_M_fast,
D_to_nu as D_to_nu_fast,
E_to_M as E_to_M_fast,
E_to_nu as E_to_nu_fast,
F_to_M as F_to_M_fast,
F_to_nu as F_to_nu_fast,
M_to_D as M_to_D_fast,
M_to_E as M_to_E_fast,
M_to_F as M_to_F_fast,
fp_angle as fp_angle_fast,
nu_to_D as nu_to_D_fast,
nu_to_E as nu_to_E_fast,
nu_to_F as nu_to_F_fast,
)
@u.quantity_input(D=u.rad)
def D_to_nu(D):
"""True anomaly from parabolic eccentric anomaly.
Parameters
----------
D : ~astropy.units.Quantity
        Parabolic eccentric anomaly.
Returns
-------
nu : ~astropy.units.Quantity
True anomaly.
Notes
-----
Taken from Farnocchia, Davide, Davide Bracali Cioci, and Andrea Milani.
"Robust resolution of Kepler’s equation in all eccentricity regimes."
Celestial Mechanics and Dynamical Astronomy 116, no. 1 (2013): 21-34.
"""
return (D_to_nu_fast(D.to_value(u.rad)) * u.rad).to(D.unit)
@u.quantity_input(nu=u.rad)
def nu_to_D(nu):
"""Parabolic eccentric anomaly from true anomaly.
Parameters
----------
nu : ~astropy.units.Quantity
True anomaly.
Returns
-------
D : ~astropy.units.Quantity
        Parabolic eccentric anomaly.
Notes
-----
Taken from Farnocchia, Davide, Davide Bracali Cioci, and Andrea Milani.
"Robust resolution of Kepler’s equation in all eccentricity regimes."
Celestial Mechanics and Dynamical Astronomy 116, no. 1 (2013): 21-34.
"""
return (nu_to_D_fast(nu.to_value(u.rad)) * u.rad).to(nu.unit)
@u.quantity_input(nu=u.rad, ecc=u.one)
def nu_to_E(nu, ecc):
"""Eccentric anomaly from true anomaly.
.. versionadded:: 0.4.0
Parameters
----------
nu : ~astropy.units.Quantity
True anomaly.
ecc : ~astropy.units.Quantity
Eccentricity.
Returns
-------
E : ~astropy.units.Quantity
Eccentric anomaly.
"""
return (nu_to_E_fast(nu.to_value(u.rad), ecc.value) * u.rad).to(nu.unit)
@u.quantity_input(nu=u.rad, ecc=u.one)
def nu_to_F(nu, ecc):
"""Hyperbolic eccentric anomaly from true anomaly.
Parameters
----------
nu : ~astropy.units.Quantity
True anomaly.
ecc : ~astropy.units.Quantity
Eccentricity (>1).
Returns
-------
F : ~astropy.units.Quantity
Hyperbolic eccentric anomaly.
Notes
-----
Taken from Curtis, H. (2013). *Orbital mechanics for engineering students*. 167
"""
return (nu_to_F_fast(nu.to_value(u.rad), ecc.value) * u.rad).to(nu.unit)
@u.quantity_input(E=u.rad, ecc=u.one)
def E_to_nu(E, ecc):
"""True anomaly from eccentric anomaly.
.. versionadded:: 0.4.0
Parameters
----------
E : ~astropy.units.Quantity
Eccentric anomaly.
ecc : ~astropy.units.Quantity
Eccentricity.
Returns
-------
nu : ~astropy.units.Quantity
True anomaly.
"""
return (E_to_nu_fast(E.to_value(u.rad), ecc.value) * u.rad).to(E.unit)
@u.quantity_input(F=u.rad, ecc=u.one)
def F_to_nu(F, ecc):
"""True anomaly from hyperbolic eccentric anomaly.
Parameters
----------
F : ~astropy.units.Quantity
Hyperbolic eccentric anomaly.
ecc : ~astropy.units.Quantity
Eccentricity (>1).
Returns
-------
nu : ~astropy.units.Quantity
True anomaly.
"""
return (F_to_nu_fast(F.to_value(u.rad), ecc.value) * u.rad).to(F.unit)
@u.quantity_input(M=u.rad, ecc=u.one)
def M_to_E(M, ecc):
"""Eccentric anomaly from mean anomaly.
.. versionadded:: 0.4.0
Parameters
----------
M : ~astropy.units.Quantity
Mean anomaly.
ecc : ~astropy.units.Quantity
Eccentricity.
Returns
-------
E : ~astropy.units.Quantity
Eccentric anomaly.
"""
return (M_to_E_fast(M.to_value(u.rad), ecc.value) * u.rad).to(M.unit)
@u.quantity_input(M=u.rad, ecc=u.one)
def M_to_F(M, ecc):
"""Hyperbolic eccentric anomaly from mean anomaly.
Parameters
----------
M : ~astropy.units.Quantity
Mean anomaly.
ecc : ~astropy.units.Quantity
Eccentricity (>1).
Returns
-------
F : ~astropy.units.Quantity
Hyperbolic eccentric anomaly.
"""
return (M_to_F_fast(M.to_value(u.rad), ecc.value) * u.rad).to(M.unit)
@u.quantity_input(M=u.rad)
def M_to_D(M):
"""Parabolic eccentric anomaly from mean anomaly.
Parameters
----------
M : ~astropy.units.Quantity
Mean anomaly.
Returns
-------
D : ~astropy.units.Quantity
Parabolic eccentric anomaly.
"""
return (M_to_D_fast(M.to_value(u.rad)) * u.rad).to(M.unit)
@u.quantity_input(E=u.rad, ecc=u.one)
def E_to_M(E, ecc):
"""Mean anomaly from eccentric anomaly.
.. versionadded:: 0.4.0
Parameters
----------
E : ~astropy.units.Quantity
Eccentric anomaly.
ecc : ~astropy.units.Quantity
Eccentricity.
Returns
-------
M : ~astropy.units.Quantity
Mean anomaly.
"""
return (E_to_M_fast(E.to_value(u.rad), ecc.value) * u.rad).to(E.unit)
@u.quantity_input(F=u.rad, ecc=u.one)
def F_to_M(F, ecc):
"""Mean anomaly from eccentric anomaly.
Parameters
----------
F : ~astropy.units.Quantity
Hyperbolic eccentric anomaly.
ecc : ~astropy.units.Quantity
Eccentricity (>1).
Returns
-------
M : ~astropy.units.Quantity
Mean anomaly.
"""
return (F_to_M_fast(F.to_value(u.rad), ecc.value) * u.rad).to(F.unit)
@u.quantity_input(D=u.rad)
def D_to_M(D):
"""Mean anomaly from eccentric anomaly.
Parameters
----------
D : ~astropy.units.Quantity
Parabolic eccentric anomaly.
Returns
-------
M : ~astropy.units.Quantity
Mean anomaly.
"""
return (D_to_M_fast(D.to_value(u.rad)) * u.rad).to(D.unit)
@u.quantity_input(nu=u.rad, ecc=u.one)
def fp_angle(nu, ecc):
"""Flight path angle.
.. versionadded:: 0.4.0
Parameters
----------
nu : ~astropy.units.Quantity
True anomaly.
ecc : ~astropy.units.Quantity
Eccentricity.
Notes
-----
Algorithm taken from Vallado 2007, pp. 113.
"""
return (fp_angle_fast(nu.to_value(u.rad), ecc.value) * u.rad).to(nu.unit)
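# A minimal usage sketch (illustrative values, not part of the library API):
#
#   >>> from astropy import units as u
#   >>> nu, ecc = 60 * u.deg, 0.3 * u.one
#   >>> M = E_to_M(nu_to_E(nu, ecc), ecc)  # true -> eccentric -> mean anomaly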
| poliastro/poliastro | src/poliastro/twobody/angles.py | Python | mit | 6,419 | 0.000156 |
import pytest
from six.moves.urllib.parse import quote
from ...api import MissingEntry
from .base import BaseTestFormgrade
from .manager import HubAuthNotebookServerUserManager
@pytest.mark.formgrader
@pytest.mark.usefixtures("all_formgraders")
class TestGradebook(BaseTestFormgrade):
def _click_element(self, name):
self.browser.find_element_by_css_selector(name).click()
def test_start(self):
# This is just a fake test, since starting up the browser and formgrader
# can take a little while. So if anything goes wrong there, this test
# will fail, rather than having it fail on some other test.
pass
def test_login(self):
if self.manager.jupyterhub is None:
pytest.skip("JupyterHub is not running")
self._get(self.manager.base_formgrade_url)
self._wait_for_element("username_input")
next_url = self.formgrade_url().replace(self.manager.base_url, "")
self._check_url("{}/hub/login?next={}".format(self.manager.base_url, next_url))
# fill out the form
self.browser.find_element_by_id("username_input").send_keys("foobar")
self.browser.find_element_by_id("login_submit").click()
# check the url
self._wait_for_gradebook_page("")
def test_load_assignment_list(self):
# load the main page and make sure it is the Assignments page
self._get(self.formgrade_url())
self._wait_for_gradebook_page("")
self._check_breadcrumbs("Assignments")
# load the assignments page
self._load_gradebook_page("assignments")
self._check_breadcrumbs("Assignments")
# click on the "Problem Set 1" link
self._click_link("Problem Set 1")
self._wait_for_gradebook_page("assignments/Problem Set 1")
def test_load_assignment_notebook_list(self):
self._load_gradebook_page("assignments/Problem Set 1")
self._check_breadcrumbs("Assignments", "Problem Set 1")
# click the "Assignments" link
self._click_link("Assignments")
self._wait_for_gradebook_page("assignments")
self.browser.back()
# click on the problem link
for problem in self.gradebook.find_assignment("Problem Set 1").notebooks:
self._click_link(problem.name)
self._wait_for_gradebook_page("assignments/Problem Set 1/{}".format(problem.name))
self.browser.back()
def test_load_assignment_notebook_submissions_list(self):
for problem in self.gradebook.find_assignment("Problem Set 1").notebooks:
self._load_gradebook_page("assignments/Problem Set 1/{}".format(problem.name))
self._check_breadcrumbs("Assignments", "Problem Set 1", problem.name)
# click the "Assignments" link
self._click_link("Assignments")
self._wait_for_gradebook_page("assignments")
self.browser.back()
# click the "Problem Set 1" link
self._click_link("Problem Set 1")
self._wait_for_gradebook_page("assignments/Problem Set 1")
self.browser.back()
submissions = problem.submissions
submissions.sort(key=lambda x: x.id)
for i in range(len(submissions)):
# click on the "Submission #i" link
self._click_link("Submission #{}".format(i + 1))
self._wait_for_formgrader("submissions/{}/?index=0".format(submissions[i].id))
self.browser.back()
def test_load_student_list(self):
# load the student view
self._load_gradebook_page("students")
self._check_breadcrumbs("Students")
# click on student
for student in self.gradebook.students:
## TODO: they should have a link here, even if they haven't submitted anything!
if len(student.submissions) == 0:
continue
self._click_link("{}, {}".format(student.last_name, student.first_name))
self._wait_for_gradebook_page("students/{}".format(student.id))
self.browser.back()
def test_load_student_assignment_list(self):
for student in self.gradebook.students:
self._load_gradebook_page("students/{}".format(student.id))
self._check_breadcrumbs("Students", student.id)
try:
self.gradebook.find_submission("Problem Set 1", student.id)
except MissingEntry:
## TODO: make sure link doesn't exist
continue
self._click_link("Problem Set 1")
self._wait_for_gradebook_page("students/{}/Problem Set 1".format(student.id))
def test_load_student_assignment_submissions_list(self):
for student in self.gradebook.students:
try:
submission = self.gradebook.find_submission("Problem Set 1", student.id)
except MissingEntry:
## TODO: make sure link doesn't exist
continue
self._load_gradebook_page("students/{}/Problem Set 1".format(student.id))
self._check_breadcrumbs("Students", student.id, "Problem Set 1")
for problem in self.gradebook.find_assignment("Problem Set 1").notebooks:
submission = self.gradebook.find_submission_notebook(problem.name, "Problem Set 1", student.id)
self._click_link(problem.name)
self._wait_for_formgrader("submissions/{}/?index=0".format(submission.id))
self.browser.back()
self._wait_for_gradebook_page("students/{}/Problem Set 1".format(student.id))
def test_switch_views(self):
# load the main page
self._load_gradebook_page("assignments")
# click the "Change View" button
self._click_link("Change View", partial=True)
# click the "Students" option
self._click_link("Students")
self._wait_for_gradebook_page("students")
# click the "Change View" button
self._click_link("Change View", partial=True)
# click the "Assignments" option
self._click_link("Assignments")
self._wait_for_gradebook_page("assignments")
def test_formgrade_view_breadcrumbs(self):
for problem in self.gradebook.find_assignment("Problem Set 1").notebooks:
submissions = problem.submissions
submissions.sort(key=lambda x: x.id)
for i, submission in enumerate(submissions):
self._get(self.formgrade_url("submissions/{}".format(submission.id)))
self._wait_for_formgrader("submissions/{}/?index=0".format(submission.id))
# click on the "Assignments" link
self._click_link("Assignments")
self._wait_for_gradebook_page("assignments")
# go back
self.browser.back()
self._wait_for_formgrader("submissions/{}/?index=0".format(submission.id))
# click on the "Problem Set 1" link
self._click_link("Problem Set 1")
self._wait_for_gradebook_page("assignments/Problem Set 1")
# go back
self.browser.back()
self._wait_for_formgrader("submissions/{}/?index=0".format(submission.id))
# click on the problem link
self._click_link(problem.name)
self._wait_for_gradebook_page("assignments/Problem Set 1/{}".format(problem.name))
# go back
self.browser.back()
self._wait_for_formgrader("submissions/{}/?index=0".format(submission.id))
def test_load_live_notebook(self):
for problem in self.gradebook.find_assignment("Problem Set 1").notebooks:
submissions = problem.submissions
submissions.sort(key=lambda x: x.id)
for i, submission in enumerate(submissions):
self._get(self.formgrade_url("submissions/{}".format(submission.id)))
self._wait_for_formgrader("submissions/{}/?index=0".format(submission.id))
# check the live notebook link
self._click_link("Submission #{}".format(i + 1))
self.browser.switch_to_window(self.browser.window_handles[1])
self._wait_for_notebook_page(self.notebook_url("autograded/{}/Problem Set 1/{}.ipynb".format(submission.student.id, problem.name)))
self.browser.close()
self.browser.switch_to_window(self.browser.window_handles[0])
def test_formgrade_images(self):
submissions = self.gradebook.find_notebook("Problem 1", "Problem Set 1").submissions
submissions.sort(key=lambda x: x.id)
for submission in submissions:
self._get(self.formgrade_url("submissions/{}".format(submission.id)))
self._wait_for_formgrader("submissions/{}/?index=0".format(submission.id))
images = self.browser.find_elements_by_tag_name("img")
for image in images:
# check that the image is loaded, and that it has a width
assert self.browser.execute_script("return arguments[0].complete", image)
assert self.browser.execute_script("return arguments[0].naturalWidth", image) > 0
def test_next_prev_assignments(self):
problem = self.gradebook.find_notebook("Problem 1", "Problem Set 1")
submissions = problem.submissions
submissions.sort(key=lambda x: x.id)
# test navigating both with the arrow keys and with clicking the
# next/previous links
next_functions = [
(self._click_element, ".next a")
]
prev_functions = [
(self._click_element, ".previous a")
]
for n, p in zip(next_functions, prev_functions):
# first element is the function, the other elements are the arguments
# to that function
next_function = lambda: n[0](*n[1:])
prev_function = lambda: p[0](*p[1:])
# Load the first submission
self._get(self.formgrade_url("submissions/{}".format(submissions[0].id)))
self._wait_for_formgrader("submissions/{}/?index=0".format(submissions[0].id))
# Move to the next submission
next_function()
self._wait_for_formgrader("submissions/{}/?index=0".format(submissions[1].id))
# Move to the next submission (should return to notebook list)
next_function()
self._wait_for_gradebook_page("assignments/Problem Set 1/Problem 1")
# Go back
self.browser.back()
self._wait_for_formgrader("submissions/{}/?index=0".format(submissions[1].id))
# Move to the previous submission
prev_function()
self._wait_for_formgrader("submissions/{}/?index=0".format(submissions[0].id))
# Move to the previous submission (should return to the notebook list)
prev_function()
self._wait_for_gradebook_page("assignments/Problem Set 1/Problem 1")
def test_logout(self):
"""Make sure after we've logged out we can't access any of the formgrader pages."""
if self.manager.jupyterhub is None:
pytest.skip("JupyterHub is not running")
# logout and wait for the login page to appear
self._get("{}/hub".format(self.manager.base_url))
self._wait_for_element("logout")
self._wait_for_visibility_of_element("logout")
element = self.browser.find_element_by_id("logout")
element.click()
self._wait_for_element("username_input")
# try going to a formgrader page
self._get(self.manager.base_formgrade_url)
self._wait_for_element("username_input")
next_url = self.formgrade_url().replace(self.manager.base_url, "")
self._check_url("{}/hub/login?next={}".format(self.manager.base_url, next_url))
# try going to a live notebook page
problem = self.gradebook.find_assignment("Problem Set 1").notebooks[0]
submission = sorted(problem.submissions, key=lambda x: x.id)[0]
url = self.notebook_url("autograded/{}/Problem Set 1/{}.ipynb".format(submission.student.id, problem.name))
self._get(url)
self._wait_for_element("username_input")
self._check_url("{}/hub/login".format(self.manager.base_url))
| EdwardJKim/nbgrader | nbgrader/tests/formgrader/test_gradebook_navigation.py | Python | bsd-3-clause | 12,452 | 0.004096 |
#!/usr/bin/python
#
# Copyright (C) 2020 abetusk
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import sys
import os
import numpy
import random
import getopt
import re
import scipy
import numpy as np
from scipy.interpolate import griddata
import grbl
from termcolor import colored, cprint
Z_ZERO = -1
G1_SPEED = 50
G0_SPEED = 100
SPINDLE_SPEED = 1000.0
# format float
#
def _s(x):
return "{:.8f}".format(x)
DEFAULT_FEED_RATE = 60
DEFAULT_DEVICE = "/dev/ttyUSB0"
unit = "mm"
cur_x, cur_y, cur_z = 0, 0, 0
z_pos = 'up'
dry_run = False
z_threshold = 0.0
z_plunge_inch = -0.004
z_plunge_mm = z_plunge_inch * 25.4
grbl_spindle = True
tty_device = DEFAULT_DEVICE
output = None
verbose = True
def usage(ofp = sys.stdout):
ofp.write( "\nDo a height probe, interploate GCode file then execute job\n")
ofp.write( "\nusage:\n")
ofp.write( " -g <gcode file> gcode file\n")
ofp.write( " [-m <height map>] height map\n")
ofp.write( " [-O <out height>] output height map file (default stdout)\n")
ofp.write( " [-D] dry run (do not connect to GRBL)\n")
ofp.write( " [-S <S>] Set S value\n")
ofp.write( " [-z <threshold>] z threshold (default to " + str(z_threshold) + ")\n")
ofp.write( " [-p <zplunge>] amount under height sensed part to plunge (default " + str(z_plunge_mm) + "mm)\n")
ofp.write( " [-T <device>] use <device>\n")
ofp.write( " [--grbl-spindle] Use M03/M05 for spindle control\n")
ofp.write( " [--no-grbl-spindle] Don't use M03/M05 for spindle control\n")
ofp.write( " [-h|--help] help (this screen)\n")
ofp.write( "\n")
gcode_file = None
height_map_file = None
out_height_map_file = None
try:
opts, args = getopt.getopt(sys.argv[1:], "hm:g:z:Dp:O:T:S:", ["help", "output=", "grbl-spindle", "no-grbl-spindle", "tty="])
except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
for o, a in opts:
if o == "-g":
gcode_file = a
elif o in ("-h", "--help"):
usage(sys.stdout)
sys.exit(0)
elif o == "-m":
height_map_file = a
elif o == "-z":
z_threshold = float(a)
elif o == "-p":
z_plunge_mm = float(a)
elif o == "-D":
dry_run = True
elif o == "-O":
out_height_map_file = a
elif o == "-T":
tty_device = a
elif o == "-S":
SPINDLE_SPEED = float(a)
elif o == "--grbl-spindle":
grbl_spindle = True
elif o == "--no-grbl-spindle":
grbl_spindle = False
else:
assert False, "unhandled option"
if gcode_file is None:
sys.stderr.write("Provide gcode file\n")
usage(sys.stderr)
sys.exit(-1)
pnts = []
pnts_xy = []
pnts_z = []
def read_gcode_file(gcode_filename):
cur_x = 0.0
cur_y = 0.0
xvalid = False
yvalid = False
res = {
"lines":[],
"status":"init",
"error":"",
"min_x":0.0,
"min_y":0.0,
"max_x":0.0,
"max_y":0.0
}
lines = []
with open(gcode_filename, "r") as gc:
for line in gc:
line = line.strip()
line = line.rstrip('\n')
res["lines"].append(line)
m = re.match('^\s*(\(|;)', line)
if m: continue
m = re.match('.*[xX]\s*(-?\d+(\.\d+)?)', line)
if m:
cur_x = float(m.group(1))
if not xvalid:
res["min_x"] = cur_x
res["max_x"] = cur_x
xvalid = True
if cur_x < res["min_x"]: res["min_x"] = cur_x
if cur_x > res["max_x"]: res["max_x"] = cur_x
m = re.match('.*[yY]\s*(-?\d+(\.\d+)?)', line)
if m:
cur_y = float(m.group(1))
if not yvalid:
res["min_y"] = cur_y
res["max_y"] = cur_y
yvalid = True
if cur_y < res["min_y"]: res["min_y"] = cur_y
if cur_y > res["max_y"]: res["max_y"] = cur_y
res["status"] = "ok"
return res
def interpolate_gcode(gcode, pnts_xy, pnts_z, _feed=DEFAULT_FEED_RATE):
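    # Sample the probed height map (pnts_xy, pnts_z) with
    # scipy.interpolate.griddata so each cutting move follows the measured
    # surface; moves with Z at or above z_threshold travel at a safe height
    # (z_ub) computed from the highest probed point.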
unit = "mm"
    cur_x, cur_y, cur_z = 0, 0, 0
    g_mode = None  # set by the first G0/G1 word; a move before one is invalid
z_pos = 'up'
z_pos_prv = z_pos
z_threshold = 0.0
z_plunge_inch = -0.006
z_plunge_mm = z_plunge_inch * 25.4
lines = []
z_leeway = 1
z_ub = pnts_z[0]
g1_feed = _feed
for idx in range(len(pnts_z)):
if z_ub < pnts_z[idx]: z_ub = pnts_z[idx]
z_ub += z_leeway
for line in gcode["lines"]:
line = line.strip()
is_move = 0
l = line.rstrip('\n')
# skip comments
# assumes comments encased in parens all on one line
#
m = re.match('^\s*(\(|;)', l)
if m:
lines.append(l)
continue
m = re.match('^\s*[gG]\s*(0*\d*)([^\d]|$)', l)
if m:
tmp_mode = m.group(1)
if re.match('^0*20$', tmp_mode):
unit = "inch"
elif re.match('^0*21$', tmp_mode):
unit = "mm"
m = re.match('^\s*[gG]\s*(0*[01])[^\d](.*)', l)
if m:
g_mode = m.group(1)
l = m.group(2)
m = re.match('.*[xX]\s*(-?\d+(\.\d+)?)', l)
if m:
is_move = 1
cur_x = m.group(1)
m = re.match('.*[yY]\s*(-?\d+(\.\d+)?)', l)
if m:
is_move = 1
cur_y = m.group(1)
m = re.match('.*[zZ]\s*(-?\d+(\.\d+)?)', l)
if m:
is_move = 1
cur_z = m.group(1)
if ( float(cur_z) >= z_threshold ):
z_pos = 'up'
else:
z_pos = 'down'
if is_move and (not g_mode):
return None
if not is_move:
lines.append(l)
continue
if (z_pos == 'up'):
lines.append("G" + str(g_mode) + " Z{0:.8f}".format(z_ub))
elif (z_pos == 'down'):
            interpolated_z = griddata(pnts_xy, pnts_z, (float(cur_x), float(cur_y)), method='linear')
if np.isnan(interpolated_z):
sys.stderr.write("ERROR: NaN at " + str(cur_x) + " " + str(cur_y))
sys.stdout.write("ERROR: NaN at " + str(cur_x) + " " + str(cur_y))
sys.stderr.flush()
sys.stdout.flush()
sys.exit(-5)
if unit == "inch":
z_plunge = z_plunge_inch
elif unit == "mm":
z_plunge = z_plunge_mm
else:
#print "ERROR: unit improperly set"
return None
interpolated_z += z_plunge
x_f = float(cur_x)
y_f = float(cur_y)
if z_pos_prv == "up":
lines.append("G0 X{0:.8f}".format(x_f) + " Y{0:.8f}".format(y_f) + " Z{0:.8f}".format(z_ub))
#print "G" + g_mode, "X{0:.8f}".format(x_f), "Y{0:.8f}".format(y_f), "Z{0:.8f}".format(interpolated_z)
#lines.append("G" + str(g_mode) + " X{0:.8f}".format(x_f) + " Y{0:.8f}".format(y_f) + " Z{0:.8f}".format(interpolated_z))
lines.append("G" + str(g_mode) + " X{0:.8f}".format(x_f) + " Y{0:.8f}".format(y_f) + " Z{0:.8f}".format(interpolated_z) + " F{0:.8f}".format(g1_feed))
z_pos_prv = z_pos
return lines
_gc = read_gcode_file(gcode_file)
pnts = []
if not dry_run:
grbl.setup(tty_device)
grbl.send_initial_command("")
sys.stdout.write("\n#### ")
cprint("READY TO CUT, REMOVE PROBE AND PRESS ENTER TO CONTINUE", "red", attrs=['blink'])
sys.stdout.flush()
sys.stdin.readline()
if not dry_run:
    sys.stdout.write("homing ....\n")
    grbl.send_command("$H")
    sys.stdout.write("homing done.\n")
#grbl.send_command("G1 Z" + _s(Z_ZERO) + " F" + _s(G1_SPEED) )
if not dry_run:
    if grbl_spindle:
        grbl.send_command("S" + _s(SPINDLE_SPEED))
        grbl.send_command("M03")
    else:
        grbl.send_command("M42")
for line in _gc["lines"]:
    line = line.strip()
    if len(line) == 0: continue
    if line[0] == '#': continue
    if line[0] == '(': continue
    print "## sending:", line
    sys.stdout.flush()
    if dry_run:
        continue  # -D: parse and echo only, never talk to GRBL
    r = grbl.send_command(line)
    print "### got:", r
    sys.stdout.flush()
if not dry_run:
    if grbl_spindle:
        grbl.send_command("M05")
    else:
        grbl.send_command("M43")
    grbl.send_command("$H")
| abetusk/dev | projects/pcbcnc/src/cnc3018pro.py | Python | agpl-3.0 | 8,180 | 0.025672 |
# how many routes are in a 20x20 lattice grid?
import time
start_time = time.clock()
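# field[n][i] counts lattice paths to cell (n, i); the table is Pascal's
# triangle, so the answer is the central binomial coefficient
# C(40, 20) = 137846528820.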
field = []
for n in range(0, 21):
    field.append([1])
    for i in range(1, 21):
        total = field[n][i - 1]
        if n > 0:
            total += field[n - 1][i]
        field[n].append(total)
print(field[20][20])
print("--- %s seconds ---" % (time.clock() - start_time)) | Selen93/ProjectEuler | Python/Problem 15/Problem 15.py | Python | mit | 349 | 0.014327 |
#!/usr/bin/env python
# coding: utf-8
# Copyright 2014 The Crashpad Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A one-shot testing webserver.
When invoked, this server will write a short integer to stdout, indiciating on
which port the server is listening. It will then read one integer from stdin,
indiciating the response code to be sent in response to a request. It also reads
16 characters from stdin, which, after having "\r\n" appended, will form the
response body in a successful response (one with code 200). The server will
process one HTTP request, deliver the prearranged response to the client, and
write the entire request to stdout. It will then terminate.
This server is written in Python since it provides a simple HTTP stack, and
because parsing chunked encoding is safer and easier in a memory-safe language.
This could easily have been written in C++ instead.
"""
import BaseHTTPServer
import struct
import sys
import zlib
class BufferedReadFile(object):
"""A File-like object that stores all read contents into a buffer."""
def __init__(self, real_file):
self.file = real_file
self.buffer = ""
def read(self, size=-1):
buf = self.file.read(size)
self.buffer += buf
return buf
def readline(self, size=-1):
buf = self.file.readline(size)
self.buffer += buf
return buf
def flush(self):
self.file.flush()
def close(self):
self.file.close()
class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
# Everything to be written to stdout is collected into this string. It can’t
# be written to stdout until after the HTTP transaction is complete, because
# stdout is a pipe being read by a test program that’s also the HTTP client.
# The test program expects to complete the entire HTTP transaction before it
# even starts reading this script’s stdout. If the stdout pipe buffer fills up
# during an HTTP transaction, deadlock would result.
raw_request = ''
response_code = 500
response_body = ''
def handle_one_request(self):
# Wrap the rfile in the buffering file object so that the raw header block
# can be written to stdout after it is parsed.
self.rfile = BufferedReadFile(self.rfile)
BaseHTTPServer.BaseHTTPRequestHandler.handle_one_request(self)
def do_POST(self):
RequestHandler.raw_request = self.rfile.buffer
self.rfile.buffer = ''
if self.headers.get('Transfer-Encoding', '').lower() == 'chunked':
if 'Content-Length' in self.headers:
raise AssertionError
body = self.handle_chunked_encoding()
else:
length = int(self.headers.get('Content-Length', -1))
body = self.rfile.read(length)
if self.headers.get('Content-Encoding', '').lower() == 'gzip':
# 15 is the value of |wbits|, which should be at the maximum possible
# value to ensure that any gzip stream can be decoded. The offset of 16
# specifies that the stream to decompress will be formatted with a gzip
# wrapper.
body = zlib.decompress(body, 16 + 15)
RequestHandler.raw_request += body
self.send_response(self.response_code)
self.end_headers()
if self.response_code == 200:
self.wfile.write(self.response_body)
self.wfile.write('\r\n')
def handle_chunked_encoding(self):
"""This parses a "Transfer-Encoding: Chunked" body in accordance with
RFC 7230 §4.1. This returns the result as a string.
"""
body = ''
chunk_size = self.read_chunk_size()
while chunk_size > 0:
# Read the body.
data = self.rfile.read(chunk_size)
chunk_size -= len(data)
body += data
# Finished reading this chunk.
if chunk_size == 0:
# Read through any trailer fields.
trailer_line = self.rfile.readline()
while trailer_line.strip() != '':
trailer_line = self.rfile.readline()
# Read the chunk size.
chunk_size = self.read_chunk_size()
return body
def read_chunk_size(self):
# Read the whole line, including the \r\n.
chunk_size_and_ext_line = self.rfile.readline()
# Look for a chunk extension.
chunk_size_end = chunk_size_and_ext_line.find(';')
if chunk_size_end == -1:
# No chunk extensions; just encounter the end of line.
chunk_size_end = chunk_size_and_ext_line.find('\r')
if chunk_size_end == -1:
self.send_response(400) # Bad request.
return -1
return int(chunk_size_and_ext_line[:chunk_size_end], base=16)
def log_request(self, code='-', size='-'):
# The default implementation logs these to sys.stderr, which is just noise.
pass
def Main():
if sys.platform == 'win32':
import os, msvcrt
msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
# Start the server.
server = BaseHTTPServer.HTTPServer(('127.0.0.1', 0), RequestHandler)
# Write the port as an unsigned short to the parent process.
sys.stdout.write(struct.pack('=H', server.server_address[1]))
sys.stdout.flush()
# Read the desired test response code as an unsigned short and the desired
# response body as a 16-byte string from the parent process.
RequestHandler.response_code, RequestHandler.response_body = \
struct.unpack('=H16s', sys.stdin.read(struct.calcsize('=H16s')))
# Handle the request.
server.handle_request()
# Share the entire request with the test program, which will validate it.
sys.stdout.write(RequestHandler.raw_request)
sys.stdout.flush()
if __name__ == '__main__':
Main()
| atom/crashpad | util/net/http_transport_test_server.py | Python | apache-2.0 | 6,017 | 0.010316 |
import datetime
import os
import re
class WLError(Exception):
"""Base class for all Writelightly exceptions."""
class WLQuit(WLError):
"""Raised when user sends a quit command."""
def lastday(*args):
"""Return the last day of the given month.
Takes datetime.date or year and month, returns an integer.
>>> lastday(2011, 11)
30
>>> lastday(datetime.date(2011, 2, 1))
28
"""
if not args:
raise TypeError('I need some arguments')
if len(args) == 1 and type(args[0]) is datetime.date:
year, month = args[0].year, args[0].month
elif len(args) == 2 and type(args[0]) is int and type(args[1]) is int:
year, month = args
else:
raise TypeError('Give me either datetime.date or year and month')
next_first = datetime.date(year if month != 12 else year + 1,
month + 1 if month != 12 else 1, 1)
return (next_first - datetime.timedelta(days=1)).day
def entry_exists(date):
"""Check if an entry for the given date exists."""
from metadata import Metadata
data = Metadata.get(date.year, date.month).get_data_for_day(date.day)
return data is not None
def format_size(size):
"""Format a size of a file in bytes to be human-readable."""
size = int(size)
if size > 1024:
kib = size // 1024 + (size % 1024) / 1024.0
return ('%.2f' % kib).rstrip('0').rstrip('.') + ' KiB'
return '%d B' % size
def format_date(date):
return '%s %d, %d' % (date.strftime('%B'), date.day, date.year)
def get_all_months(data_dir):
"""
Return a sorted list of (year, month) tuples for months that have entries.
"""
entries_dir = os.path.join(data_dir, 'entries')
entries = os.listdir(entries_dir)
months = set()
for entry in entries:
months.add(tuple(map(int, entry.split('-'))))
return sorted(sorted(list(months), key=lambda i: i[1]), key=lambda i: i[0])
def parse_date(date):
"""Try to recognize a date using several formats."""
if date == 'today':
return datetime.date.today()
if date == 'yesterday':
return datetime.date.today() - datetime.timedelta(days=1)
    for p in ['(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})',
              '(?P<month>\d{2})/(?P<day>\d{2})/(?P<year>\d{4})',
              '(?P<day>\d{2})\.(?P<month>\d{2})\.(?P<year>\d{4})']:
m = re.match(p, date)
if m:
d = m.groupdict()
return datetime.date(int(d['year']), int(d['month']), int(d['day']))
return None
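# e.g. parse_date('2011-11-30'), parse_date('11/30/2011') and
# parse_date('30.11.2011') all return datetime.date(2011, 11, 30).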
def get_char(win):
"""Use win.getch() to get a character even if it's multibyte.
A magic function that I found somewhere on the Internet. But it works
well, at least for Russian characters in a UTF-8-based shell.
Needs testing.
"""
def get_check_next_byte():
c = win.getch()
if 128 <= c <= 191:
return c
else:
raise UnicodeError
bytes = []
c = win.getch()
if c <= 127:
return c
elif 194 <= c <= 223:
bytes.append(c)
bytes.append(get_check_next_byte())
elif 224 <= c <= 239:
bytes.append(c)
bytes.append(get_check_next_byte())
bytes.append(get_check_next_byte())
elif 240 <= c <= 244:
bytes.append(c)
bytes.append(get_check_next_byte())
bytes.append(get_check_next_byte())
bytes.append(get_check_next_byte())
else:
return c
buf = ''.join([chr(b) for b in bytes])
return buf
def format_time(ts, full=False):
"""Format a timestamp relatively to the current time."""
if not ts:
return 'unknown'
dt = datetime.datetime.fromtimestamp(ts)
today = datetime.date.today()
fmt = ' '.join(filter(None, [
'%Y' if dt.year != today.year or full else '',
'%b %d' if (dt.month, dt.day) != (today.month, today.day)
or full else '',
'%H:%M'
]))
return dt.strftime(fmt)
| thesealion/writelightly | writelightly/utils.py | Python | mit | 3,963 | 0.005551 |
# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2015, Thomas Scholtes.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
from __future__ import (division, absolute_import, print_function,
unicode_literals)
from mock import patch
from test._common import unittest
from test.helper import TestHelper
from beets.library import Item
class KeyFinderTest(unittest.TestCase, TestHelper):
def setUp(self):
self.setup_beets()
self.load_plugins('keyfinder')
self.patcher = patch('beets.util.command_output')
self.command_output = self.patcher.start()
def tearDown(self):
self.teardown_beets()
self.unload_plugins()
self.patcher.stop()
def test_add_key(self):
item = Item(path='/file')
item.add(self.lib)
self.command_output.return_value = 'dbm'
self.run_command('keyfinder')
item.load()
self.assertEqual(item['initial_key'], 'C#m')
self.command_output.assert_called_with(
['KeyFinder', '-f', item.path])
def test_add_key_on_import(self):
self.command_output.return_value = 'dbm'
importer = self.create_importer()
importer.run()
item = self.lib.items().get()
self.assertEqual(item['initial_key'], 'C#m')
def test_force_overwrite(self):
self.config['keyfinder']['overwrite'] = True
item = Item(path='/file', initial_key='F')
item.add(self.lib)
self.command_output.return_value = 'C#m'
self.run_command('keyfinder')
item.load()
self.assertEqual(item['initial_key'], 'C#m')
def test_do_not_overwrite(self):
item = Item(path='/file', initial_key='F')
item.add(self.lib)
self.command_output.return_value = 'dbm'
self.run_command('keyfinder')
item.load()
self.assertEqual(item['initial_key'], 'F')
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == b'__main__':
unittest.main(defaultTest='suite')
| kelvinhammond/beets | test/test_keyfinder.py | Python | mit | 2,610 | 0.000383 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the VSS path specification implementation."""
import unittest
from dfvfs.path import vshadow_path_spec
from tests.path import test_lib
class VShadowPathSpecTest(test_lib.PathSpecTestCase):
"""Tests for the VSS path specification implementation."""
def testInitialize(self):
"""Tests the path specification initialization."""
path_spec = vshadow_path_spec.VShadowPathSpec(parent=self._path_spec)
self.assertIsNotNone(path_spec)
path_spec = vshadow_path_spec.VShadowPathSpec(
location=u'/vss2', parent=self._path_spec)
self.assertIsNotNone(path_spec)
path_spec = vshadow_path_spec.VShadowPathSpec(
store_index=1, parent=self._path_spec)
self.assertIsNotNone(path_spec)
path_spec = vshadow_path_spec.VShadowPathSpec(
location=u'/vss2', store_index=1, parent=self._path_spec)
self.assertIsNotNone(path_spec)
with self.assertRaises(ValueError):
_ = vshadow_path_spec.VShadowPathSpec(parent=None)
with self.assertRaises(ValueError):
_ = vshadow_path_spec.VShadowPathSpec(
parent=self._path_spec, bogus=u'BOGUS')
def testComparable(self):
"""Tests the path specification comparable property."""
path_spec = vshadow_path_spec.VShadowPathSpec(parent=self._path_spec)
self.assertIsNotNone(path_spec)
expected_comparable = u'\n'.join([
u'type: TEST',
u'type: VSHADOW',
u''])
self.assertEqual(path_spec.comparable, expected_comparable)
path_spec = vshadow_path_spec.VShadowPathSpec(
location=u'/vss2', parent=self._path_spec)
self.assertIsNotNone(path_spec)
expected_comparable = u'\n'.join([
u'type: TEST',
u'type: VSHADOW, location: /vss2',
u''])
self.assertEqual(path_spec.comparable, expected_comparable)
path_spec = vshadow_path_spec.VShadowPathSpec(
store_index=1, parent=self._path_spec)
self.assertIsNotNone(path_spec)
expected_comparable = u'\n'.join([
u'type: TEST',
u'type: VSHADOW, store index: 1',
u''])
self.assertEqual(path_spec.comparable, expected_comparable)
path_spec = vshadow_path_spec.VShadowPathSpec(
location=u'/vss2', store_index=1, parent=self._path_spec)
self.assertIsNotNone(path_spec)
expected_comparable = u'\n'.join([
u'type: TEST',
u'type: VSHADOW, location: /vss2, store index: 1',
u''])
self.assertEqual(path_spec.comparable, expected_comparable)
if __name__ == '__main__':
unittest.main()
| dc3-plaso/dfvfs | tests/path/vshadow_path_spec.py | Python | apache-2.0 | 2,586 | 0.00232 |
# -*- coding: utf-8 -*-
__author__ = 'chinfeng'
__gum__ = 'web_console'
from .server import WebConsoleServer
| chinfeng/gumpy | plugins/web_console/__init__.py | Python | lgpl-3.0 | 110 | 0.009091 |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the crash_access library."""
# pylint: disable=protected-access
import unittest
import mock
from clusterfuzz._internal.tests.test_libs import helpers as test_helpers
from libs import crash_access
from libs import helpers
from libs.query import base
def _has_access(need_privileged_access=False):
return not need_privileged_access
class AddScopeTest(unittest.TestCase):
"""Test add_scope."""
def setUp(self):
Query = base.Query # pylint: disable=invalid-name
test_helpers.patch(self, [
'clusterfuzz._internal.base.external_users._allowed_entities_for_user',
'libs.crash_access.get_permission_names',
'libs.access.has_access',
'libs.access.get_user_job_type',
'libs.helpers.get_user_email',
'libs.query.base.Query',
])
self.params = {}
self.mock.get_user_job_type.return_value = None
self.mock.get_user_email.return_value = '[email protected]'
self.mock._allowed_entities_for_user.return_value = []
self.mock.get_permission_names.return_value = []
def create_query():
q = mock.create_autospec(Query)
return q
self.mock.Query.side_effect = create_query
self.query = base.Query()
def test_forbidden(self):
"""Test when user is forbidden."""
self.mock.has_access.return_value = False
with self.assertRaises(helpers.EarlyExitException):
crash_access.add_scope(self.query, self.params, 'security_flag',
'job_type', 'fuzzer_name')
def test_default_global_privileged(self):
"""Test the default filter for globally privileged users."""
self.mock.has_access.return_value = True
crash_access.add_scope(self.query, self.params, 'security_flag', 'job_type',
'fuzzer_name')
self.assertTrue(self.params['permissions']['everything'])
self.assertTrue(self.params['permissions']['isPrivileged'])
self.assertEqual([], self.params['permissions']['jobs'])
    self.assertEqual([], self.params['permissions']['fuzzers'])
self.query.union.assert_has_calls([])
self.query.filter.assert_has_calls([])
def test_default_domain(self):
"""Test the default filter for domain users."""
self.mock.has_access.side_effect = _has_access
crash_access.add_scope(self.query, self.params, 'security_flag', 'job_type',
'fuzzer_name')
self.assertTrue(self.params['permissions']['everything'])
self.assertFalse(self.params['permissions']['isPrivileged'])
self.assertEqual([], self.params['permissions']['jobs'])
    self.assertEqual([], self.params['permissions']['fuzzers'])
self.query.filter.assert_has_calls([])
self.query.union.assert_called_once_with(mock.ANY)
q = self.query.union.call_args[0][0]
q.union.assert_has_calls([])
q.filter.assert_has_calls([mock.call('security_flag', False)])
def test_domain_with_job_and_fuzzer(self):
"""Test domain user with job and fuzzer."""
self.mock.has_access.side_effect = _has_access
self.mock.get_user_job_type.return_value = 'job'
self.mock._allowed_entities_for_user.side_effect = [['job2'], ['fuzzer']]
self.mock.get_permission_names.side_effect = [['perm'], ['perm1']]
crash_access.add_scope(self.query, self.params, 'security_flag', 'job_type',
'fuzzer_name')
self.assertTrue(self.params['permissions']['everything'])
self.assertFalse(self.params['permissions']['isPrivileged'])
self.assertListEqual(['perm', 'job'], self.params['permissions']['jobs'])
self.assertListEqual(['perm1'], self.params['permissions']['fuzzers'])
self.query.union.assert_has_calls([])
self.query.union.assert_called_once_with(mock.ANY, mock.ANY, mock.ANY)
everything_query = self.query.union.call_args[0][0]
job_query = self.query.union.call_args[0][1]
fuzzer_query = self.query.union.call_args[0][2]
everything_query.union.assert_has_calls([])
job_query.union.assert_has_calls([])
fuzzer_query.union.assert_has_calls([])
everything_query.filter.assert_has_calls(
[mock.call('security_flag', False)])
job_query.filter_in.assert_has_calls([
mock.call('job_type', ['job2', 'job']),
])
fuzzer_query.filter_in.assert_has_calls([
mock.call('fuzzer_name', ['fuzzer']),
])
| google/clusterfuzz | src/clusterfuzz/_internal/tests/appengine/libs/crash_access_test.py | Python | apache-2.0 | 4,900 | 0.002653 |
"""
This page is in the table of contents.
The xml.py script is an import translator plugin to get a carving from an Art of Illusion xml file.
An import plugin is a script in the interpret_plugins folder which has the function getCarving. It is meant to be run from the interpret tool. To ensure that the plugin works on platforms which do not handle file capitalization properly, give the plugin a lower case name.
The getCarving function takes the file name of an xml file and returns the carving.
This example gets a triangle mesh for the xml file boolean.xml. This example is run in a terminal in the folder which contains boolean.xml and xml.py.
> python
Python 2.5.1 (r251:54863, Sep 22 2007, 01:43:31)
[GCC 4.2.1 (SUSE Linux)] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import xml
>>> xml.getCarving().getCarveRotatedBoundaryLayers()
[-1.159765625, None, [[(-18.925000000000001-2.4550000000000001j), (-18.754999999999981-2.4550000000000001j)
..
many more lines of the carving
..
An xml file can be exported from Art of Illusion by going to the "File" menu, then going into the "Export" menu item, then picking the XML choice. This will bring up the XML file chooser window, choose a place to save the file then click "OK". Leave the "compressFile" checkbox unchecked. All the objects from the scene will be exported, this plugin will ignore the light and camera. If you want to fabricate more than one object at a time, you can have multiple objects in the Art of Illusion scene and they will all be carved, then fabricated together.
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module.
import __init__
from fabmetheus_utilities.geometry.geometry_utilities import evaluate
from fabmetheus_utilities.geometry.solids import group
from fabmetheus_utilities.geometry.solids import trianglemesh
from fabmetheus_utilities.vector3 import Vector3
__author__ = 'Enrique Perez ([email protected])'
__credits__ = 'Nophead <http://hydraraptor.blogspot.com/>\nArt of Illusion <http://www.artofillusion.org/>'
__date__ = '$Date: 2008/21/04 $'
__license__ = 'GPL 3.0'
def processXMLElement(xmlElement):
"Process the xml element."
group.processShape( Cube, xmlElement)
class Cube( trianglemesh.TriangleMesh ):
"A cube object."
def addXMLSection(self, depth, output):
"Add the xml section for this object."
pass
def createShape(self):
"Create the shape."
square = [
complex( - self.inradius.x, - self.inradius.y ),
complex( self.inradius.x, - self.inradius.y ),
complex( self.inradius.x, self.inradius.y ),
complex( - self.inradius.x, self.inradius.y ) ]
bottomTopSquare = trianglemesh.getAddIndexedLoops( square, self.vertexes, [ - self.inradius.z, self.inradius.z ] )
trianglemesh.addPillarByLoops( self.faces, bottomTopSquare )
def setToObjectAttributeDictionary(self):
"Set the shape of this carvable object info."
self.inradius = evaluate.getVector3ByPrefixes( ['demisize', 'inradius'], Vector3(1.0, 1.0, 1.0), self.xmlElement )
self.inradius = evaluate.getVector3ByMultiplierPrefix( 2.0, 'size', self.inradius, self.xmlElement )
self.xmlElement.attributeDictionary['inradius.x'] = self.inradius.x
self.xmlElement.attributeDictionary['inradius.y'] = self.inradius.y
self.xmlElement.attributeDictionary['inradius.z'] = self.inradius.z
self.createShape()
| natetrue/ReplicatorG | skein_engines/skeinforge-35/fabmetheus_utilities/geometry/solids/cube.py | Python | gpl-2.0 | 3,514 | 0.016221 |
"""
pybufrkit.mdquery
~~~~~~~~~~~~~~~~~
"""
from __future__ import absolute_import
from __future__ import print_function
import logging
from pybufrkit.errors import MetadataExprParsingError
__all__ = ['MetadataExprParser', 'MetadataQuerent', 'METADATA_QUERY_INDICATOR_CHAR']
log = logging.getLogger(__file__)
METADATA_QUERY_INDICATOR_CHAR = '%'
class MetadataExprParser(object):
def parse(self, metadata_expr):
"""
:param str metadata_expr: The metadata expression string to parse
:return: A 2-element tuple of section index and metadata name
:rtype: (int, str)
"""
metadata_expr = metadata_expr.strip()
if metadata_expr[0] != METADATA_QUERY_INDICATOR_CHAR:
raise MetadataExprParsingError('Metadata expression must start with "%"')
if '.' in metadata_expr:
section_index, metadata_name = metadata_expr[1:].split('.')
try:
section_index = int(section_index)
except ValueError:
raise MetadataExprParsingError('Invalid section index: {}'.format(section_index))
else:
section_index = None
metadata_name = metadata_expr[1:]
return section_index, metadata_name
class MetadataQuerent(object):
"""
:param MetadataExprParser metadata_expr_parser: Parser for metadata expression
"""
def __init__(self, metadata_expr_parser):
self.metadata_expr_parser = metadata_expr_parser
def query(self, bufr_message, metadata_expr):
section_index, metadata_name = self.metadata_expr_parser.parse(metadata_expr)
sections = [s for s in bufr_message.sections
if s.get_metadata('index') == section_index or section_index is None]
for section in sections:
for parameter in section:
if parameter.name == metadata_name:
return parameter.value
return None
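# A minimal usage sketch (`bufr_message` is assumed to be a message decoded
# with pybufrkit.decoder.Decoder; the parameter names are illustrative):
#
#   querent = MetadataQuerent(MetadataExprParser())
#   value = querent.query(bufr_message, '%edition')          # all sections
#   value = querent.query(bufr_message, '%1.data_category')  # section 1 only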
| ywangd/pybufrkit | pybufrkit/mdquery.py | Python | mit | 1,958 | 0.003064 |
from main import KeyboardHandler
import threading
import thread
import pyatspi
def parse(s):
"""parse a string like control+f into (modifier, key).
    Unknown modifiers raise ValueError."""
m = 0
lst = s.split('+')
if not len(lst):
return (0, s)
# Are these right?
d = {
"shift": 1 << pyatspi.MODIFIER_SHIFT,
"control": 1 << pyatspi.MODIFIER_CONTROL,
"alt": 1 << pyatspi.MODIFIER_ALT,
"win": 1 << pyatspi.MODIFIER_META3,
}
for item in lst:
if item in d:
m |= d[item]
lst.remove(item)
# end if
if len(lst) > 1: # more than one key, parse error
raise ValueError('unknown modifier %s' % lst[0])
return (m, lst[0].lower())
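# e.g. parse('control+shift+f') -> (mask, 'f') with the CONTROL and SHIFT
# modifier bits set in mask, and parse('f') -> (0, 'f').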
class AtspiThread(threading.Thread):
def run(self):
pyatspi.Registry.registerKeystrokeListener(handler, kind=(
pyatspi.KEY_PRESSED_EVENT,), mask=pyatspi.allModifiers())
pyatspi.Registry.start()
# the keys we registered
keys = {}
def handler(e):
m, k = e.modifiers, e.event_string.lower()
# not sure why we can't catch control+f. Try to fix it.
    if (not e.is_text) and 97 <= e.id <= 126:
k = chr(e.id)
if (m, k) not in keys:
return False
thread.start_new(keys[(m, k)], ())
return True # don't pass it on
class LinuxKeyboardHandler(KeyboardHandler):
def __init__(self, *args, **kwargs):
KeyboardHandler.__init__(self, *args, **kwargs)
t = AtspiThread()
t.start()
def register_key(self, key, function):
"""key will be a string, such as control+shift+f.
We need to convert that, using parse_key,
into modifier and key to put into our dictionary."""
# register key so we know if we have it on event receive.
t = parse(key)
keys[t] = function
# if we got this far, the key is valid.
KeyboardHandler.register_key(self, key, function)
def unregister_key(self, key, function):
KeyboardHandler.unregister_key(self, key, function)
del keys[parse(key)]
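
if __name__ == '__main__':
    # Hypothetical sanity check of parse() alone; note that merely importing
    # this module requires pyatspi and the application's `main` module.
    print(parse('control+shift+f'))  # -> (modifier mask, 'f')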
| Oliver2213/Queriet | queriet/keyboard_handler/linux.py | Python | gpl-2.0 | 2,059 | 0.000486 |
# 1222, Fri 18 Dec 2015 (NZDT)
#
# build-cpldns.py: makes pypy cpldns module
#
# Copyright (C) 2016 by Nevil Brownlee, U Auckland | WAND
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from cffi import FFI
ffi = FFI()
# Our C functions
ffi.cdef("int get_ldns_info(struct ldns_info *ldi, uint8_t *dns_msg, int dns_len);")
ffi.cdef("int ldns_ok(struct ldns_info *ldi);")
ffi.cdef("uint16_t get_short(uint8_t *bp, uint16_t x);")
ffi.set_source("cpldns", # .so to be created
""" // passed to the real C compiler
#include <ldns/ldns.h>
struct ldns_info {
struct ldns_struct_pkt *ldpkt; /* ldns_pkt typedef in ldns/packet.h */
int status; /* enum in ldns */
};
int get_ldns_info(struct ldns_info *ldi, uint8_t *dns_msg, int dns_len) {
ldi->status = ldns_wire2pkt(&ldi->ldpkt, dns_msg, dns_len);
return ldi->ldpkt != NULL; /* True if all's well */
}
int ldns_ok(struct ldns_info *ldi) {
return ldi->status == LDNS_STATUS_OK;
}
uint16_t get_short(uint8_t *bp, uint16_t x) {
uint16_t v = *(uint16_t *)&bp[x];
return ntohs(v);
}
""",
libraries=["ldns", "c"]) # list of libraries to link with
ffi.cdef(
"""
struct ldns_info {
struct ldns_struct_pkt *ldpkt; /* ldns_pkt typedef in ldns/packet.h */
int status; /* enum in ldns */
};
extern const char *ldns_pkt_opcode2str(int rcode); /* enum */
extern const char *ldns_pkt_rcode2str(int rcode); /* enum */
extern const char *ldns_rr_type2str(int type); /* enum */
extern const char *ldns_get_errorstr_by_id(int errnbr); /* enum */
extern uint16_t ldns_pkt_id(struct ldns_struct_pkt *p);
extern bool ldns_pkt_qr(struct ldns_struct_pkt *p);
extern int ldns_pkt_get_opcode(struct ldns_struct_pkt *p); /* enum */
extern int ldns_pkt_get_rcode(struct ldns_struct_pkt *p); /* enum */
extern uint16_t ldns_pkt_qdcount(struct ldns_struct_pkt *p);
extern struct ldns_struct_rr_list *ldns_pkt_question(
struct ldns_struct_pkt *p);
extern struct ldns_struct_rr_list *ldns_pkt_answer(
struct ldns_struct_pkt *p);
extern struct ldns_struct_rr_list *ldns_pkt_authority(
struct ldns_struct_pkt *p);
extern struct ldns_struct_rr_list *ldns_pkt_additional(
struct ldns_struct_pkt *p);
extern int ldns_rr_list_rr_count(struct ldns_struct_rr_list *rr_list);
extern struct ldns_struct_rr *ldns_rr_list_rr(
struct ldns_struct_rr_list *rr_list, size_t nr);
extern int ldns_rr_get_type(struct ldns_struct_rr *rr);
extern size_t ldns_rdf_size(struct ldns_struct_rdf *rd);
extern int ldns_rdf_get_type(struct ldns_struct_rdf *rd);
extern struct ldns_struct_rdf *ldns_rr_owner(struct ldns_struct_rr *rr);
extern char *ldns_rdf2str(struct ldns_struct_rdf *rdf);
extern size_t ldns_rr_rd_count(struct ldns_struct_rr *rr);
// get number of rdata fields
extern uint32_t ldns_rr_ttl(struct ldns_struct_rr *rr);
extern char *ldns_rr2str(struct ldns_struct_rr *rr);
extern struct ldns_struct_rdf *ldns_rr_rdf(struct ldns_struct_rr *rr, size_t n);
    // get nth rdf from rr
struct ldns_struct_rdf { /* From ldata/rdata.h */
size_t _size; /* The size of the data (in octets) */
int _type; /* enum: The type of the data */
void *_data; /* Pointer to the data (raw octets) */
};
""")
if __name__ == "__main__":
ffi.compile()
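
# Hypothetical usage of the compiled module (after running this script once);
# `dns_msg` is assumed to hold the raw bytes of a DNS message:
#
#   import cpldns
#   ldi = cpldns.ffi.new("struct ldns_info *")
#   ok = cpldns.lib.get_ldns_info(ldi, dns_msg, len(dns_msg))
#   if ok and cpldns.lib.ldns_ok(ldi):
#       print(cpldns.lib.ldns_pkt_qdcount(ldi.ldpkt))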
| nevil-brownlee/pypy-libtrace | lib/pldns/build-cpldns.py | Python | gpl-3.0 | 3,848 | 0.001559 |
from maya import OpenMaya
__all__ = [
"getSoftSelection"
]
def getSoftSelection():
"""
Query the soft selection in the scene. If no soft selection is made empty
lists will be returned. The active selection will be returned for easy
reselection. The data structure of the soft selection weight is as
followed.
data = {
mesh:{
{index:weight},
{index:weight},
}
}
:return: Active selection list for re-selection and weight data.
:rtype: tuple(OpenMaya.MSelectionList, dict)
"""
# variables
data = {}
activeList = OpenMaya.MSelectionList()
richList = OpenMaya.MSelectionList()
softSelection = OpenMaya.MRichSelection()
# get active selection
OpenMaya.MGlobal.getActiveSelectionList(activeList)
# get rich selection
try:
OpenMaya.MGlobal.getRichSelection(softSelection, False)
softSelection.getSelection(richList)
    except RuntimeError:  # raised when no soft selection is active
return activeList, data
# iter selection
selIter = OpenMaya.MItSelectionList(richList)
while not selIter.isDone():
# variables
dep = OpenMaya.MObject()
obj = OpenMaya.MObject()
dag = OpenMaya.MDagPath()
selIter.getDependNode(dep)
selIter.getDagPath(dag, obj)
# add path
path = dag.fullPathName()
        if path not in data:
data[path] = {}
# check is selection type is vertex components
if obj.apiType() == OpenMaya.MFn.kMeshVertComponent:
components = OpenMaya.MFnSingleIndexedComponent(obj)
count = components.elementCount()
# store index and weight
for i in range(count):
index = components.element(i)
weight = components.weight(i).influence()
data[path][index] = weight
selIter.next()
return activeList, data
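
if __name__ == '__main__':
    # Hypothetical usage from Maya's script editor: make a soft selection on
    # a mesh first, then run this module inside a Maya session.
    activeList, weights = getSoftSelection()
    for mesh, indexWeights in weights.items():
        print('{0}: {1} weighted vertices'.format(mesh, len(indexWeights)))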
| robertjoosten/rjSkinningTools | scripts/skinningTools/utils/api/selection.py | Python | gpl-3.0 | 1,916 | 0.000522 |
#!/usr/bin/python
#
# Config file test app (together with test.cfg file)
#
import os, sys
sys.path.append("..")
import configfile
cfg = configfile.ConfigFile("test.cfg")
cfg.setCfgValue("name1", "value1")
cfg.setCfgValue("name2", "value2")
cfg.selectSection("user")
cfg.setCfgValue("username", "janis")
cfg.setCfgValue("acceptable_names", ["john", "janis"])
cfg.load()
print cfg.cfg.options("main")
print cfg.cfg.options("user")
print cfg.getCfgValue("username")
print type(cfg.getCfgValue("username"))
print cfg.getCfgValueAsList("acceptable_names")
print cfg.getCfgValueAsList("list_in_list")
cfg.selectSection("main")
print cfg.getCfgValueAsInt("a_number")
print type(cfg.getCfgValueAsInt("a_number"))
print cfg.getCfgValueAsBool("a_bool")
print type(cfg.getCfgValueAsBool("a_bool"))
cfg.filename = "test-mod.cfg"
cfg.selectSection("main")
cfg.setCfgValue("name1", "value1mod2")
cfg.setCfgValue("a_number", 14)
cfg.selectSection("user")
cfg.setCfgValue("acceptable_names", ["john", "janis", "ivan"])
cfg.setCfgValue("list_in_list2", ["[baz]", "[foo, bar]"])
cfg.setCfgValue("list_in_list3", ["first", "[second-one, second-third]"])
cfg.save()
| IECS/MansOS | tools/lib/tests/configtest.py | Python | mit | 1,154 | 0.001733 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.patches import Circle, Polygon
from matplotlib import rc
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
from matplotlib.ticker import LogLocator
P = 8.0*np.arctan(1.0)*4.0*np.sqrt(2.0)
#rc('font',**{'family':'serif','serif':['Palatino']})
rc('text', usetex=True)
#rc('font',**{'family':'serif','serif':['Computer Modern Roman']})
rc('font', family='serif')
mpl.rcParams['ps.usedistiller'] = 'xpdf'
mpl.rcParams['font.size'] = 11
IQRs = np.loadtxt('IQRs_N400.dat')
LF_50_IQRs = IQRs[0:7,0:2]
TJ_50_IQRs = IQRs[7:14,0:2]
Cha_50_IQRs = IQRs[14:21,0:2]
RKNa14_50_IQRs = IQRs[21:28,0:2]
RKNb11_50_IQRs = IQRs[28:35,0:2]
RKNac1_50_IQRs = IQRs[35:42,0:2]
RKNbc1_50_IQRs = IQRs[42:49,0:2]
RKNbr1_50_IQRs = IQRs[49:56,0:2]
RKNar1_50_IQRs = IQRs[56:63,0:2]
RKNb6_50_IQRs = IQRs[77:84,0:2]
metcf_LF = 1
metcf_Cha = 12.01
metcf_TJ = 3.004
metcf_RKNb5 = 5.003
metcf_RKNb6 = 6.024
metcf_RKNb11 = 11.03
metcf_RKNa14 = 14.04
metcf_RKNar1b = 21.73
metcf_RKNar1 = 5.005
metcf_RKNbr1 = 4.997
metcf_RKNac1 = 30.36
metcf_RKNbc1 = 28.62
fig=plt.figure(figsize=(9,6))
#fig=plt.figure()
ax=fig.add_subplot(111)
#ax.loglog(RKNb11_50_IQRs[:,0], RKNb11_50_IQRs[:,1], label='RKNb11')
#ax.loglog(RKNbc1_50_IQRs[:,0], RKNbc1_50_IQRs[:,1], label='RKNbc1')
#ax.loglog(RKNar1_50_IQRs[:,0], RKNar1_50_IQRs[:,1], label='RKNar1')
ax.loglog(0.01/LF_50_IQRs[:,0] *metcf_LF , LF_50_IQRs[:,1] , 'v-', label='Leapfrog')
ax.loglog(0.01/TJ_50_IQRs[:,0] *metcf_TJ , TJ_50_IQRs[:,1] , '+-', label='Triple Jump')
ax.loglog(0.01/Cha_50_IQRs[:,0] *metcf_Cha , Cha_50_IQRs[:,1] , '^-', label='Chambers')
ax.loglog(0.01/RKNb6_50_IQRs[:,0] *metcf_RKNb6 , RKNb6_50_IQRs[:,1 ], '*-', label='RKNb6' )
ax.loglog(0.01/RKNa14_50_IQRs[:,0]*metcf_RKNa14, RKNa14_50_IQRs[:,1], 'o-', label='RKNa14')
ax.loglog(0.01/RKNbr1_50_IQRs[:,0]*metcf_RKNbr1, RKNbr1_50_IQRs[:,1], 'p-', label='RKNbr1')
ax.loglog(0.01/RKNac1_50_IQRs[:,0]*metcf_RKNac1, RKNac1_50_IQRs[:,1], 's-', label='RKNac1')
ax.set_xlim(2e-2, 1e2)
#ax.set_ylim(1e-16,2e-2)
for tick in ax.xaxis.get_major_ticks():
tick.label1.set_fontsize(16)
for tick in ax.yaxis.get_major_ticks():
tick.label1.set_fontsize(16)
plt.legend(loc='lower left')
plt.xlabel('CPU time (normalized to LF, $\delta t = 0.01$)', fontsize=18)
plt.ylabel('Interquartile range for $r-r_\mathrm{GBS}$', fontsize=18)
leg = plt.gca().get_legend()
ltext = leg.get_texts()
plt.setp(ltext, fontsize=14)
majorLocator = LogLocator(100)
#majorFormatter = FormatStrFormatter('%d')
minorLocator = LogLocator(10)
ax.yaxis.set_major_locator(majorLocator)
#ax.xaxis.set_major_formatter(majorFormatter)
#for the minor ticks, use no labels; default NullFormatter
ax.yaxis.set_minor_locator(minorLocator)
plt.savefig('400body_CPUtime_plot.eps',
orientation='landscape',bbox_inches='tight')
plt.show()
| atakan/Complex_Coeff_RKN | testing/timing_runs/400body_CPUtime_plot.py | Python | gpl-3.0 | 3,030 | 0.023432 |
"""
CLI management script. (Requires flask-script.)
"""
import os
# pylint: disable=no-name-in-module,import-error
from flask.ext.script import Manager
from flask.ext.migrate import MigrateCommand, Migrate
from componentsdb.app import default_app
from componentsdb.model import db
# Default to development environment settings unless told otherwise
if 'COMPONENTSDB_SETTINGS' not in os.environ:
os.environ['COMPONENTSDB_SETTINGS'] = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'devsettings.py'
)
app = default_app()
manager = Manager(app)
migrate = Migrate(app, db, directory='alembic')
manager.add_command('migrate', MigrateCommand)
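# With MigrateCommand attached, Alembic migrations are driven from the CLI,
# e.g. (hypothetical invocation): python manage.py migrate upgrade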
if __name__ == "__main__":
manager.run()
| rjw57/componentsdb | manage.py | Python | mit | 717 | 0.001395 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_delete_request(
scope: str,
policy_assignment_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}')
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, 'str', skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_create_request(
scope: str,
policy_assignment_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}')
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, 'str', skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_get_request(
scope: str,
policy_assignment_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}')
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, 'str', skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_for_resource_group_request(
resource_group_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/policyAssignments')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str', skip_quote=True)
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_for_resource_request(
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/policyAssignments')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
"resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, 'str'),
"parentResourcePath": _SERIALIZER.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
"resourceType": _SERIALIZER.url("resource_type", resource_type, 'str', skip_quote=True),
"resourceName": _SERIALIZER.url("resource_name", resource_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str')
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_request(
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyAssignments')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str')
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_delete_by_id_request(
policy_assignment_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{policyAssignmentId}')
path_format_arguments = {
"policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, 'str', skip_quote=True),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_create_by_id_request(
policy_assignment_id: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{policyAssignmentId}')
path_format_arguments = {
"policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, 'str', skip_quote=True),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_get_by_id_request(
policy_assignment_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{policyAssignmentId}')
path_format_arguments = {
"policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, 'str', skip_quote=True),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class PolicyAssignmentsOperations(object):
"""PolicyAssignmentsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.resource.policy.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def delete(
self,
scope: str,
policy_assignment_name: str,
**kwargs: Any
) -> Optional["_models.PolicyAssignment"]:
"""Deletes a policy assignment.
:param scope: The scope of the policy assignment.
:type scope: str
:param policy_assignment_name: The name of the policy assignment to delete.
:type policy_assignment_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment or None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.PolicyAssignment"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request(
scope=scope,
policy_assignment_name=policy_assignment_name,
template_url=self.delete.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'} # type: ignore
@distributed_trace
def create(
self,
scope: str,
policy_assignment_name: str,
parameters: "_models.PolicyAssignment",
**kwargs: Any
) -> "_models.PolicyAssignment":
"""Creates a policy assignment.
Policy assignments are inherited by child resources. For example, when you apply a policy to a
        resource group, that policy is assigned to all resources in the group.
:param scope: The scope of the policy assignment.
:type scope: str
:param policy_assignment_name: The name of the policy assignment.
:type policy_assignment_name: str
:param parameters: Parameters for the policy assignment.
:type parameters: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'PolicyAssignment')
request = build_create_request(
scope=scope,
policy_assignment_name=policy_assignment_name,
content_type=content_type,
json=_json,
template_url=self.create.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'} # type: ignore
@distributed_trace
def get(
self,
scope: str,
policy_assignment_name: str,
**kwargs: Any
) -> "_models.PolicyAssignment":
"""Gets a policy assignment.
:param scope: The scope of the policy assignment.
:type scope: str
:param policy_assignment_name: The name of the policy assignment to get.
:type policy_assignment_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
scope=scope,
policy_assignment_name=policy_assignment_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'} # type: ignore
@distributed_trace
def list_for_resource_group(
self,
resource_group_name: str,
filter: Optional[str] = None,
**kwargs: Any
) -> Iterable["_models.PolicyAssignmentListResult"]:
"""Gets policy assignments for the resource group.
:param resource_group_name: The name of the resource group that contains policy assignments.
:type resource_group_name: str
:param filter: The filter to apply on the operation.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyAssignmentListResult or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignmentListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignmentListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_for_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.list_for_resource_group.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_for_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_for_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/policyAssignments'} # type: ignore
@distributed_trace
def list_for_resource(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
filter: Optional[str] = None,
**kwargs: Any
) -> Iterable["_models.PolicyAssignmentListResult"]:
"""Gets policy assignments for a resource.
:param resource_group_name: The name of the resource group containing the resource. The name is
case insensitive.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource provider.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource path.
:type parent_resource_path: str
:param resource_type: The resource type.
:type resource_type: str
:param resource_name: The name of the resource with policy assignments.
:type resource_name: str
:param filter: The filter to apply on the operation.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyAssignmentListResult or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignmentListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignmentListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_for_resource_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.list_for_resource.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_for_resource_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_for_resource.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/policyAssignments'} # type: ignore
@distributed_trace
def list(
self,
filter: Optional[str] = None,
**kwargs: Any
) -> Iterable["_models.PolicyAssignmentListResult"]:
"""Gets all the policy assignments for a subscription.
:param filter: The filter to apply on the operation.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyAssignmentListResult or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignmentListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignmentListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyAssignments'} # type: ignore
@distributed_trace
def delete_by_id(
self,
policy_assignment_id: str,
**kwargs: Any
) -> "_models.PolicyAssignment":
"""Deletes a policy assignment by ID.
When providing a scope for the assignment, use '/subscriptions/{subscription-id}/' for
subscriptions, '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}' for
resource groups, and
'/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider-namespace}/{resource-type}/{resource-name}'
for resources.
:param policy_assignment_id: The ID of the policy assignment to delete. Use the format
'/{scope}/providers/Microsoft.Authorization/policyAssignments/{policy-assignment-name}'.
:type policy_assignment_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_by_id_request(
policy_assignment_id=policy_assignment_id,
template_url=self.delete_by_id.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_by_id.metadata = {'url': '/{policyAssignmentId}'} # type: ignore
@distributed_trace
def create_by_id(
self,
policy_assignment_id: str,
parameters: "_models.PolicyAssignment",
**kwargs: Any
) -> "_models.PolicyAssignment":
"""Creates a policy assignment by ID.
Policy assignments are inherited by child resources. For example, when you apply a policy to a
        resource group, that policy is assigned to all resources in the group. When providing a scope
for the assignment, use '/subscriptions/{subscription-id}/' for subscriptions,
'/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}' for resource groups,
and
'/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider-namespace}/{resource-type}/{resource-name}'
for resources.
:param policy_assignment_id: The ID of the policy assignment to create. Use the format
'/{scope}/providers/Microsoft.Authorization/policyAssignments/{policy-assignment-name}'.
:type policy_assignment_id: str
:param parameters: Parameters for policy assignment.
:type parameters: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'PolicyAssignment')
request = build_create_by_id_request(
policy_assignment_id=policy_assignment_id,
content_type=content_type,
json=_json,
template_url=self.create_by_id.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_by_id.metadata = {'url': '/{policyAssignmentId}'} # type: ignore
@distributed_trace
def get_by_id(
self,
policy_assignment_id: str,
**kwargs: Any
) -> "_models.PolicyAssignment":
"""Gets a policy assignment by ID.
When providing a scope for the assignment, use '/subscriptions/{subscription-id}/' for
subscriptions, '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}' for
resource groups, and
'/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider-namespace}/{resource-type}/{resource-name}'
for resources.
:param policy_assignment_id: The ID of the policy assignment to get. Use the format
'/{scope}/providers/Microsoft.Authorization/policyAssignments/{policy-assignment-name}'.
:type policy_assignment_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_by_id_request(
policy_assignment_id=policy_assignment_id,
template_url=self.get_by_id.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_by_id.metadata = {'url': '/{policyAssignmentId}'} # type: ignore
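
if __name__ == "__main__":
    # A hedged usage sketch, not part of the generated client: assumes
    # `azure-identity` is installed and AZURE_SUBSCRIPTION_ID is set. The
    # versioned PolicyClient exposes this operation group as
    # `policy_assignments`.
    import os
    from azure.identity import DefaultAzureCredential
    from azure.mgmt.resource.policy.v2016_12_01 import PolicyClient

    client = PolicyClient(DefaultAzureCredential(), os.environ["AZURE_SUBSCRIPTION_ID"])
    for assignment in client.policy_assignments.list():
        print(assignment.name)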
| Azure/azure-sdk-for-python | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/policy/v2016_12_01/operations/_policy_assignments_operations.py | Python | mit | 38,695 | 0.004032 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_find
version_added: "2.3"
short_description: Return a list of files based on specific criteria
description:
- Return a list of files based on specified criteria.
- Multiple criteria are AND'd together.
- For non-Windows targets, use the M(find) module instead.
options:
age:
description:
- Select files or folders whose age is equal to or greater than
the specified time.
- Use a negative age to find files equal to or less than
the specified time.
- You can choose seconds, minutes, hours, days or weeks
        by specifying the first letter of any of
        those words (e.g., "2s", "10d", "1w").
type: str
age_stamp:
description:
- Choose the file property against which we compare C(age).
- The default attribute we compare with is the last modification time.
type: str
choices: [ atime, ctime, mtime ]
default: mtime
checksum_algorithm:
description:
- Algorithm to determine the checksum of a file.
- Will throw an error if the host is unable to use specified algorithm.
type: str
choices: [ md5, sha1, sha256, sha384, sha512 ]
default: sha1
file_type:
description: Type of file to search for.
type: str
choices: [ directory, file ]
default: file
follow:
description:
- Set this to C(yes) to follow symlinks in the path.
- This needs to be used in conjunction with C(recurse).
type: bool
default: no
get_checksum:
description:
- Whether to return a checksum of the file in the return info (default sha1),
use C(checksum_algorithm) to change from the default.
type: bool
default: yes
hidden:
description: Set this to include hidden files or folders.
type: bool
default: no
paths:
description:
- List of paths of directories to search for files or folders in.
- This can be supplied as a single path or a list of paths.
type: list
required: yes
patterns:
description:
- One or more (powershell or regex) patterns to compare filenames with.
- The type of pattern matching is controlled by C(use_regex) option.
      - The patterns restrict the list of files or folders to be returned based on the filenames.
- For a file to be matched it only has to match with one pattern in a list provided.
type: list
recurse:
description:
- Will recursively descend into the directory looking for files or folders.
type: bool
default: no
size:
description:
- Select files or folders whose size is equal to or greater than the specified size.
- Use a negative value to find files equal to or less than the specified size.
- You can specify the size with a suffix of the byte type i.e. kilo = k, mega = m...
- Size is not evaluated for symbolic links.
type: str
use_regex:
description:
- Will set patterns to run as a regex check if set to C(yes).
type: bool
default: no
author:
- Jordan Borean (@jborean93)
'''
EXAMPLES = r'''
- name: Find files in path
win_find:
paths: D:\Temp
- name: Find hidden files in path
win_find:
paths: D:\Temp
hidden: yes
- name: Find files in multiple paths
win_find:
paths:
- C:\Temp
- D:\Temp
- name: Find files in directory while searching recursively
win_find:
paths: D:\Temp
recurse: yes
- name: Find files in directory while following symlinks
win_find:
paths: D:\Temp
recurse: yes
follow: yes
- name: Find files with .log and .out extension using powershell wildcards
win_find:
paths: D:\Temp
patterns: [ '*.log', '*.out' ]
- name: Find files in path based on regex pattern
win_find:
paths: D:\Temp
patterns: out_\d{8}-\d{6}.log
- name: Find files older than 1 day
win_find:
paths: D:\Temp
age: 86400
- name: Find files older than 1 day based on create time
win_find:
paths: D:\Temp
age: 86400
age_stamp: ctime
- name: Find files older than 1 day with unit syntax
win_find:
paths: D:\Temp
age: 1d
- name: Find files newer than 1 hour
win_find:
paths: D:\Temp
age: -3600
- name: Find files newer than 1 hour with unit syntax
win_find:
paths: D:\Temp
age: -1h
- name: Find files larger than 1MB
win_find:
paths: D:\Temp
size: 1048576
- name: Find files larger than 1GB with unit syntax
win_find:
paths: D:\Temp
size: 1g
- name: Find files smaller than 1MB
win_find:
paths: D:\Temp
size: -1048576
- name: Find files smaller than 1GB with unit syntax
win_find:
paths: D:\Temp
size: -1g
- name: Find folders/symlinks in multiple paths
win_find:
paths:
- C:\Temp
- D:\Temp
file_type: directory
- name: Find files and return SHA256 checksum of files found
win_find:
paths: C:\Temp
get_checksum: yes
checksum_algorithm: sha256
- name: Find files and do not return the checksum
win_find:
paths: C:\Temp
get_checksum: no
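# A hypothetical combined query; all of the criteria below must match at once,
# since multiple criteria are AND'd together.
- name: Find .log files modified within the last week and at least 1MB in size
  win_find:
    paths: D:\Temp
    recurse: yes
    patterns: [ '*.log' ]
    age: -1w
    size: 1m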
'''
RETURN = r'''
examined:
description: The number of files/folders that was checked.
returned: always
type: int
sample: 10
matched:
description: The number of files/folders that match the criteria.
returned: always
type: int
sample: 2
files:
description: Information on the files/folders that match the criteria returned as a list of dictionary elements for each file matched.
returned: success
type: complex
contains:
attributes:
description: attributes of the file at path in raw form.
returned: success, path exists
type: str
sample: "Archive, Hidden"
checksum:
description: The checksum of a file based on checksum_algorithm specified.
returned: success, path exists, path is a file, get_checksum == True
type: str
sample: 09cb79e8fc7453c84a07f644e441fd81623b7f98
creationtime:
description: The create time of the file represented in seconds since epoch.
returned: success, path exists
type: float
sample: 1477984205.15
extension:
description: The extension of the file at path.
returned: success, path exists, path is a file
type: str
sample: ".ps1"
isarchive:
description: If the path is ready for archiving or not.
returned: success, path exists
type: bool
sample: true
isdir:
description: If the path is a directory or not.
returned: success, path exists
type: bool
sample: true
ishidden:
description: If the path is hidden or not.
returned: success, path exists
type: bool
sample: true
islnk:
description: If the path is a symbolic link or junction or not.
returned: success, path exists
type: bool
sample: true
isreadonly:
description: If the path is read only or not.
returned: success, path exists
type: bool
sample: true
isshared:
description: If the path is shared or not.
returned: success, path exists
type: bool
sample: true
lastaccesstime:
description: The last access time of the file represented in seconds since epoch.
returned: success, path exists
type: float
sample: 1477984205.15
lastwritetime:
description: The last modification time of the file represented in seconds since epoch.
returned: success, path exists
type: float
sample: 1477984205.15
lnk_source:
description: The target of the symbolic link, will return null if not a link or the link is broken.
      returned: success, path exists, path is a symbolic link
type: str
sample: C:\temp
owner:
description: The owner of the file.
returned: success, path exists
type: str
sample: BUILTIN\Administrators
path:
description: The full absolute path to the file.
returned: success, path exists
type: str
      sample: C:\Temp\test.ps1
sharename:
description: The name of share if folder is shared.
returned: success, path exists, path is a directory and isshared == True
type: str
sample: file-share
size:
description: The size in bytes of a file or folder.
returned: success, path exists, path is not a link
type: int
sample: 1024
'''
| Jorge-Rodriguez/ansible | lib/ansible/modules/windows/win_find.py | Python | gpl-3.0 | 9,583 | 0.002087 |
# A revised version of CPython's turtle module written for Brython
#
# Note: This version is not intended to be used in interactive mode,
# nor use help() to look up methods/functions definitions. The docstrings
# have thus been shortened considerably as compared with the CPython's version.
#
# All public methods/functions of the CPython version should exist, if only
# to print out a warning that they are not implemented. The intent is to make
# it easier to "port" any existing turtle program from CPython to the browser.
#
# IMPORTANT: We use SVG for drawing turtles. If we have a turtle at an angle
# of 350 degrees and we rotate it by an additional 20 degrees, we will have
# a turtle at an angle of 370 degrees. For turtles drawn periodically on
# a screen (like typical animations, including the CPython turtle module),
# drawing a turtle with a rotation of 370 degrees is the same as a rotation of
# 10 degrees. However, using SVG, if we "slowly" animate an object,
# rotating it from 350 to 370 degrees, the result will not be the same
# as rotating it from 350 to 10 degrees. For this reason, we did not use the
# Vec2D class from the CPython module and handle the rotations quite differently.
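# As an illustration: animating an SVG rotation "from 350 to 370" sweeps 20
# degrees clockwise, while "from 350 to 10" sweeps 340 degrees counterclockwise,
# so this module accumulates angles instead of normalizing them modulo 360.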
import math
import sys
from math import cos, sin
from browser import console, document, html, timer
import _svg as svg
import copy
# Even though it is a private object, use the same name for the configuration
# dict as in CPython's module.
# Commented-out configuration items are those found in the CPython version.
_CFG = {
# "width" : 0.5, # Screen
# "height" : 0.75,
"canvwidth" : 500,
"canvheight": 500,
# "leftright": None,
# "topbottom": None,
"mode": "standard",
# "colormode": 1.0,
# "delay": 10,
# "undobuffersize": 1000,
"shape": "classic",
"pencolor" : "black",
"fillcolor" : "black",
# "resizemode" : "noresize",
"visible" : True,
# "language": "english", # docstrings
# "exampleturtle": "turtle",
# "examplescreen": "screen",
# "title": "Python Turtle Graphics",
# "using_IDLE": False
# Below are configuration items specific to this version
"turtle_canvas_wrapper": None,
"turtle_canvas_id": "turtle-canvas",
"min_duration": "1ms"
}
_cfg_copy = copy.copy(_CFG)
def set_defaults(**params):
"""Allows to override defaults."""
_CFG.update(**params)
Screen().reset()
class FormattedTuple(tuple):
'''used to give a nicer representation of the position'''
def __new__(cls, x, y):
return tuple.__new__(cls, (x, y))
def __repr__(self):
return "(%.2f, %.2f)" % self
def create_circle(r):
'''Creates a circle of radius r centered at the origin'''
    circle = svg.circle(cx=0, cy=0, r=r, stroke="black", fill="black")  # SVG circles are positioned via cx/cy
circle.setAttribute("stroke-width", 1)
return circle
def create_polygon(points):
'''Creates a polygon using the points provided'''
points = ["%s,%s " % (x, y) for x, y in points]
polygon = svg.polygon(points=points, stroke="black", fill="black")
polygon.setAttribute("stroke-width", 1)
return polygon
def create_rectangle(width=2, height=2, rx=None, ry=None):
'''Creates a rectangle centered at the origin. rx and ry can be
used to have rounded corners'''
rectangle = svg.rect(x=-width/2, y=-height/2, width=width,
height=height, stroke="black", fill="black")
rectangle.setAttribute("stroke-width", 1)
if rx is not None:
rectangle.setAttribute("rx", rx)
if ry is not None:
rectangle.setAttribute("ry", ry)
return rectangle
def create_square(size=2, r=None):
    '''Creates a square centered at the origin. r can be
    used to have rounded corners'''
return create_rectangle(width=size, height=size, rx=r, ry=r)
class TurtleGraphicsError(Exception):
"""Some TurtleGraphics Error
"""
pass
class Singleton(type):
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]
class Screen(metaclass=Singleton):
def __init__(self):
self.shapes = {
'arrow': (create_polygon, ((-10, 0), (10, 0), (0, 10))),
'turtle': (create_polygon, ((0, 16), (-2, 14), (-1, 10), (-4, 7),
(-7, 9), (-9, 8), (-6, 5), (-7, 1), (-5, -3), (-8, -6),
(-6, -8), (-4, -5), (0, -7), (4, -5), (6, -8), (8, -6),
(5, -3), (7, 1), (6, 5), (9, 8), (7, 9), (4, 7), (1, 10),
(2, 14))),
'classic': (create_polygon, ((0, 0), (-5, -9), (0, -7), (5, -9))),
'triangle': (create_polygon, ((10, -5.77), (0, 11.55), (-10, -5.77))),
'square': (create_square, 20),
'circle': (create_circle, 10)
}
self.reset()
self._set_geometry()
def bgcolor(self, color=None):
"""sets the background with the given color if color is not None,
else return current background color.
"""
if color is None:
return self.background_color
self.background_color = color
width = _CFG['canvwidth']
height = _CFG['canvheight']
if self.mode() in ['logo', 'standard']:
x = -width//2
y = -height//2
else:
x = 0
y = -height
self.frame_index += 1
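        # Drawing operations are serialized in time by chaining SVG animations:
        # this rect stays hidden ("display: none") until the previous frame's
        # animation ends, then the <animate> below reveals it permanently.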
rect = svg.rect(x=x, y=y, width=width, height=height, fill=color,
style={'display': 'none'})
an = svg.animate(Id="animation_frame%s" % self.frame_index,
attributeName="display", attributeType="CSS",
From="block", to="block", dur=_CFG["min_duration"],
fill='freeze')
an.setAttribute('begin', "animation_frame%s.end" % (self.frame_index-1))
rect <= an
        self.background_canvas <= rect
def _convert_coordinates(self, x, y):
"""In the browser, the increasing y-coordinate is towards the
bottom of the screen; this is the opposite of what is assumed
normally for the methods in the CPython turtle module.
        This method performs the necessary conversion. It should be called
just prior to creating any SVG element.
"""
        return x*self.xscale, self.y_points_down * y*self.yscale
def create_svg_turtle(self, _turtle, name):
if name in self.shapes:
fn = self.shapes[name][0]
arg = self.shapes[name][1]
else:
print("Unknown turtle '%s'; the default turtle will be used")
fn = self.shapes[_CVG["shape"]][0]
arg = self.shapes[_CVG["shape"]][1]
shape = fn(arg)
if self._mode == 'standard' or self._mode == 'world':
rotation = -90
else:
rotation = 0
return shape, rotation
def _dot(self, pos, size, color):
"""Draws a filled circle of specified size and color"""
if color is None:
color = 'black'
if size is None or size < 1:
size = 1
self.frame_index += 1
x, y = self._convert_coordinates(pos[0], pos[1])
circle = svg.circle(cx=x, cy=y, r=size, fill=color,
style={'display': 'none'})
an = svg.animate(Id="animation_frame%s" % self.frame_index,
attributeName="display", attributeType="CSS",
From="block", to="block", dur=_CFG["min_duration"],
fill='freeze')
an.setAttribute('begin', "animation_frame%s.end" % (self.frame_index-1))
circle <= an
self.canvas <= circle
def _drawline(self, _turtle, coordlist=None,
color=None, width=1, speed=None):
"""Draws an animated line with a turtle
        - coordlist contains the begin and end coordinates of the line
- color should include the current outline and fill colors;
- width is width of line to be drawn.
- speed is the animation speed
"""
outline = color[0]
fill = color[1]
x0, y0 = coordlist[0]
x1, y1 = coordlist[1]
x0, y0 = self._convert_coordinates(x0, y0)
x1, y1 = self._convert_coordinates(x1, y1)
# The speed scale does not correspond exactly to the CPython one...
if speed == 0:
duration = _CFG["min_duration"]
else:
dist = _turtle._distance
if speed is None or speed == 1:
duration = 0.02 * dist
else:
duration = 0.02 * dist / speed ** 1.2
if duration < 0.001:
duration = _CFG["min_duration"]
else:
duration = "%6.3fs" % duration
drawing = _turtle._drawing
_line = svg.line(x1=x0, y1=y0, x2=x0, y2=y0,
style={'stroke': outline, 'stroke-width': width})
if not drawing:
_line.setAttribute('opacity', 0)
# always create one animation for timing purpose
begin = "animation_frame%s.end" % self.frame_index
self.frame_index += 1
_an1 = svg.animate(Id="animation_frame%s" % self.frame_index,
attributeName="x2", attributeType="XML",
From=x0, to=x1, dur=duration, fill='freeze',
begin=begin)
_line <= _an1
## But, do not bother adding animations that will not be shown.
if drawing:
_an2 = svg.animate(attributeName="y2", attributeType="XML",
begin=begin,
From=y0, to=y1, dur=duration, fill='freeze')
_line <= _an2
if width > 2:
_line_cap = svg.set(attributeName="stroke-linecap",
begin=begin,
attributeType="xml", to="round", dur=duration, fill='freeze')
_line <= _line_cap
self.canvas <= _line
return begin, duration, (x0, y0), (x1, y1)
def _drawpoly(self, coordlist, outline=None, fill=None, width=None):
"""Draws a path according to provided arguments:
- coordlist is sequence of coordinates
- fill is filling color
- outline is outline color
- width is the outline width
"""
self.frame_index += 1
shape = ["%s,%s" % self._convert_coordinates(x, y) for x, y in coordlist]
style = {'display': 'none'}
if fill is not None:
style['fill'] = fill
if outline is not None:
style['stroke'] = outline
if width is not None:
style['stroke-width'] = width
else:
style['stroke-width'] = 1
polygon = svg.polygon(points=" ".join(shape), style=style)
an = svg.animate(Id="animation_frame%s" % self.frame_index,
attributeName="display", attributeType="CSS",
From="block", to="block", dur=_CFG["min_duration"],
fill='freeze')
an.setAttribute('begin', "animation_frame%s.end" % (self.frame_index-1))
polygon <= an
self.canvas <= polygon
def _new_frame(self):
        '''Returns a new animation frame id and updates the current index'''
previous_end = "animation_frame%s.end" % self.frame_index
self.frame_index += 1
new_frame_id = "animation_frame%s" % self.frame_index
return previous_end, new_frame_id
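    # Illustrative note (added; not in the original source): SVG animations
    # are chained via their "begin" attributes.  With frame_index == 1,
    # _new_frame() returns ("animation_frame1.end", "animation_frame2"),
    # so an element animated with Id="animation_frame2" and
    # begin="animation_frame1.end" starts exactly when frame 1 finishes.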
def mode(self, _mode=None):
if _mode is None:
return self._mode
_CFG['mode'] = _mode
self.reset()
def reset(self):
self._turtles = []
self.frame_index = 0
self.background_color = "white"
self._set_geometry()
def _set_geometry(self):
self.width = _CFG["canvwidth"]
self.height = _CFG["canvheight"]
self.x_offset = self.y_offset = 0
self.xscale = self.yscale = 1
self.y_points_down = -1
self._mode = _CFG["mode"].lower()
if self._mode in ['logo', 'standard']:
self.translate_canvas = (self.width//2, self.height//2)
elif self._mode == 'world':
self.translate_canvas = (0, self.height)
self._setup_canvas()
def _setup_canvas(self):
self.svg_scene = svg.svg(Id=_CFG["turtle_canvas_id"], width=self.width,
height=self.height)
translate = "translate(%d %d)" % self.translate_canvas
# always create one animation for timing purpose
self.svg_scene <= svg.animate(
Id="animation_frame%s" % self.frame_index,
attributeName="width", attributeType="CSS",
From=self.width, to=self.width, begin="0s",
dur=_CFG["min_duration"], fill='freeze')
# Unlike html elements, svg elements have no concept of a z-index: each
# new element is drawn on top of each other.
# Having separate canvas keeps the ordering
self.background_canvas = svg.g(transform=translate)
self.canvas = svg.g(transform=translate)
self.writing_canvas = svg.g(transform=translate)
self.turtle_canvas = svg.g(transform=translate)
self.svg_scene <= self.background_canvas
self.svg_scene <= self.canvas
self.svg_scene <= self.writing_canvas
self.svg_scene <= self.turtle_canvas
def setworldcoordinates(self, llx, lly, urx, ury):
"""Set up a user defined coordinate-system.
Arguments:
llx -- a number, x-coordinate of lower left corner of canvas
lly -- a number, y-coordinate of lower left corner of canvas
urx -- a number, x-coordinate of upper right corner of canvas
ury -- a number, y-coordinate of upper right corner of canvas
Note: llx must be less than urx in this version.
Warning: in user-defined coordinate systems angles may appear distorted.
"""
self._mode = "world"
if urx < llx:
sys.stderr.write("Warning: urx must be greater than llx; your choice will be reversed")
urx, llx = llx, urx
xspan = urx - llx
yspan = abs(ury - lly)
self.xscale = int(self.width) / xspan
self.yscale = int(self.height) / yspan
self.x_offset = -llx * self.xscale
if ury < lly:
self.y_points_down = 1 # standard orientation in the browser
else:
self.y_points_down = -1
self.y_offset = self.y_points_down * lly * self.yscale
self.translate_canvas = (self.x_offset, self.height-self.y_offset)
self._setup_canvas()
def show_scene(self):
'''Ends the creation of a "scene" and has it displayed'''
for t in self._turtles:
self.turtle_canvas <= t.svg
if _CFG["turtle_canvas_wrapper"] is None:
_CFG["turtle_canvas_wrapper"] = html.DIV(Id="turtle-canvas-wrapper")
document <= _CFG["turtle_canvas_wrapper"]
if _CFG["turtle_canvas_id"] not in document:
_CFG["turtle_canvas_wrapper"] <= self.svg_scene
def set_svg():
# need to have a delay for chrome so that first few draw commands are viewed properly.
_CFG["turtle_canvas_wrapper"].html = _CFG["turtle_canvas_wrapper"].html
timer.set_timeout(set_svg, 1)
def turtles(self):
"""Return the list of turtles on the screen.
"""
return self._turtles
def _write(self, pos, txt, align, font, color):
"""Write txt at pos in canvas with specified font
and color."""
if isinstance(color, tuple):
stroke = color[0]
fill = color[1]
else:
fill = color
stroke = None
x, y = self._convert_coordinates(pos[0], pos[1])
text = svg.text(txt, x=x, y=y, fill=fill,
style={'display': 'none',
'font-family': font[0],
'font-size': font[1],
'font-style': font[2]})
if stroke is not None:
text.setAttribute('stroke', stroke)
if align == 'left':
text.setAttribute('text-anchor', 'start')
elif align == 'center' or align == 'centre':
text.setAttribute('text-anchor', 'middle')
elif align == 'right':
text.setAttribute('text-anchor', 'end')
self.frame_index += 1
an = svg.animate(Id="animation_frame%s" % self.frame_index,
attributeName="display", attributeType="CSS",
From="block", to="block", dur=_CFG["min_duration"],
fill='freeze')
an.setAttribute('begin', "animation_frame%s.end" % (self.frame_index-1))
text <= an
self.writing_canvas <= text
def addshape(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.addshape() is not implemented.\n")
def bgpic(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.bgpic() is not implemented.\n")
def bye(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.bye() is not implemented.\n")
def clearscreen(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.clearscreen() is not implemented.\n")
def colormode(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.colormode() is not implemented.\n")
def delay(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.delay() is not implemented.\n")
def exitonclick(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.exitonclick() is not implemented.\n")
def getcanvas(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.getcanvas() is not implemented.\n")
def getshapes(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.getshapes() is not implemented.\n")
def listen(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.listen() is not implemented.\n")
def mainloop(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.mainloop() is not implemented.\n")
def numinput(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.numinput() is not implemented.\n")
def onkey(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.onkey() is not implemented.\n")
def onkeypress(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.onkeypress() is not implemented.\n")
def onkeyrelease(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.onkeyrelease() is not implemented.\n")
def onscreenclick(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.onscreenclick() is not implemented.\n")
def ontimer(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.ontimer() is not implemented.\n")
def register_shape(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.register_shape() is not implemented.\n")
def resetscreen(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.resetscreen() is not implemented.\n")
def screensize(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.screensize() is not implemented.\n")
def setup(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.setup() is not implemented.\n")
def textinput(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.textinput() is not implemented.\n")
def title(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.title() is not implemented.\n")
def tracer(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.tracer() is not implemented.\n")
def update(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.update() is not implemented.\n")
def window_height(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.window_height() is not implemented.\n")
def window_width(self, *args, **kwargs):
sys.stderr.write("Warning: Screen.window_width() is not implemented.\n")
class TNavigator:
"""Navigation part of the Turtle.
Implements methods for turtle movement.
"""
# START_ORIENTATION = {
# "standard": Vec2D(1.0, 0.0),
# "world": Vec2D(1.0, 0.0),
# "logo": Vec2D(0.0, 1.0)}
DEFAULT_MODE = "standard"
DEFAULT_ANGLEOFFSET = 0
DEFAULT_ANGLEORIENT = 1
def __init__(self, mode=DEFAULT_MODE):
self._angleOffset = self.DEFAULT_ANGLEOFFSET
self._angleOrient = self.DEFAULT_ANGLEORIENT
self._mode = mode
self.degree_to_radians = math.pi / 180
self.degrees()
self._mode = _CFG['mode']
self._setmode(mode)
TNavigator.reset(self)
def reset(self):
"""reset turtle navigation to its initial values
The derived class, which will call it directly and add its own
"""
self._position = (0.0, 0.0)
self._x = 0
self._y = 0
self._angle = 0
self._old_heading = 0
def _setmode(self, mode=None):
"""Set turtle-mode to 'standard', 'world' or 'logo'.
"""
if mode is None:
return self._mode
if mode not in ["standard", "logo", "world"]:
print(mode, "is an unknown mode; it will be ignored.")
return
self._mode = mode
if mode in ["standard", "world"]:
self._angleOffset = 0
self._angleOrient = 1
else: # mode == "logo":
self._angleOffset = -self._fullcircle/4.
self._angleOrient = 1
def _setDegreesPerAU(self, fullcircle):
"""Helper function for degrees() and radians()"""
self._fullcircle = fullcircle
self._degreesPerAU = 360/fullcircle
def degrees(self, fullcircle=360.0):
""" Set angle measurement units to degrees, or possibly other system.
"""
self._setDegreesPerAU(fullcircle)
def radians(self):
""" Set the angle measurement units to radians.
"""
self._setDegreesPerAU(2*math.pi)
def _rotate(self, angle):
"""Turn turtle counterclockwise by specified angle if angle > 0."""
pass
def _goto(self, x, y):
pass # implemented by derived class
def forward(self, distance):
"""Move the turtle forward by the specified distance.
"""
x1 = distance * cos(self._angle * self.degree_to_radians)
y1 = distance * sin(self._angle * self.degree_to_radians)
self._distance = distance
self._goto(self._x + x1, self._y + y1)
fd = forward
def back(self, distance):
"""Move the turtle backward by distance.
"""
x1 = -distance * cos(self._angle * self.degree_to_radians)
y1 = -distance * sin(self._angle * self.degree_to_radians)
self._distance = distance
self._goto(self._x + x1, self._y + y1)
backward = back
bk = back
def right(self, angle):
"""Turn turtle right by angle units.
"""
        angle *= self._degreesPerAU
self._angle += self.screen.y_points_down*angle
self._rotate_image(-angle)
rt = right
def left(self, angle):
"""Turn turtle left by angle units.
"""
        angle *= self._degreesPerAU
self._angle += -self.screen.y_points_down*angle
self._rotate_image(angle)
lt = left
def pos(self):
"""Return the turtle's current location (x,y), as a formatted tuple
"""
return FormattedTuple(self._x, self._y)
position = pos
def xcor(self):
""" Return the turtle's x coordinate.
"""
return self._x
def ycor(self):
""" Return the turtle's y coordinate
"""
return self._y
def goto(self, x, y=None):
"""Move turtle to an absolute position.
"""
if y is None:
            x, y = x
# distance only needed to calculate the duration of
# the animation which is based on "distance" and "speed" as well.
# We use the Manhattan distance here as it is *much* faster on Chrome,
# than using the proper distance with calls to math.sqrt, while
# giving acceptable results
#
# forward, backward, etc., call _goto directly with the distance
# given by the user
self._distance = abs(self._x - x) + abs(self._y - y)
self._goto(x, y)
setpos = goto
setposition = goto
def home(self):
"""Move turtle to the origin - coordinates (0,0), facing in the
default orientation
"""
self.goto(0, 0)
self.setheading(0)
def setx(self, x):
"""Set the turtle's first coordinate to x
"""
self._distance = abs(x - self._x)
self._goto(x, self._y)
def sety(self, y):
"""Set the turtle's second coordinate to y
"""
self._distance = abs(y - self._y)
self._goto(self._x, y)
def distance(self, x, y=None):
"""Return the distance from the turtle to (x,y) in turtle step units.
"""
if y is None:
assert isinstance(x, tuple)
x, y = x
return math.sqrt((self._x - x)**2 + (self._y - y)**2)
def towards(self, x, y=None):
"""Return the angle of the line from the turtle's position to (x, y).
"""
if y is None:
assert isinstance(x, tuple)
x, y = x
x, y = x - self._x, y - self._y
result = round(math.atan2(y, x)*180.0/math.pi, 10) % 360.0
result /= self._degreesPerAU
return (self._angleOffset + self._angleOrient*result) % self._fullcircle
def heading(self):
""" Return the turtle's current heading.
"""
angle = self._angle / self._degreesPerAU
return (self._angleOffset + self._angleOrient*angle) % self._fullcircle
def setheading(self, to_angle):
"""Set the orientation of the turtle to to_angle.
"""
self._rotate(to_angle - self._angle)
seth = setheading
def circle(self, radius, extent=None, steps=None):
""" Draw an approximate (arc) circle with given radius, using straight
line segments.
Arguments:
radius -- a number
extent (optional) -- a number
steps (optional) -- an integer
Draw a circle with given radius. The center is radius units left
of the turtle; extent - an angle - determines which part of the
circle is drawn. If extent is not given, draw the entire circle.
If extent is not a full circle, one endpoint of the arc is the
current pen position. Draw the arc in counterclockwise direction
if radius is positive, otherwise in clockwise direction. Finally
the direction of the turtle is changed by the amount of extent.
As the circle is approximated by an inscribed regular polygon,
steps determines the number of steps to use. If not given,
it will be calculated automatically. Maybe used to draw regular
polygons.
"""
speed = self.speed()
if extent is None:
extent = self._fullcircle
if steps is None:
frac = abs(extent)/self._fullcircle
steps = 1+int(min(11+abs(radius)/6.0, 59.0)*frac)
w = 1.0 * extent / steps
w2 = 0.5 * w
l = 2.0 * radius * math.sin(w2*math.pi/180.0*self._degreesPerAU)
if radius < 0:
l, w, w2 = -l, -w, -w2
self._rotate(w2)
for i in range(steps):
self.speed(speed)
self.forward(l)
self.speed(0)
self._rotate(w)
self._rotate(-w2)
self.speed(speed)
class TPen:
"""Drawing part of the Turtle.
"""
def __init__(self):
TPen._reset(self)
def _reset(self, pencolor=_CFG["pencolor"],
fillcolor=_CFG["fillcolor"]):
self._pensize = 1
self._shown = True
self._drawing = True
        self._pencolor = pencolor
        self._fillcolor = fillcolor
self._speed = 3
self._stretchfactor = (1., 1.)
def resizemode(self, rmode=None):
sys.stderr.write("Warning: TPen.resizemode() is not implemented.\n")
def pensize(self, width=None):
"""Set or return the line thickness.
"""
if width is None:
return self._pensize
self.pen(pensize=width)
width = pensize
def pendown(self):
"""Pull the pen down -- drawing when moving.
"""
if self._drawing:
return
self.pen(pendown=True)
pd = pendown
down = pendown
def penup(self):
"""Pull the pen up -- no drawing when moving.
"""
if not self._drawing:
return
self.pen(pendown=False)
pu = penup
up = penup
def isdown(self):
"""Return True if pen is down, False if it's up.
"""
return self._drawing
def speed(self, speed=None):
""" Return or set the turtle's speed.
Optional argument:
speed -- an integer in the range 0..10 or a speedstring (see below)
Set the turtle's speed to an integer value in the range 0 .. 10.
If no argument is given: return current speed.
If input is a number greater than 10 or smaller than 0.5,
speed is set to 0.
Speedstrings are mapped to speedvalues in the following way:
'fastest' : 0
'fast' : 10
'normal' : 6
'slow' : 3
'slowest' : 1
speeds from 1 to 10 enforce increasingly faster animation of
line drawing and turtle turning.
Attention:
speed = 0 : *no* animation takes place. forward/back makes turtle jump
and likewise left/right make the turtle turn instantly.
"""
speeds = {'fastest': 0, 'fast': 10, 'normal': 6, 'slow': 3, 'slowest': 1}
if speed is None:
return self._speed
if speed in speeds:
speed = speeds[speed]
elif 0.5 < speed < 10.5:
speed = int(round(speed))
else:
speed = 0
self.pen(speed=speed)
def color(self, *args):
"""Return or set the pencolor and fillcolor.
        IMPORTANT: this is very different from the CPython version.
        Colors are given as strings in any format recognized by a browser
(named color, rgb, rgba, hex, hsl, etc.)
Acceptable arguments:
no argument: returns (pencolor, fillcolor)
single string -> sets both pencolor and fillcolor to that value
two string arguments -> taken to be pencolor, fillcolor
tuple of two strings -> taken to be (pencolor, fillcolor)
"""
if args:
l = len(args)
if l == 1:
if isinstance(args[0], tuple):
pencolor = args[0][0]
fillcolor = args[0][1]
else:
pencolor = fillcolor = args[0]
elif l == 2:
pencolor, fillcolor = args
if not isinstance(pencolor, str) or not isinstance(fillcolor, str):
raise TurtleGraphicsError("bad color arguments: %s" % str(args))
self.pen(pencolor=pencolor, fillcolor=fillcolor)
else:
return self._pencolor, self._fillcolor
def pencolor(self, color=None):
""" Return or set the pencolor.
        IMPORTANT: this is very different from the CPython version.
        Colors are given as strings in any format recognized by a browser
(named color, rgb, rgba, hex, hsl, etc.)
"""
if color is not None:
if not isinstance(color, str):
raise TurtleGraphicsError("bad color arguments: %s" % str(color))
if color == self._pencolor:
return
self.pen(pencolor=color)
else:
return self._pencolor
def fillcolor(self, color=None):
""" Return or set the fillcolor.
        IMPORTANT: this is very different from the CPython version.
        Colors are given as strings in any format recognized by a browser
(named color, rgb, rgba, hex, hsl, etc.)
"""
if color is not None:
if not isinstance(color, str):
raise TurtleGraphicsError("bad color arguments: %s" % str(color))
if color == self._fillcolor:
return
self.pen(fillcolor=color)
else:
            return self._fillcolor
def showturtle(self):
"""Makes the turtle visible.
"""
if self._shown:
return
self.pen(shown=True)
self.left(0) # this will update the display to the correct rotation
st = showturtle
def hideturtle(self):
"""Makes the turtle invisible.
"""
if self._shown:
self.pen(shown=False)
ht = hideturtle
def isvisible(self):
"""Return True if the Turtle is shown, False if it's hidden.
"""
return self._shown
def pen(self, pen=None, **pendict):
"""Return or set the pen's attributes.
Arguments:
pen -- a dictionary with some or all of the below listed keys.
**pendict -- one or more keyword-arguments with the below
listed keys as keywords.
Return or set the pen's attributes in a 'pen-dictionary'
with the following key/value pairs:
"shown" : True/False
"pendown" : True/False
"pencolor" : color-string or color-tuple
"fillcolor" : color-string or color-tuple
"pensize" : positive number
"speed" : number in range 0..10
"""
_pd = {"shown": self._shown,
"pendown": self._drawing,
"pencolor": self._pencolor,
"fillcolor": self._fillcolor,
"pensize": self._pensize,
"speed": self._speed
}
if not (pen or pendict):
return _pd
if isinstance(pen, dict):
p = pen
else:
p = {}
p.update(pendict)
_p_buf = {}
for key in p:
_p_buf[key] = _pd[key]
if "pendown" in p:
self._drawing = p["pendown"]
if "pencolor" in p:
old_color = self._pencolor
self._pencolor = p["pencolor"]
previous_end, new_frame_id = self.screen._new_frame()
anim = svg.animate(Id=new_frame_id, begin=previous_end,
dur=_CFG["min_duration"], fill="freeze",
attributeName="stroke", attributeType="XML",
From=old_color, to=self._pencolor)
self.svg <= anim
if "pensize" in p:
self._pensize = p["pensize"]
if "fillcolor" in p:
old_color = self._fillcolor
self._fillcolor = p["fillcolor"]
previous_end, new_frame_id = self.screen._new_frame()
anim = svg.animate(Id=new_frame_id, begin=previous_end,
dur=_CFG["min_duration"], fill="freeze",
attributeName="fill", attributeType="XML",
From=old_color, to=self._fillcolor)
self.svg <= anim
if "speed" in p:
self._speed = p["speed"]
if "shown" in p:
old_shown = self._shown
if old_shown:
opacity = 0
old_opacity = 1
else:
opacity = 1
old_opacity = 0
previous_end, new_frame_id = self.screen._new_frame()
anim = svg.animate(Id=new_frame_id, begin=previous_end,
dur=_CFG["min_duration"], fill="freeze",
attributeName="opacity", attributeType="XML",
From=old_opacity, to=opacity)
self.svg <= anim
self.forward(0) # updates the turtle visibility on screen
self._shown = p["shown"]
# No RawTurtle/RawPen for this version, unlike CPython's; only Turtle/Pen
class Turtle(TPen, TNavigator):
"""Animation part of the Turtle.
Puts Turtle upon a TurtleScreen and provides tools for
its animation.
"""
_pen = None
screen = None
def __init__(self, shape=_CFG["shape"], visible=_CFG["visible"]):
self.screen = Screen()
TPen.__init__(self)
TNavigator.__init__(self, self.screen.mode())
self._poly = None
self._creatingPoly = False
self._fillitem = self._fillpath = None
self.name = shape
self.svg, rotation = self.screen.create_svg_turtle(self, name=shape)
self.svg.setAttribute("opacity", 0)
self._shown = False
if visible:
            self.showturtle() # will ensure that the turtle becomes visible at the appropriate time
self.screen._turtles.append(self)
self.rotation_correction = rotation
# apply correction to image orientation
self._old_heading = self.heading() + self.rotation_correction
speed = self.speed()
self.speed(0)
self.left(-self._angleOffset) # this will update the display to include the correction
self.speed(speed)
def reset(self):
"""Delete the turtle's drawings and restore its default values.
"""
## TODO: review this and most likely revise docstring.
TNavigator.reset(self)
TPen._reset(self)
self._old_heading = self.heading() + self.rotation_correction
self.home()
self.color(_CFG["pencolor"], _CFG["fillcolor"])
def clear(self):
sys.stderr.write("Warning: Turtle.clear() is not implemented.\n")
def shape(self, name=None):
"""Set turtle shape to shape with given name
/ return current shapename if no name is provided
"""
if name is None:
return self.name
_turtle = self._make_copy(name=name)
visible = self.isvisible()
if visible:
self.hideturtle()
self.screen.turtle_canvas <= self.svg
self.svg = _turtle
self.screen._turtles.append(self)
if visible:
self.showturtle()
def clearstamp(self, *args, **kwargs):
sys.stderr.write("Warning: Turtle.clearstamp() is not implemented.\n")
def clearstamps(self, *args, **kwargs):
sys.stderr.write("Warning: Turtle.clearstamps() is not implemented.\n")
def onclick(self, *args, **kwargs):
sys.stderr.write("Warning: Turtle.onclick() is not implemented.\n")
def ondrag(self, *args, **kwargs):
sys.stderr.write("Warning: Turtle.ondrag() is not implemented.\n")
def onrelease(self, *args, **kwargs):
sys.stderr.write("Warning: Turtle.onrelease() is not implemented.\n")
def undo(self, *args, **kwargs):
sys.stderr.write("Warning: Turtle.undo() is not implemented.\n")
def setundobuffer(self, *args, **kwargs):
sys.stderr.write("Warning: Turtle.setundobuffer() is not implemented.\n")
def undobufferentries(self, *args, **kwargs):
sys.stderr.write("Warning: Turtle.undobufferentries() is not implemented.\n")
def shapesize(self, *args, **kwargs):
sys.stderr.write("Warning: Turtle.shapesize() is not implemented.\n")
turtlesize = shapesize
def shearfactor(self, shear=None):
sys.stderr.write("Warning: Turtle.shearfactor() is not implemented.\n")
def settiltangle(self, angle):
sys.stderr.write("Warning: Turtle.settiltangle() is not implemented.\n")
def tiltangle(self, angle=None):
sys.stderr.write("Warning: Turtle.tiltangle() is not implemented.\n")
def tilt(self, angle):
sys.stderr.write("Warning: Turtle.tilt() is not implemented.\n")
def shapetransform(self, t11=None, t12=None, t21=None, t22=None):
sys.stderr.write("Warning: Turtle.shapetransform() is not implemented.\n")
def get_shapepoly(self):
sys.stderr.write("Warning: Turtle.get_shapepoly() is not implemented.\n")
def _goto(self, x, y):
"""Move the pen to the point end, thereby drawing a line
if pen is down. All other methods for turtle movement depend
on this one.
"""
begin, duration, _from, _to = self.screen._drawline(self,
((self._x, self._y), (x, y)),
(self._pencolor, self._fillcolor),
self._pensize, self._speed)
if self._shown:
self.svg <= svg.animateMotion(begin=begin, dur=_CFG["min_duration"],
fill="remove")
self.svg <= svg.animateMotion(From="%s,%s" % _from, to="%s,%s" % _to,
dur=duration, begin=begin, fill="freeze")
if self._fillpath is not None:
self._fillpath.append((x, y))
self._position = (x, y)
self._x = x
self._y = y
def _rotate(self, angle):
"""Turns pen clockwise by angle.
"""
        angle *= self._degreesPerAU
self._angle += -self.screen.y_points_down*angle
self._rotate_image(angle)
def _rotate_image(self, angle):
new_heading = self._old_heading - angle
if self.isvisible():
previous_end, new_frame_id = self.screen._new_frame()
if self._speed == 0:
duration = _CFG["min_duration"]
else:
duration = (abs(angle)/(self._speed * 360))
if duration < 0.001:
duration = _CFG["min_duration"]
else:
duration = "%6.3fs" % duration
self.svg <= svg.animateMotion(begin=previous_end,
dur=_CFG["min_duration"], fill="remove")
self.svg <= svg.animateTransform(attributeName="transform",
Id = new_frame_id,
type="rotate",
From=(self._old_heading, 0, 0),
to=(new_heading, 0, 0),
begin=previous_end,
dur=duration, fill="freeze")
self._old_heading = new_heading
def filling(self):
"""Return fillstate (True if filling, False else).
"""
return self._fillpath is not None
def begin_fill(self):
"""Called just before drawing a shape to be filled.
"""
self._fillpath = [(self._x, self._y)]
def end_fill(self):
"""Fill the shape drawn after the call begin_fill().
"""
if self.filling() and len(self._fillpath) > 2:
self.screen._drawpoly(self._fillpath, outline=self._pencolor,
                                  fill=self._fillcolor)
else:
print("No path to fill.")
self._fillpath = None
def dot(self, size=None, color=None):
"""Draw a filled circle with diameter size, using color.
"""
        self.screen._dot((self._x, self._y), size, color=color)
def _write(self, txt, align, font, color=None):
"""Performs the writing for write()
"""
if color is None:
color = self._pencolor
self.screen._write((self._x, self._y), txt, align, font, color)
def write(self, arg, align="left", font=("Arial", 8, "normal"), color=None):
"""Write text at the current turtle position.
Arguments:
arg -- info, which is to be written to the TurtleScreen; it will be
converted to a string.
align (optional) -- one of the strings "left", "center" or right"
font (optional) -- a triple (fontname, fontsize, fonttype)
"""
self._write(str(arg), align.lower(), font, color=color)
def begin_poly(self):
"""Start recording the vertices of a polygon.
"""
self._poly = [(self._x, self._y)]
self._creatingPoly = True
def end_poly(self):
"""Stop recording the vertices of a polygon.
"""
self._creatingPoly = False
def get_poly(self):
"""Return the lastly recorded polygon.
"""
# check if there is any poly?
if self._poly is not None:
return tuple(self._poly)
def getscreen(self):
"""Return the TurtleScreen object, the turtle is drawing on.
"""
return self.screen
def getturtle(self):
"""Return the Turtle object itself.
Only reasonable use: as a function to return the 'anonymous turtle'
"""
return self
getpen = getturtle
def _make_copy(self, name=None):
'''makes a copy of the current svg turtle, but possibly using a
different shape. This copy is then ready to be inserted
into a canvas.'''
if name is None:
name = self.name
# We recreate a copy of the existing turtle, possibly using a different
# name/shape; we set the opacity to
# 0 since there is no specific time associated with the creation of
# such an object: we do not want to show it early.
_turtle, rotation = self.screen.create_svg_turtle(self, name=name)
_turtle.setAttribute("opacity", 0)
_turtle.setAttribute("fill", self._fillcolor)
_turtle.setAttribute("stroke", self._pencolor)
# We use timed animations to get it with the proper location, orientation
# and appear at the desired time.
previous_end, new_frame_id = self.screen._new_frame()
x, y = self.screen._convert_coordinates(self._x, self._y)
_turtle <= svg.animateMotion(begin=previous_end, dur=_CFG["min_duration"],
fill="remove")
_turtle <= svg.animateMotion(Id=new_frame_id,
From="%s,%s" % (x, y), to="%s,%s" % (x, y),
dur=_CFG["min_duration"], begin=previous_end,
fill="freeze")
_turtle <= svg.animateTransform(attributeName="transform",
type="rotate",
From=(self._old_heading, 0, 0),
to=(self._old_heading, 0, 0),
begin=previous_end,
dur=_CFG["min_duration"], fill="freeze")
_turtle <= svg.animate(begin=previous_end,
dur=_CFG["min_duration"], fill="freeze",
attributeName="opacity", attributeType="XML",
From=0, to=1)
return _turtle
def stamp(self):
'''draws a permanent copy of the turtle at its current location'''
_turtle = self._make_copy(name=self.name)
self.screen.canvas <= _turtle
def clone(self):
"""Create and return a clone of the turtle.
"""
n = Turtle(self.name)
attrs = vars(self)
new_dict = {}
for attr in attrs:
if isinstance(getattr(self, attr), (int, str, float)):
new_dict[attr] = getattr(self, attr)
n.__dict__.update(**new_dict)
# ensure that visible characteristics are consistent with settings
if not n._shown:
            n._shown = True # otherwise, hideturtle() would have no effect
n.hideturtle()
n.left(0)
n.fd(0)
n.color(n.color())
return n
Pen = Turtle
def done():
Screen().show_scene()
show_scene = done
def replay_scene():
"Start playing an animation by 'refreshing' the canvas."
if (_CFG["turtle_canvas_id"] in document and
document[_CFG["turtle_canvas_id"]] is not None):
element = document[_CFG["turtle_canvas_id"]]
element.parentNode.removeChild(element)
show_scene()
def restart():
"For Brython turtle: clears the existing drawing and canvas"
_CFG.update(_cfg_copy)
Screen().reset()
Turtle._pen = None
if (_CFG["turtle_canvas_id"] in document and
document[_CFG["turtle_canvas_id"]] is not None):
element = document[_CFG["turtle_canvas_id"]]
element.parentNode.removeChild(element)
### Creating module-level functions based on Turtle and Screen methods
import inspect
def getmethparlist(ob):
"""Get strings describing the arguments for the given object
Returns a pair of strings representing function parameter lists
    including parentheses. The first string is suitable for use in
function definition and the second is suitable for use in function
call. The "self" parameter is not included.
"""
defText = callText = ""
# bit of a hack for methods - turn it into a function
# but we drop the "self" param.
# Try and build one for Python defined functions
args, varargs, varkw = inspect.getargs(ob.__code__)
items2 = args[1:]
realArgs = args[1:]
defaults = ob.__defaults__ or []
defaults = ["=%r" % (value,) for value in defaults]
defaults = [""] * (len(realArgs)-len(defaults)) + defaults
items1 = [arg + dflt for arg, dflt in zip(realArgs, defaults)]
if varargs is not None:
items1.append("*" + varargs)
items2.append("*" + varargs)
if varkw is not None:
items1.append("**" + varkw)
items2.append("**" + varkw)
defText = ", ".join(items1)
defText = "(%s)" % defText
callText = ", ".join(items2)
callText = "(%s)" % callText
return defText, callText
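# Example (added for illustration; not part of the original source): for a
# method such as TPen.pensize(self, width=None), getmethparlist returns
# ("(width=None)", "(width)") -- a parameter list usable in a "def" and a
# matching argument list usable in a call, with "self" dropped.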
_tg_screen_functions = ['addshape', 'bgcolor', 'bgpic', 'bye',
'clearscreen', 'colormode', 'delay', 'exitonclick', 'getcanvas',
'getshapes', 'listen', 'mainloop', 'mode', 'numinput',
'onkey', 'onkeypress', 'onkeyrelease', 'onscreenclick', 'ontimer',
'register_shape', 'resetscreen', 'screensize', 'setup',
'setworldcoordinates', 'textinput', 'title', 'tracer', 'turtles', 'update',
'window_height', 'window_width']
_tg_turtle_functions = ['back', 'backward', 'begin_fill', 'begin_poly', 'bk',
'circle', 'clear', 'clearstamp', 'clearstamps', 'clone', 'color',
'degrees', 'distance', 'dot', 'down', 'end_fill', 'end_poly', 'fd',
'fillcolor', 'filling', 'forward', 'get_poly', 'getpen', 'getscreen', 'get_shapepoly',
'getturtle', 'goto', 'heading', 'hideturtle', 'home', 'ht', 'isdown',
'isvisible', 'left', 'lt', 'onclick', 'ondrag', 'onrelease', 'pd',
'pen', 'pencolor', 'pendown', 'pensize', 'penup', 'pos', 'position',
'pu', 'radians', 'right', 'reset', 'resizemode', 'rt',
'seth', 'setheading', 'setpos', 'setposition', 'settiltangle',
'setundobuffer', 'setx', 'sety', 'shape', 'shapesize', 'shapetransform', 'shearfactor', 'showturtle',
'speed', 'st', 'stamp', 'tilt', 'tiltangle', 'towards',
'turtlesize', 'undo', 'undobufferentries', 'up', 'width',
'write', 'xcor', 'ycor']
__all__ = (_tg_screen_functions + _tg_turtle_functions +
['done', 'restart', 'replay_scene', 'Turtle', 'Screen'])
## The following mechanism makes all methods of Turtle and Screen available
## as functions. So we can enhance, change, add, delete methods to these
## classes and do not need to change anything here.
__func_body = """\
def {name}{paramslist}:
if {obj} is None:
{obj} = {init}
return {obj}.{name}{argslist}
"""
def _make_global_funcs(functions, cls, obj, init):
for methodname in functions:
try:
method = getattr(cls, methodname)
except AttributeError:
print("methodname missing:", methodname)
continue
pl1, pl2 = getmethparlist(method)
defstr = __func_body.format(obj=obj, init=init, name=methodname,
paramslist=pl1, argslist=pl2)
exec(defstr, globals())
_make_global_funcs(_tg_turtle_functions, Turtle, 'Turtle._pen', 'Turtle()')
_make_global_funcs(_tg_screen_functions, Screen, 'Turtle.screen', 'Screen()')
| jonathanverner/brython | www/src/Lib/turtle.py | Python | bsd-3-clause | 53,120 | 0.004085 |
#!/usr/bin/env python2
"""
:Synopsis: Specialized Class for installing essential components.
:Install-logs: All logs are saved in the directory **hackademic-logs**.
"""
import os
import sys
import subprocess
from logging_manager import LoggingManager
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__),
'..')))
__author__ = 'AnirudhAnand (a0xnirudh) <[email protected]>'
class Install:
def __init__(self):
self.user = os.environ['USER']
self.install = LoggingManager()
self.file_location = os.path.abspath(os.path.dirname(__file__))
self.pip_install_tools = self.file_location + "/pip.txt"
return
def run_command(self, command):
print "[+] Running the command: %s" % command
os.system(command)
def install_docker(self):
"""
        This function will install Docker; if Docker is already installed, it
        will skip the installation.
All logs during the install are saved to hackademic_logs/install.logs
"""
print("[+] Installing Docker and necessary supporting plugins")
print("[+] This could take some time. Please wait ...")
try:
subprocess.check_call("docker -v", stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, shell=True)
except (OSError, subprocess.CalledProcessError):
try:
subprocess.check_call("wget -qO- https://get.docker.com/ | sudo sh",
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, shell=True)
subprocess.check_call("sudo usermod -aG docker " + self.user,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, shell=True)
except (OSError, subprocess.CalledProcessError) as exception:
self.install.install_log('Installation Error: \n' +
str(exception))
exit("[+] Unknown error happened. Check logs for more details")
def docker_image(self):
"""
        This will pull the latest Ubuntu-based image (~300 MB) from the
        phusion/baseimage repo. We use this image because we need an init
        process: we run more than one process, and without init they could
        become zombie processes. So an image configured with init is necessary.
"""
print("[+] Docker has successfully installed.")
print("[+] Now Pulling Image. This could take sometime. Please wait..")
try:
subprocess.check_call("docker pull phusion/baseimage:latest",
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, shell=True)
except (OSError, subprocess.CalledProcessError) as exception:
self.install.install_log('Image File Download Error: \n' +
str(exception))
exit("[+] Image Download Interrupted. Check logs for more details")
def build_docker(self):
"""
This will build the docker image with the specified Dockerfile which
adds all the challenges and install necessary applications for running
the challenges.
        Once the build is over, try the command "docker images"; if you can
        see an image named 'hackademic', the installation was successful.
"""
print("[+] Building and Configuring Docker")
subprocess.call("docker rmi -f hackademic", stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, shell=True)
try:
subprocess.check_call("docker build -t hackademic "+self.file_location,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, shell=True)
except (OSError, subprocess.CalledProcessError) as exception:
self.install.install_log('Docker Build error: \n' + str(exception))
exit("[+] Docker Build Interrupted. Check logs for more details..")
def install_pip_tools(self):
print("[+] Installing additional requirements")
install_file = open(self.pip_install_tools, "r")
for i in install_file.readlines():
self.run_command("sudo -E pip install --upgrade " + i)
def install_finish(self):
print("[+] Installation is Successful. Happy hacking !")
def main():
docker = Install()
docker.install_docker()
docker.docker_image()
docker.build_docker()
docker.install_pip_tools()
docker.install_finish()
if __name__ == '__main__':
main()
| a0xnirudh/hackademic | ContainerManager/install.py | Python | gpl-3.0 | 4,749 | 0.000421 |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from telemetry.page import page
from telemetry.page import page_set
from telemetry.page import page_test
# pylint: disable=W0401,W0614
from telemetry.page.actions.all_page_actions import *
from webgl_conformance import WebglConformanceValidator
from webgl_conformance import conformance_harness_script
from webgl_conformance import conformance_path
robustness_harness_script = conformance_harness_script + r"""
var robustnessTestHarness = {};
robustnessTestHarness._contextLost = false;
robustnessTestHarness.initialize = function() {
var canvas = document.getElementById('example');
canvas.addEventListener('webglcontextlost', function() {
robustnessTestHarness._contextLost = true;
});
}
robustnessTestHarness.runTestLoop = function() {
// Run the test in a loop until the context is lost.
main();
if (!robustnessTestHarness._contextLost)
window.requestAnimationFrame(robustnessTestHarness.runTestLoop);
else
robustnessTestHarness.notifyFinished();
}
robustnessTestHarness.notifyFinished = function() {
// The test may fail in unpredictable ways depending on when the context is
// lost. We ignore such errors and only require that the browser doesn't
// crash.
webglTestHarness._allTestSucceeded = true;
// Notify test completion after a delay to make sure the browser is able to
// recover from the lost context.
setTimeout(webglTestHarness.notifyFinished, 3000);
}
window.confirm = function() {
robustnessTestHarness.initialize();
robustnessTestHarness.runTestLoop();
return false;
}
window.webglRobustnessTestHarness = robustnessTestHarness;
"""
class WebglRobustnessPage(page.Page):
def __init__(self, page_set, base_dir):
super(WebglRobustnessPage, self).__init__(
url='file://extra/lots-of-polys-example.html',
page_set=page_set,
base_dir=base_dir)
self.script_to_evaluate_on_commit = robustness_harness_script
def RunNavigateSteps(self, action_runner):
action_runner.NavigateToPage(self)
action_runner.WaitForJavaScriptCondition('webglTestHarness._finished')
class WebglRobustness(test.Test):
test = WebglConformanceValidator
def CreatePageSet(self, options):
ps = page_set.PageSet(
file_path=conformance_path,
user_agent_type='desktop',
serving_dirs=[''])
ps.AddPage(WebglRobustnessPage(ps, ps.base_dir))
return ps
| TeamEOS/external_chromium_org | content/test/gpu/gpu_tests/webgl_robustness.py | Python | bsd-3-clause | 2,603 | 0.002305 |
# -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2014, 2015 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
from invenio_workflows.definitions import WorkflowBase
class HarvestingWorkflowBase(WorkflowBase):
"""Base harvesting definition."""
@staticmethod
def get_title(bwo, **kwargs):
"""Return the value to put in the title column of HoldingPen."""
args = bwo.get_extra_data().get("args", {})
return "Summary of {0} harvesting from {1} to {2}".format(
args.get("workflow", "unknown"),
args.get("from_date", "unknown"),
args.get("to_date", "unknown"),
)
@staticmethod
def get_description(bwo, **kwargs):
"""Return the value to put in the title column of HoldingPen."""
return "No description. See log for details."
@staticmethod
def formatter(obj, **kwargs):
"""Format the object."""
return "No data. See log for details."
| jalavik/inspire-next | inspire/modules/harvester/definitions.py | Python | gpl-2.0 | 1,759 | 0 |
# Nix
# Copyright (c) 2017 Mark Biciunas.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class Error(Exception):
"""Base class for exceptions in this module."""
pass
class NixError(Error):
def __init__(self, message: str, exception: Exception=None) -> None:
self._message = message
self._exception = exception
def get_message(self) -> str:
return self._message
def get_exception(self) -> Exception:
return self._exception
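# Minimal usage sketch (added for illustration; not part of the original
# module):
#
#     try:
#         raise NixError("unable to load configuration", IOError("nix.cfg"))
#     except NixError as error:
#         print(error.get_message())      # -> unable to load configuration
#         print(error.get_exception())    # -> the wrapped IOError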
| mbiciunas/nix | src/utility/nix_error.py | Python | gpl-3.0 | 1,070 | 0.001869 |
import json
import unittest
from io import BytesIO
import yaml
from django.http import HttpRequest
from django.test import RequestFactory, SimpleTestCase
from django.utils.encoding import force_bytes
from .duf_test_case import DUFTestCase
from django_url_framework.decorators import auto, json_action, yaml_action
from django_url_framework.controller import ActionController
class TestController(DUFTestCase):
def test_default_renderer_template(self):
action_response = {'data':'foo'}
class TestTemplateRendererController(ActionController):
def test_action(self, request):
return action_response
response = self._request_and_test(TestTemplateRendererController, "test_action",
expected_response="HTML:{data}".format(**action_response))
self.assertEqual(response['Content-Type'],"text/html; charset=utf-8")
def test_template_renderer_adds_request_to_template_context(self):
action_response = {'data':'foo'}
class TestTemplateRendererAddsRequestController(ActionController):
def test_has_request(self, request):
return action_response
response = self._request_and_test(TestTemplateRendererAddsRequestController, "test_has_request",
expected_response="This template <WSGIRequest: GET '/test/json/'>")
self.assertEqual(response['Content-Type'],"text/html; charset=utf-8")
def test_auto_json_yaml_str(self):
expected = {'ab':"C",1:"2",None:False}
yaml_flow = True
def _run_test(accept, expect, **kwargs):
class TestTestController(ActionController):
yaml_default_flow_style=yaml_flow
@auto()
def test_action(self, request):
return expected
self._request_and_test(TestTestController, "test_action", expected_response=expect, HTTP_ACCEPT=accept)
_run_test("application/json", json.dumps(expected))
_run_test("application/yaml", yaml.dump(expected, default_flow_style=yaml_flow).strip())
yaml_flow = False
_run_test("application/yaml", yaml.dump(expected, default_flow_style=False), flow_style=yaml_flow)
_run_test("application/yaml, application/json", yaml.dump(expected, default_flow_style=False), flow_style=yaml_flow)
_run_test(["application/yaml","application/json"], yaml.dump(expected, default_flow_style=False), flow_style=yaml_flow)
_run_test("application/json, application/yaml", json.dumps(expected))
_run_test("text/plain", "{None: False, 1: '2', 'ab': 'C'}")
def test_auto_decorator_with_params(self):
expected = {'ab':"C",1:"2",None:False}
class TestDecoratorWithParamsController(ActionController):
@auto(yaml_default_flow_style=True)
def test_action(self, request):
return expected
self._request_and_test(TestDecoratorWithParamsController, "test_action",
HTTP_ACCEPT="application/yaml",
expected_response=yaml.dump(expected,default_flow_style=True))
def test_json_decorator(self):
expected = {'ab':"C",1:"2",None:False}
class TestJSONDecoratorController(ActionController):
@json_action()
def test_action(self, request):
return expected
self._request_and_test(TestJSONDecoratorController, "test_action", expected_response=json.dumps(expected))
def test_before_filter_redirect(self):
returned = {"foo":"bar"}
class TestPrintController(ActionController):
def _before_filter(self, request):
return self._go(to_url="/baz/")
@json_action()
def test_action(self, request):
return returned
response = self._request_and_test(TestPrintController, "test_action", status_code=302)
self.assertEqual(response['Location'], "/baz/")
def test_before_filter_none(self):
returned = {"foo":"bar"}
class TestPrintController(ActionController):
def _before_filter(self, request):
return None
@json_action()
def test_action(self, request):
return returned
self._request_and_test(TestPrintController, "test_action", expected_response=json.dumps(returned))
def test_before_filter_dict(self):
returned = {"foo":"bar"}
class TestPrintController(ActionController):
def _before_filter(self, request):
return {"add":123}
@json_action()
def test_action(self, request):
return returned
self._request_and_test(TestPrintController, "test_action", expected_response=json.dumps({"foo":"bar", "add":123}))
def test_print(self):
expected = [1,2,3,4,5]
def _run_test(input, expect, **kwargs):
class TestPrintController(ActionController):
def test_action(self, request):
return self._print(input)
self._request_and_test(TestPrintController, "test_action", expected_response=expect)
_run_test(expected, str(expected))
_run_test("Bajs", "Bajs")
_run_test({"a":"b"}, str({"a":"b"}))
def test_as_yaml(self):
input = {'ab':"C",1:"2",None:False}
class TestAsYamlController(ActionController):
def test_action(self, request):
return self._as_yaml(input, default_flow_style=True)
self._request_and_test(TestAsYamlController, "test_action", expected_response=yaml.dump(input, default_flow_style=True))
def test_as_json(self):
input = {'ab':"C",1:"2",None:False}
class TestAsJsonController(ActionController):
def test_action(self, request):
return self._as_json(input)
self._request_and_test(TestAsJsonController, "test_action", expected_response=json.dumps(input))
def test_redirect_action(self):
class RedirectController(ActionController):
@json_action()
def second_action(self, request):
return {}
def redirect(self, request):
return self._go(to_url="/temporary/")
def redirect_permanent(self, request):
return self._go(to_url="/permanent/", permanent=True)
rf = RequestFactory()
request = rf.get('/redirecting/')
controller = RedirectController(site=None, request=request, helper_class=None, url_params=None)
with self.subTest('302'):
response = controller._call_action('redirect')
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], "/temporary/")
with self.subTest('301'):
response = controller._call_action('redirect_permanent')
self.assertEqual(response.status_code, 301)
self.assertEqual(response['Location'], "/permanent/")
def test_yaml_decorator(self):
expected = {'ab':"C",1:"2",None:False}
class TestYamlDecoratorController(ActionController):
yaml_default_flow_style=True
@yaml_action()
def test_action(self, request):
return expected
self._request_and_test(TestYamlDecoratorController, "test_action", expected_response=yaml.dump(expected,default_flow_style=True))
def test_yaml_decorator_with_flow_style(self):
expected = {'ab':"C",1:"2",None:False}
class TestYamlWithFlowController(ActionController):
@yaml_action(default_flow_style=True)
def test_action(self, request):
return expected
self._request_and_test(TestYamlWithFlowController, "test_action", expected_response=yaml.dump(expected,default_flow_style=True))
def test_yaml_decorator_with_flow_style_false(self):
input = {'ab':"C",1:"2",None:False}
class TestYamlDecoWithFalseFlowController(ActionController):
@yaml_action(default_flow_style=False)
def test_action(self, request):
return input
self._request_and_test(TestYamlDecoWithFalseFlowController, "test_action", expected_response=yaml.dump(input,default_flow_style=False))
def test_after_filter(self):
input = {'ab':"C",1:"2",None:False}
after = {'c':'z'}
class TestAfterFilterController(ActionController):
def _after_filter(self, request):
return after
@json_action()
def test_action(self, request):
return input
copied = input.copy()
copied.update(after)
self._request_and_test(
TestAfterFilterController,
"test_action",
expected_response=json.dumps(copied)
)
def test_after_filter_can_access_context(self):
"""
This verifies that `_after_filter` is run, that it has access to the
context that was returned by an `action`, and that `_after_filter` can modify
the context before returning it to the client.
after_filter takes value `foo` from our dictionary,
and assigns it to key `bar`. It should also replace the original
`foo` value with `bazinga`
:return:
"""
input = {'foo':"123"}
class TestAfterFilterContextController(ActionController):
def _after_filter(self, request):
after = {
'bar': self._template_context['foo'],
'foo': 'bazinga'
}
return after
@json_action()
def test_action(self, request):
return input
self._request_and_test(
TestAfterFilterContextController,
"test_action",
expected_response=json.dumps({"foo":'bazinga',"bar":"123"}))
def test_tuple_response_status_code(self):
expected = "HAIHAIHAI"
class TupleController(ActionController):
@json_action()
def three_three(self, request):
return expected, 333
rf = RequestFactory()
request = rf.get('/three_three/')
controller = TupleController(site=None, request=request, helper_class=None, url_params=None)
response = controller._call_action('three_three')
self.assertEqual(response.status_code, 333)
self.assertEqual(response.content.decode('utf8'), json.dumps(expected))
def test_as_json_tuple_response_status_code(self):
expected = "HAIHAIHAI"
class TupleController(ActionController):
def three_three(self, request):
return self._as_json(expected), 333
self._request_and_test(TupleController, "three_three", json.dumps(expected), 333)
def test_as_json_param_response_status_code(self):
expected = "HAIHAIHAI"
class TupleController(ActionController):
def three_three(self, request):
return self._as_json(expected, status_code=333)
self._request_and_test(TupleController, "three_three", json.dumps(expected), 333)
def test_param_tuple_status_code(self):
expected = "HAIHAIHAI"
class TupleController(ActionController):
def three_three(self, request):
return self._print(expected),334
self._request_and_test(TupleController, "three_three", expected, 334)
def test_as_json_param_and_tuple_response_status_code(self):
expected = "HAIHAIHAI"
class TupleController(ActionController):
def three_three(self, request):
return self._as_json(expected, status_code=333), 444
self._request_and_test(TupleController, "three_three", json.dumps(expected), 333)
| zeraien/django-url-framework | tests/test_controller_action_renderers.py | Python | mit | 11,896 | 0.010592 |
#encoding: utf-8
from responsemodule import IModule
import urllib
import httplib2
import socket
import xmltodict
import re
'''
Example config
[walpha]
app_id=1LALAL-2LALA3LAL4
timeout=6
'''
settings_name_g = u'walpha'
api_key_g = u'app_id'
timeout_g = u'timeout'
class Wolfram(IModule):
def __init__(self):
self.settings = {api_key_g: None,
timeout_g: None}
self.shorten_url = None
self.last_response = u''
def uses_settings(self):
return True
def get_command_patterns(self):
return [('^\.(c|wa) (\S.*)', self.wolfAlpha)]
def get_settings_name(self):
return settings_name_g
def get_configuration(self):
return [(key, value) for (key, value) in self.settings.items() if not value is None]
def set_configuration(self, settings):
# fetch the settings
for key, value in settings.items():
if key in settings:
self.settings[key] = value
# signal when the module wasn't properly configured
try:
if self.settings[api_key_g] is None or self.settings[timeout_g] is None:
raise ValueError
self.settings[timeout_g] = int(self.settings[timeout_g])
except ValueError:
raise RuntimeError('Couldn\'t configure the module')
def set_url_shortener(self, shortener):
self.shorten_url = shortener.shorten_url
# Functions that are needed for queries
def wolfAlpha(self, nick, message, channel):
result = self.__fetch_wolfAlpha(message)
if result and result != self.last_response:
self.last_response = result
return result.encode('utf-8', errors='ignore')
def __fetch_wolfAlpha(self, message):
        query = re.sub(r'^\.(c|wa)', u'', message.decode('utf-8'), flags=re.UNICODE).encode('utf-8')
query = query.strip(' \t\n\r')
query = urllib.quote(query)
queryurl = u'http://api.wolframalpha.com/v2/query?input={q}&appid={key}&format=plaintext&parsetimeout={timeout}&formattimeout={timeout}'.format(
q = query,
key = self.settings[api_key_g],
timeout = self.settings[timeout_g])
# construct the url and shorten it if we can
url = u'http://www.wolframalpha.com/input/?i={q}'.format(q = query)
if not self.shorten_url is None:
url = self.shorten_url(url)
try:
sock = httplib2.Http(timeout=self.settings[timeout_g])
headers, xml_response = sock.request(queryurl)
if headers[u'status'] in (200, '200'):
response = xmltodict.parse(xml_response)
int_found = False
res_found = False
interpretation = u''
result = u''
# This statement throws an IndexError whenever wolframalpha cannot interpret the input
pods = response[u'queryresult'][u'pod']
                if response[u'queryresult'][u'@numpods'] == u'1':
pods = [pods]
for pod in pods:
# Check if we can identify pods and they have information where we can fetch it
if u'@id' in pod.keys() and \
                        pod[u'@numsubpods'] == u'1' and u'plaintext' in pod[u'subpod'].keys():  # xmltodict yields strings, so compare against u'1'
if pod[u'@id'] == u'Input':
interpretation = pod[u'subpod'][u'plaintext']
int_found = True
elif pod[u'@id'] == u'Result':
result = pod[u'subpod'][u'plaintext']
res_found = True
if int_found and res_found:
break
if not int_found:
interpretation = response[u'queryresult'][u'pod'][0][u'subpod'][u'plaintext']
if not res_found:
ismain = lambda d: u'@primary' in d.keys()
mainresult = filter(ismain, response[u'queryresult'][u'pod'])[0][u'subpod']
result = mainresult[u'plaintext']
return u'{inter}: {res} -- {link}'.format(
inter = interpretation,
res = result,
link = url)
except (socket.timeout, KeyError, TypeError, IndexError):
return u'{}'.format(url)
| undu/query-bot | wolfram/wolfram.py | Python | mpl-2.0 | 4,435 | 0.006764 |
from collections import namedtuple
knapsack_item = namedtuple("knapsack_item", ["weight", "value"])
max_weight = 15
items = [
knapsack_item(weight=2, value=2),
knapsack_item(weight=1, value=2),
knapsack_item(weight=12, value=4),
knapsack_item(weight=1, value=1),
knapsack_item(weight=4, value=10),
]
# max_weight * use n items -> optimal value
optimal_solution = [[0] * (len(items) + 1) for _ in range(max_weight + 1)]
for used_items in range(1, len(items) + 1):
for max_weight_index in range(1, max_weight + 1):
# we add the used_items'th + 1 item to the knapsack
item_to_add = items[used_items - 1]
if item_to_add.weight <= max_weight_index: # can we add this item?
# yes we can
optimal_value_with_new_item = item_to_add.value + optimal_solution[max_weight_index - item_to_add.weight][used_items - 1]
optimal_value_without_new_item = optimal_solution[max_weight_index][used_items - 1]
optimal_solution[max_weight_index][used_items] = max(optimal_value_with_new_item, optimal_value_without_new_item)
else:
# no it is too heavy
optimal_solution[max_weight_index][used_items] = optimal_solution[max_weight_index][used_items - 1]
# find items
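# Walk the DP table backwards: decrement the item column while the optimal
# value is unchanged; the first column where the value differs identifies the
# item that was packed, so take it and shrink the remaining weight budget.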
items_to_take = []
value = optimal_solution[max_weight][len(items)]
current_max_weight = max_weight
current_items = len(items)
while value > 0:
while optimal_solution[current_max_weight][current_items] == value:
current_items -= 1
current_max_weight -= items[current_items].weight
value = optimal_solution[current_max_weight][current_items]
items_to_take.append(items[current_items])
print("items:")
for item in items: print(item)
print("max weight:", max_weight)
print("table: max_weight * use n items -> optimal value")
for row in optimal_solution: print(row)
print("optimal value:", optimal_solution[-1][-1])
print("items to take:")
for item in items_to_take: print(item)
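# Small self-check appended here (not in the original script): the
# reconstructed selection must respect the weight budget and reproduce the
# optimal value recorded in the table.
assert sum(item.weight for item in items_to_take) <= max_weight
assert sum(item.value for item in items_to_take) == optimal_solution[-1][-1]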
| orion-42/numerics-physics-stuff | knapsack.py | Python | mit | 1,976 | 0.004049 |
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import unsmuggle_url
class JWPlatformIE(InfoExtractor):
_VALID_URL = r'(?:https?://(?:content\.jwplatform|cdn\.jwplayer)\.com/(?:(?:feed|player|thumb|preview)s|jw6|v2/media)/|jwplatform:)(?P<id>[a-zA-Z0-9]{8})'
_TESTS = [{
'url': 'http://content.jwplatform.com/players/nPripu9l-ALJ3XQCI.js',
'md5': 'fa8899fa601eb7c83a64e9d568bdf325',
'info_dict': {
'id': 'nPripu9l',
'ext': 'mov',
'title': 'Big Buck Bunny Trailer',
'description': 'Big Buck Bunny is a short animated film by the Blender Institute. It is made using free and open source software.',
'upload_date': '20081127',
'timestamp': 1227796140,
}
}, {
'url': 'https://cdn.jwplayer.com/players/nPripu9l-ALJ3XQCI.js',
'only_matching': True,
}]
@staticmethod
def _extract_url(webpage):
urls = JWPlatformIE._extract_urls(webpage)
return urls[0] if urls else None
@staticmethod
def _extract_urls(webpage):
return re.findall(
r'<(?:script|iframe)[^>]+?src=["\']((?:https?:)?//(?:content\.jwplatform|cdn\.jwplayer)\.com/players/[a-zA-Z0-9]{8})',
webpage)
def _real_extract(self, url):
url, smuggled_data = unsmuggle_url(url, {})
self._initialize_geo_bypass({
'countries': smuggled_data.get('geo_countries'),
})
video_id = self._match_id(url)
json_data = self._download_json('https://cdn.jwplayer.com/v2/media/' + video_id, video_id)
return self._parse_jwplayer_data(json_data, video_id)
| spvkgn/youtube-dl | youtube_dl/extractor/jwplatform.py | Python | unlicense | 1,720 | 0.002326 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for MultitaskOptimizerWrapper."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import six
from tensorflow.contrib.opt.python.training import multitask_optimizer_wrapper
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import momentum
class MultitaskOptimizerWrapperTest(test.TestCase):
"""Tests for the multitask optimizer wrapper.
"""
def testWrapper(self):
with self.test_session():
var0 = variables.Variable([1.0, 2.0], dtype=dtypes.float32)
var1 = variables.Variable([3.0, 4.0], dtype=dtypes.float32)
grads0 = constant_op.constant([0.1, 0.1], dtype=dtypes.float32)
grads1 = constant_op.constant([0.01, 0.01], dtype=dtypes.float32)
grads_allzero = constant_op.constant([0.0, 0.0], dtype=dtypes.float32)
mom_opt_impl = momentum.MomentumOptimizer(learning_rate=2.0, momentum=0.9)
mom_opt = multitask_optimizer_wrapper.MultitaskOptimizerWrapper(
mom_opt_impl)
mom_update = mom_opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
mom_update_partial = mom_opt.apply_gradients(
zip([grads_allzero, grads1], [var0, var1]))
mom_update_no_action = mom_opt.apply_gradients(
zip([grads_allzero, grads_allzero], [var0, var1]))
self.evaluate(variables.global_variables_initializer())
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], self.evaluate(var0))
self.assertAllClose([3.0, 4.0], self.evaluate(var1))
self.assertEqual(["momentum"], mom_opt.get_slot_names())
slot0 = mom_opt.get_slot(var0, "momentum")
      self.assertEqual(slot0.get_shape(), var0.get_shape())
slot1 = mom_opt.get_slot(var1, "momentum")
      self.assertEqual(slot1.get_shape(), var1.get_shape())
# Step 1: normal momentum update.
self.evaluate(mom_update)
# Check that the momentum accumulators have been updated.
self.assertAllCloseAccordingToType(
np.array([0.1, 0.1]), self.evaluate(slot0))
self.assertAllCloseAccordingToType(
np.array([0.01, 0.01]), self.evaluate(slot1))
# Check that the parameters have been updated.
self.assertAllCloseAccordingToType(
np.array([1.0 - (0.1 * 2.0), 2.0 - (0.1 * 2.0)]), self.evaluate(var0))
self.assertAllCloseAccordingToType(
np.array([3.0 - (0.01 * 2.0), 4.0 - (0.01 * 2.0)]),
self.evaluate(var1))
# Step 2: momentum update that changes only slot1 but not slot0.
self.evaluate(mom_update_partial)
# Check that only the relevant momentum accumulator has been updated.
self.assertAllCloseAccordingToType(
np.array([0.1, 0.1]), self.evaluate(slot0))
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.01 + 0.01), (0.9 * 0.01 + 0.01)]),
self.evaluate(slot1))
# Step 3: momentum update that does not change anything.
self.evaluate(mom_update_no_action)
# Check that the momentum accumulators have *NOT* been updated.
self.assertAllCloseAccordingToType(
np.array([0.1, 0.1]), self.evaluate(slot0))
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.01 + 0.01), (0.9 * 0.01 + 0.01)]),
self.evaluate(slot1))
def testGradientClipping(self):
with self.test_session():
var0 = variables.Variable([1.0, 2.0], dtype=dtypes.float32)
var1 = variables.Variable([3.0, 4.0], dtype=dtypes.float32)
var2 = variables.Variable([3.0, 4.0], dtype=dtypes.float32)
var3 = variables.Variable([3.0, 4.0], dtype=dtypes.float32)
grads0 = constant_op.constant([10.0, 15.0], dtype=dtypes.float32)
grads1 = constant_op.constant([0.0, 5.0], dtype=dtypes.float32)
grads2 = constant_op.constant([0.0, 0.0], dtype=dtypes.float32)
grads3 = None
varlist = [var0, var1, var2, var3]
gradients = [grads0, grads1, grads2, grads3]
clipped_gradvars, global_norm = (
multitask_optimizer_wrapper.clip_gradients_by_global_norm(
six.moves.zip(gradients, varlist), clip_norm=1.0))
clipped_grads = list(six.moves.zip(*clipped_gradvars))[0]
reference_global_norm = np.sqrt(np.sum(np.square([10.0, 15.0, 0.0, 5.0])))
self.assertAllCloseAccordingToType(
self.evaluate(global_norm), reference_global_norm)
self.assertAllCloseAccordingToType(
self.evaluate(clipped_grads[2]), np.array([0., 0.]))
self.assertEqual(clipped_grads[3], None)
if __name__ == "__main__":
test.main()
| drpngx/tensorflow | tensorflow/contrib/opt/python/training/multitask_optimizer_wrapper_test.py | Python | apache-2.0 | 5,436 | 0.011038 |
# Copyright (c) 2017-2019 Uber Technologies, Inc.
# SPDX-License-Identifier: Apache-2.0
import pyro.distributions.torch_patch # noqa F403
from pyro.distributions.torch import * # noqa F403
# isort: split
from pyro.distributions.affine_beta import AffineBeta
from pyro.distributions.asymmetriclaplace import (
AsymmetricLaplace,
SoftAsymmetricLaplace,
)
from pyro.distributions.avf_mvn import AVFMultivariateNormal
from pyro.distributions.coalescent import (
CoalescentRateLikelihood,
CoalescentTimes,
CoalescentTimesWithRate,
)
from pyro.distributions.conditional import (
ConditionalDistribution,
ConditionalTransform,
ConditionalTransformedDistribution,
ConditionalTransformModule,
)
from pyro.distributions.conjugate import (
BetaBinomial,
DirichletMultinomial,
GammaPoisson,
)
from pyro.distributions.delta import Delta
from pyro.distributions.diag_normal_mixture import MixtureOfDiagNormals
from pyro.distributions.diag_normal_mixture_shared_cov import (
MixtureOfDiagNormalsSharedCovariance,
)
from pyro.distributions.distribution import Distribution
from pyro.distributions.empirical import Empirical
from pyro.distributions.extended import ExtendedBetaBinomial, ExtendedBinomial
from pyro.distributions.folded import FoldedDistribution
from pyro.distributions.gaussian_scale_mixture import GaussianScaleMixture
from pyro.distributions.hmm import (
DiscreteHMM,
GammaGaussianHMM,
GaussianHMM,
GaussianMRF,
IndependentHMM,
LinearHMM,
)
from pyro.distributions.improper_uniform import ImproperUniform
from pyro.distributions.inverse_gamma import InverseGamma
from pyro.distributions.lkj import LKJ, LKJCorrCholesky
from pyro.distributions.logistic import Logistic, SkewLogistic
from pyro.distributions.mixture import MaskedMixture
from pyro.distributions.multivariate_studentt import MultivariateStudentT
from pyro.distributions.omt_mvn import OMTMultivariateNormal
from pyro.distributions.one_one_matching import OneOneMatching
from pyro.distributions.one_two_matching import OneTwoMatching
from pyro.distributions.ordered_logistic import OrderedLogistic
from pyro.distributions.polya_gamma import TruncatedPolyaGamma
from pyro.distributions.projected_normal import ProjectedNormal
from pyro.distributions.rejector import Rejector
from pyro.distributions.relaxed_straight_through import (
RelaxedBernoulliStraightThrough,
RelaxedOneHotCategoricalStraightThrough,
)
from pyro.distributions.sine_bivariate_von_mises import SineBivariateVonMises
from pyro.distributions.sine_skewed import SineSkewed
from pyro.distributions.softlaplace import SoftLaplace
from pyro.distributions.spanning_tree import SpanningTree
from pyro.distributions.stable import Stable
from pyro.distributions.torch import __all__ as torch_dists
from pyro.distributions.torch_distribution import (
ExpandedDistribution,
MaskedDistribution,
TorchDistribution,
)
from pyro.distributions.torch_transform import ComposeTransformModule, TransformModule
from pyro.distributions.unit import Unit
from pyro.distributions.util import (
enable_validation,
is_validation_enabled,
validation_enabled,
)
from pyro.distributions.von_mises_3d import VonMises3D
from pyro.distributions.zero_inflated import (
ZeroInflatedDistribution,
ZeroInflatedNegativeBinomial,
ZeroInflatedPoisson,
)
from . import constraints, kl, transforms
__all__ = [
"AffineBeta",
"AsymmetricLaplace",
"AVFMultivariateNormal",
"BetaBinomial",
"CoalescentRateLikelihood",
"CoalescentTimes",
"CoalescentTimesWithRate",
"ComposeTransformModule",
"ConditionalDistribution",
"ConditionalTransform",
"ConditionalTransformModule",
"ConditionalTransformedDistribution",
"Delta",
"DirichletMultinomial",
"DiscreteHMM",
"Distribution",
"Empirical",
"ExpandedDistribution",
"ExtendedBetaBinomial",
"ExtendedBinomial",
"FoldedDistribution",
"GammaGaussianHMM",
"GammaPoisson",
"GaussianHMM",
"GaussianMRF",
"GaussianScaleMixture",
"ImproperUniform",
"IndependentHMM",
"InverseGamma",
"LKJ",
"LKJCorrCholesky",
"LinearHMM",
"Logistic",
"MaskedDistribution",
"MaskedMixture",
"MixtureOfDiagNormals",
"MixtureOfDiagNormalsSharedCovariance",
"MultivariateStudentT",
"OMTMultivariateNormal",
"OneOneMatching",
"OneTwoMatching",
"OrderedLogistic",
"ProjectedNormal",
"Rejector",
"RelaxedBernoulliStraightThrough",
"RelaxedOneHotCategoricalStraightThrough",
"SineBivariateVonMises",
"SineSkewed",
"SkewLogistic",
"SoftLaplace",
"SoftAsymmetricLaplace",
"SpanningTree",
"Stable",
"TorchDistribution",
"TransformModule",
"TruncatedPolyaGamma",
"Unit",
"VonMises3D",
"ZeroInflatedDistribution",
"ZeroInflatedNegativeBinomial",
"ZeroInflatedPoisson",
"constraints",
"enable_validation",
"is_validation_enabled",
"kl",
"transforms",
"validation_enabled",
]
# Extend __all__ with all torch distributions imported from `pyro.distributions.torch`
__all__.extend(torch_dists)
del torch_dists
| uber/pyro | pyro/distributions/__init__.py | Python | apache-2.0 | 5,185 | 0.000193 |
"""packet.py: Raw packet object."""
import logging
import struct
from codecs import encode
from moteconnection.connection import Dispatcher
log = logging.getLogger(__name__)
__author__ = "Raido Pahtma"
__license__ = "MIT"
class Packet(object):
def __init__(self, dispatch=0):
self._dispatch = dispatch
self._payload = b""
self.callback = None
@property
def dispatch(self):
return self._dispatch
@dispatch.setter
def dispatch(self, dispatch):
self._dispatch = dispatch
@property
def payload(self):
return self._payload
@payload.setter
def payload(self, payload):
self._payload = payload
def serialize(self):
return struct.pack("! B", self._dispatch) + self._payload
def __str__(self):
return "[{0._dispatch:02X}]{1:s}".format(self, encode(self._payload, "hex").upper())
@staticmethod
def deserialize(data):
if len(data) == 0:
raise ValueError("At least 1 byte is required to deserialize a Packet!")
p = Packet(dispatch=ord(data[0:1]))
p.payload = data[1:]
return p
class PacketDispatcher(Dispatcher):
def __init__(self, dispatch):
super(PacketDispatcher, self).__init__(dispatch)
self._receiver = None
def send(self, packet):
packet.dispatch = self.dispatch
self._sender(packet)
def register_receiver(self, receiver):
self._receiver = receiver
def receive(self, data):
try:
p = Packet.deserialize(data)
if self._receiver is not None:
self._deliver(self._receiver, p)
except ValueError:
log.warning("Failed to deserialize packet {}".format(encode(data, "hex").upper()))
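if __name__ == '__main__':
    # Minimal round-trip sketch (not part of the library): a packet should
    # survive serialize() followed by deserialize() unchanged.
    original = Packet(dispatch=0x3F)
    original.payload = b"\x01\x02\x03"
    restored = Packet.deserialize(original.serialize())
    assert restored.dispatch == original.dispatch
    assert restored.payload == original.payload
    print(original)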
| proactivity-lab/python-moteconnection | moteconnection/packet.py | Python | mit | 1,781 | 0.001684 |
# -*- coding: utf-8 -*-
'''
The function cache system allows for data to be stored on the master so it can be easily read by other minions
'''
# Import python libs
import copy
import logging
# Import salt libs
import salt.crypt
import salt.payload
import salt.utils  # used by send() for arg_lookup/format_call
log = logging.getLogger(__name__)
def _auth():
'''
Return the auth object
'''
if 'auth' not in __context__:
__context__['auth'] = salt.crypt.SAuth(__opts__)
return __context__['auth']
def update(clear=False):
'''
Execute the configured functions and send the data back up to the master
The functions to be executed are merged from the master config, pillar and
minion config under the option "function_cache":
.. code-block:: yaml
mine_functions:
network.ip_addrs:
- eth0
disk.usage: []
The function cache will be populated with information from executing these
functions
CLI Example:
.. code-block:: bash
salt '*' mine.update
'''
m_data = __salt__['config.option']('mine_functions', {})
data = {}
for func in m_data:
if func not in __salt__:
log.error('Function {0} in mine_functions not available'
.format(func))
continue
try:
if m_data[func] and isinstance(m_data[func], dict):
data[func] = __salt__[func](**m_data[func])
elif m_data[func] and isinstance(m_data[func], list):
data[func] = __salt__[func](*m_data[func])
else:
data[func] = __salt__[func]()
except Exception:
log.error('Function {0} in mine_functions failed to execute'
.format(func))
continue
if __opts__['file_client'] == 'local':
if not clear:
old = __salt__['data.getval']('mine_cache')
if isinstance(old, dict):
old.update(data)
data = old
return __salt__['data.update']('mine_cache', data)
auth = _auth()
load = {
'cmd': '_mine',
'data': data,
'id': __opts__['id'],
'clear': clear,
'tok': auth.gen_token('salt'),
}
sreq = salt.payload.SREQ(__opts__['master_uri'])
ret = sreq.send('aes', auth.crypticle.dumps(load))
return auth.crypticle.loads(ret)
def send(func, *args, **kwargs):
'''
Send a specific function to the mine.
CLI Example:
.. code-block:: bash
salt '*' mine.send network.interfaces eth0
'''
    if func not in __salt__:
return False
data = {}
arg_data = salt.utils.arg_lookup(__salt__[func])
func_data = copy.deepcopy(kwargs)
for ind, _ in enumerate(arg_data.get('args', [])):
try:
func_data[arg_data['args'][ind]] = args[ind]
except IndexError:
# Safe error, arg may be in kwargs
pass
f_call = salt.utils.format_call(__salt__[func], func_data)
try:
if 'kwargs' in f_call:
data[func] = __salt__[func](*f_call['args'], **f_call['kwargs'])
else:
data[func] = __salt__[func](*f_call['args'])
except Exception as exc:
log.error('Function {0} in mine.send failed to execute: {1}'
.format(func, exc))
return False
if __opts__['file_client'] == 'local':
old = __salt__['data.getval']('mine_cache')
if isinstance(old, dict):
old.update(data)
data = old
return __salt__['data.update']('mine_cache', data)
auth = _auth()
load = {
'cmd': '_mine',
'data': data,
'id': __opts__['id'],
'tok': auth.gen_token('salt'),
}
sreq = salt.payload.SREQ(__opts__['master_uri'])
ret = sreq.send('aes', auth.crypticle.dumps(load))
return auth.crypticle.loads(ret)
def get(tgt, fun, expr_form='glob'):
'''
Get data from the mine based on the target, function and expr_form
Targets can be matched based on any standard matching system that can be
matched on the master via these keywords::
        glob
        pcre
        list
        grain
        grain_pcre
        compound
        ipcidr
CLI Example:
.. code-block:: bash
salt '*' mine.get '*' network.interfaces
salt '*' mine.get 'os:Fedora' network.interfaces grain
'''
    if expr_form.lower() == 'pillar':
log.error('Pillar matching not supported on mine.get')
return ''
if __opts__['file_client'] == 'local':
ret = {}
is_target = {'glob': __salt__['match.glob'],
'pcre': __salt__['match.pcre'],
'list': __salt__['match.list'],
'grain': __salt__['match.grain'],
'grain_pcre': __salt__['match.grain_pcre'],
'compound': __salt__['match.compound'],
'ipcidr': __salt__['match.ipcidr'],
}[expr_form](tgt)
if is_target:
data = __salt__['data.getval']('mine_cache')
if isinstance(data, dict) and fun in data:
ret[__opts__['id']] = data[fun]
return ret
auth = _auth()
load = {
'cmd': '_mine_get',
'id': __opts__['id'],
'tgt': tgt,
'fun': fun,
'expr_form': expr_form,
'tok': auth.gen_token('salt'),
}
sreq = salt.payload.SREQ(__opts__['master_uri'])
ret = sreq.send('aes', auth.crypticle.dumps(load))
return auth.crypticle.loads(ret)
def delete(fun):
'''
Remove specific function contents of minion. Returns True on success.
CLI Example:
.. code-block:: bash
salt '*' mine.delete 'network.interfaces'
'''
if __opts__['file_client'] == 'local':
data = __salt__['data.getval']('mine_cache')
if isinstance(data, dict) and fun in data:
del data[fun]
return __salt__['data.update']('mine_cache', data)
auth = _auth()
load = {
'cmd': '_mine_delete',
'id': __opts__['id'],
'fun': fun,
'tok': auth.gen_token('salt'),
}
sreq = salt.payload.SREQ(__opts__['master_uri'])
ret = sreq.send('aes', auth.crypticle.dumps(load))
return auth.crypticle.loads(ret)
def flush():
'''
Remove all mine contents of minion. Returns True on success.
CLI Example:
.. code-block:: bash
salt '*' mine.flush
'''
if __opts__['file_client'] == 'local':
return __salt__['data.update']('mine_cache', {})
auth = _auth()
load = {
'cmd': '_mine_flush',
'id': __opts__['id'],
'tok': auth.gen_token('salt'),
}
sreq = salt.payload.SREQ(__opts__['master_uri'])
ret = sreq.send('aes', auth.crypticle.dumps(load))
return auth.crypticle.loads(ret)
| victorywang80/Maintenance | saltstack/src/salt/modules/mine.py | Python | apache-2.0 | 6,869 | 0.000291 |
from unittest import TestCase
class TestLoadUser(TestCase):
def test_find_user(self):
from backend import load_user
user = load_user('Neill', 'password')
self.assertIsNotNone(user)
self.assertEqual(user.password, "Password")
user = load_user("Tony")
self.assertIsNone(user)
| neillc/memberdb-ng | backend/tests/test_load_user.py | Python | gpl-2.0 | 330 | 0 |
from django.conf.urls import patterns, url
from polls import views
urlpatterns = patterns('',
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<pk>\d+)/$', views.DetailView.as_view(), name='detail'),
url(r'^(?P<pk>\d+)/results/$', views.ResultsView.as_view(), name='results'),
url(r'^(?P<question_id>\d+)/vote/$', views.vote, name='vote'),
)
| kwailamchan/programming-languages | python/django/polls/pollsite/polls/urls.py | Python | mit | 374 | 0.005348 |
from .ascendinggaugedstrategy import AscendingGaugedStrategy
from .gauge import AverageItemSize
class MinAverageItemSize(AscendingGaugedStrategy):
def _gauge(self, item):
return AverageItemSize(item)
| vialette/binreconfiguration | binreconfiguration/strategy/minaverageitemsize.py | Python | mit | 205 | 0.014634 |
"""
Backfill opportunity ids for Enterprise Coupons, Enterprise Offers and Manual Order Offers.
"""
import csv
import logging
from collections import Counter, defaultdict
from time import sleep
from uuid import UUID
from django.core.management import BaseCommand
from ecommerce.core.constants import COUPON_PRODUCT_CLASS_NAME
from ecommerce.extensions.offer.models import OFFER_PRIORITY_ENTERPRISE, OFFER_PRIORITY_MANUAL_ORDER
from ecommerce.programs.custom import get_model
ConditionalOffer = get_model('offer', 'ConditionalOffer')
Product = get_model('catalogue', 'Product')
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class Command(BaseCommand):
"""
Backfill opportunity ids for Enterprise Coupons, Enterprise Offers and Manual Order Offers.
"""
def add_arguments(self, parser):
parser.add_argument(
'--data-csv',
action='store',
dest='data_csv',
default=None,
help='Path of csv to read enterprise uuids and opportunity ids.',
type=str,
)
parser.add_argument(
'--contract-type',
action='store',
dest='contract_type',
default='single',
choices=['single', 'multi'],
help='Specify type of backfilling',
type=str,
)
parser.add_argument(
'--batch-limit',
action='store',
dest='batch_limit',
default=100,
help='Number of records to be fetched in each batch of backfilling.',
type=int,
)
parser.add_argument(
'--batch-offset',
action='store',
dest='batch_offset',
default=0,
help='Which index to start batching from.',
type=int,
)
parser.add_argument(
'--batch-sleep',
action='store',
dest='batch_sleep',
default=10,
help='How long to sleep between batches.',
type=int,
)
def read_csv(self, csv_path):
data = {}
with open(csv_path) as csv_file:
reader = csv.DictReader(csv_file)
for row in reader:
data[UUID(row['enterprise_customer_uuid'])] = row['opportunity_id']
return data
def read_multi_contracts_csv(self, csv_path):
data = {
'coupons': defaultdict(list),
'offers': defaultdict(list),
'ec_uuids': defaultdict(list),
}
with open(csv_path) as csv_file:
reader = csv.DictReader(csv_file)
for row in reader:
if row['ORDER_LINE_OFFER_TYPE'] == 'Voucher':
data['coupons'][row['ORDER_LINE_COUPON_ID']].append(row['OPP_ID'])
elif row['ORDER_LINE_OFFER_TYPE'] in ('Site', 'User'):
data['offers'][row['ORDER_LINE_OFFER_ID']].append(row['OPP_ID'])
else:
data['ec_uuids'][UUID(row['ENTERPRISE_CUSTOMER_UUID'])].append(row['OPP_ID'])
# condition the data so that at the end we have only one opportunity id for each coupon/offer
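        # e.g. (illustrative values) opportunity_ids == ['006A', '006A', '007B']
        # -> Counter(...).most_common(1) == [('006A', 2)], so '006A' is kept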
for __, category_data in data.items():
for category_object_id, opportunity_ids in category_data.items():
if len(opportunity_ids) > 1:
most_common_opportunity_id, __ = Counter(opportunity_ids).most_common(1)[0]
category_data[category_object_id] = most_common_opportunity_id
else:
category_data[category_object_id] = opportunity_ids[0]
return data
def get_enterprise_coupons_batch(self, coupon_filter, start, end):
logger.info('Fetching new batch of enterprise coupons from indexes: %s to %s', start, end)
return Product.objects.filter(**coupon_filter)[start:end]
def get_enterprise_offers_batch(self, offer_filter, start, end):
return ConditionalOffer.objects.filter(**offer_filter)[start:end]
def _backfill_enterprise_coupons(self, data, options, coupon_filter):
batch_limit = options['batch_limit']
batch_sleep = options['batch_sleep']
batch_offset = options['batch_offset']
current_batch_index = batch_offset
logger.info('Started Backfilling Enterprise Coupons...')
coupons = self.get_enterprise_coupons_batch(coupon_filter, batch_offset, batch_offset + batch_limit)
while coupons:
for coupon in coupons:
opportunity_id = data.get(str(coupon.id)) or data.get(UUID(coupon.attr.enterprise_customer_uuid))
if getattr(coupon.attr, 'sales_force_id', None) is None and opportunity_id:
logger.info(
'Enterprise Coupon updated. CouponID: [%s], OpportunityID: [%s]',
coupon.id,
opportunity_id
)
coupon.attr.sales_force_id = opportunity_id
coupon.save()
sleep(batch_sleep)
current_batch_index += len(coupons)
coupons = self.get_enterprise_coupons_batch(
coupon_filter, current_batch_index, current_batch_index + batch_limit
)
logger.info('Backfilling for Enterprise Coupons finished.')
def _backfill_offers(self, data, options, offer_filter, log_prefix):
logger.info('[%s] Started Backfilling Offers...', log_prefix)
batch_limit = options['batch_limit']
batch_sleep = options['batch_sleep']
batch_offset = options['batch_offset']
current_batch_index = batch_offset
ent_offers = self.get_enterprise_offers_batch(offer_filter, batch_offset, batch_offset + batch_limit)
while ent_offers:
for ent_offer in ent_offers:
opportunity_id = data.get(str(ent_offer.id)) or data.get(ent_offer.condition.enterprise_customer_uuid)
if bool(ent_offer.sales_force_id) is False and opportunity_id:
logger.info(
'[%s] Offer updated. OfferID: [%s], OpportunityID: [%s]',
log_prefix,
ent_offer.id,
opportunity_id,
)
ent_offer.sales_force_id = opportunity_id
ent_offer.save()
sleep(batch_sleep)
current_batch_index += len(ent_offers)
ent_offers = self.get_enterprise_offers_batch(
offer_filter, current_batch_index, current_batch_index + batch_limit
)
logger.info('[%s] Backfilling for Offers finished.', log_prefix)
def handle(self, *args, **options):
if options['contract_type'] == 'single':
logger.info('Backfilling for single contracts.')
self.backfill_single_contracts(options)
elif options['contract_type'] == 'multi':
logger.info('Backfilling for multi contracts.')
self.backfill_multi_contracts(options)
def backfill_single_contracts(self, options):
data = self.read_csv(options['data_csv'])
self._backfill_enterprise_coupons(data, options, {
'product_class__name': COUPON_PRODUCT_CLASS_NAME,
'attributes__code': 'enterprise_customer_uuid',
'attribute_values__value_text__in': data.keys()
})
self._backfill_offers(data, options, {
'offer_type': ConditionalOffer.SITE,
'priority': OFFER_PRIORITY_ENTERPRISE,
'condition__enterprise_customer_uuid__in': data.keys(),
}, 'ENTERPRISE OFFER')
self._backfill_offers(data, options, {
'offer_type': ConditionalOffer.USER,
'priority': OFFER_PRIORITY_MANUAL_ORDER,
'condition__enterprise_customer_uuid__in': data.keys(),
}, 'ENTERPRISE MANUAL ORDER OFFER')
def backfill_multi_contracts(self, options):
data = self.read_multi_contracts_csv(options['data_csv'])
coupons_data = data['coupons']
self._backfill_enterprise_coupons(coupons_data, options, {
'product_class__name': COUPON_PRODUCT_CLASS_NAME,
'id__in': coupons_data.keys()
})
offers_data = data['offers']
self._backfill_offers(offers_data, options, {
'offer_type__in': (ConditionalOffer.SITE, ConditionalOffer.USER),
'priority__in': (OFFER_PRIORITY_ENTERPRISE, OFFER_PRIORITY_MANUAL_ORDER),
'id__in': offers_data.keys(),
}, 'ALL ENTERPRISE OFFERS')
# backfill coupons and offers missing both coupon id and offer id
ec_uuids = data['ec_uuids']
self._backfill_enterprise_coupons(ec_uuids, options, {
'product_class__name': COUPON_PRODUCT_CLASS_NAME,
'attributes__code': 'enterprise_customer_uuid',
'attribute_values__value_text__in': ec_uuids.keys()
})
self._backfill_offers(ec_uuids, options, {
'offer_type': ConditionalOffer.SITE,
'priority': OFFER_PRIORITY_ENTERPRISE,
'condition__enterprise_customer_uuid__in': ec_uuids.keys(),
}, 'ENTERPRISE OFFER')
self._backfill_offers(ec_uuids, options, {
'offer_type': ConditionalOffer.USER,
'priority': OFFER_PRIORITY_MANUAL_ORDER,
'condition__enterprise_customer_uuid__in': ec_uuids.keys(),
}, 'ENTERPRISE MANUAL ORDER OFFER')
| edx/ecommerce | ecommerce/enterprise/management/commands/backfill_opportunity_ids.py | Python | agpl-3.0 | 9,523 | 0.00231 |
#!/usr/bin/env python
#
###########################
# Sysusage info:
# 1) memory/swap usage
# 2) disk usage
# 3) load average
###########################
import re
import os
import sys  # used by LoadAVG.get_load_avg on I/O errors
class MemoryUsage:
"""MEMORY USAGE STATISTICS:
memused - Total size of used memory in kilobytes.
memfree - Total size of free memory in kilobytes.
memusedper - Total size of used memory in percent.
memtotal - Total size of memory in kilobytes.
buffers - Total size of buffers used from memory in kilobytes.
cached - Total size of cached memory in kilobytes.
    realfree    - Total size of memory that is really free
                  (memfree + buffers + cached).
    realfreeper - Total size of memory that is really free, as a
                  percent of total memory.
swapused - Total size of swap space is used is kilobytes.
swapfree - Total size of swap space is free in kilobytes.
swapusedper - Total size of swap space is used in percent.
swaptotal - Total size of swap space in kilobytes.
The following statistics are only available by kernels from 2.6.
    slab        - Total size of memory in kilobytes that is used by
                  the kernel for data structure allocations.
dirty - Total size of memory pages in kilobytes that
waits to be written back to disk.
    mapped      - Total size of memory in kilobytes that is mapped
                  by devices or libraries with mmap.
writeback - Total size of memory that was written back to disk.
USAGE:
newObj = SysUsage.MemoryUsage() ## Creates an object
newObj.get_mem_usage() ## Gather statistics
"""
def __init__(self):
"""Constructor"""
self.meminfo = { }
file = '/proc/meminfo'
expr = re.compile(r'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree|Slab|Dirty|Mapped|Writeback):\s*(\d+)')
for line in open(file,"r"):
m = expr.search(line)
if m:
self.meminfo[m.group(1).lower()] = int(m.group(2))
def get_mem_usage(self):
"""The main function."""
output = self.meminfo
output['memused'] = int(output['memtotal'] - output['memfree'])
output['memusedper'] = int(100 * output['memused'] / output['memtotal'])
output['swapused'] = int(output['swaptotal'] - output['swapfree'])
output['realfree'] = int(output['memfree'] + output['buffers'] + output['cached'])
output['realfreeper'] = int(100 * output['realfree'] / output['memtotal'])
# In case no swap space on the machine
if not output['swaptotal']:
output['swapusedper'] = 0
else:
output['swapusedper'] = int(100 * output['swapused'] / output['swaptotal'])
return output
class DiskUsage:
"""DiskUsage class.
Exported methods:
        get_disk_usage()
    DISK USAGE STATISTICS:
    The result maps each mount point to its used disk space in percent:
        usageper   - The used disk space in percent.
        mountpoint - The mount point of the disk.
USAGE:
newObj = SysUsage.DiskUsage() ## Creates an object.
newObj.disk_usage() ## Gather statistics
"""
def __init__(self,cmd=None):
"""Constructor"""
if cmd == None:
self.cmd = '/bin/df'
else:
self.cmd = cmd
self.stats = {}
self.stats['diskusage'] = {}
def get_disk_usage(self):
"""The main function.Gathers disk usage information"""
p = os.popen(self.cmd)
for line in p.readlines():
m = re.search(r'(\d+)\%\s+(.+)$',line)
if m:
self.stats['diskusage'][m.group(2)] = int(m.group(1))
return self.stats
class LoadAVG:
"""LoadAVG class.
Exported methods:
get_load_avg()
    LOAD AVERAGE STATISTICS:
avg_1 - The average processor workload
of the last minute.
avg_5 - The average processor workload
of the last five minutes.
avg_15 - The average processor workload
of the last fifteen minutes.
USAGE:
newObj = LoadAVG() ## Creates an object
newObj.get_load_avg() ## Gather statistics
"""
def __init__(self):
"""Constructor"""
self.file = '/proc/loadavg'
self.tuple = ('avg_1','avg_5','avg_15')
self.elems = []
self.stats = {}
def get_load_avg(self):
"""The main function.Gathers load avarage statistics"""
try:
f = open(self.file,'r')
except IOError:
print "Unable to open the file %s" % (self.file)
sys.exit(1)
        self.stats['load_average'] = {}
for x in map(float,f.read().split(" ")[0:3]):
x = self._format("%.2f",x)
self.elems.append(x)
        self.stats['load_average'] = dict(zip(self.tuple, self.elems))
return self.stats
def _format(self,format, *arg):
return format % (arg)
if __name__ == '__main__':
o = LoadAVG()
print o.get_load_avg()
m = MemoryUsage()
usage = m.get_mem_usage()
print usage
du = DiskUsage()
print du.get_disk_usage()
| dorzheh/sysstat | SysUsage.py | Python | mit | 5,469 | 0.017188 |
# Unit tests for the boundary shape class and the boundary sphere class.
import unittest
import numpy as N
from tracer.spatial_geometry import generate_transform, rotx
from tracer.boundary_shape import *
class TestInBounds(unittest.TestCase):
def setUp(self):
self.points = N.array([
[0.,0.,0.],
[1.,1.,1.],
[2.,2.,2.]])
def test_sphere(self):
"""Sphere bounding volume"""
sphere = BoundarySphere(radius=2.)
N.testing.assert_array_equal(sphere.in_bounds(self.points), [True, True, False])
def test_cylinder(self):
"""Cylinder bounding volume"""
cyl = BoundaryCylinder(diameter=3.)
N.testing.assert_array_equal(cyl.in_bounds(self.points), [True, True, False])
def test_plane(self):
"""Translated plane section of a volume"""
plane = BoundaryPlane(rotation=rotx(-N.pi/6)[:3,:3], location=N.r_[0., 1., 0.])
N.testing.assert_array_equal(plane.in_bounds(self.points), [False, True, True])
class TestSphereBoundingRect(unittest.TestCase):
def setUp(self):
# Create some surfaces to intersect with spheres.
self.at_origin_xy = N.eye(4)
self.at_origin_yz = generate_transform(N.r_[0, 1, 0], N.pi/2, \
N.c_[[0,0,0]])
self.at_origin_slant = generate_transform(N.r_[0, 1, 0], N.pi/4, \
N.c_[[0,0,0]])
self.parallel_xy = generate_transform(N.r_[1, 0, 0], 0, N.c_[[0, 0, 1]])
self.parallel_yz = self.at_origin_yz.copy()
self.parallel_yz[0,3] += 1
self.parallel_slanted = self.at_origin_slant.copy()
self.parallel_slanted[[0,2],3] += N.sqrt(0.5)
def test_sphere_at_origin(self):
"""For a bounding sphere at the origin, the right bounding rects are returned"""
sphere = BoundarySphere(radius=2.)
extents = sphere.bounding_rect_for_plane(self.at_origin_xy)
self.failUnlessEqual(extents, (-2., 2., -2., 2.))
extents = sphere.bounding_rect_for_plane(self.at_origin_yz)
self.failUnlessEqual(extents, (-2., 2., -2., 2.))
extents = sphere.bounding_rect_for_plane(self.at_origin_slant)
self.failUnlessEqual(extents, (-2., 2., -2., 2.))
sqrt_3 = N.sqrt(3)
extents = sphere.bounding_rect_for_plane(self.parallel_xy)
self.failUnlessEqual(extents, (-sqrt_3, sqrt_3, -sqrt_3, sqrt_3))
extents = sphere.bounding_rect_for_plane(self.parallel_yz)
self.failUnlessEqual(extents, (-sqrt_3, sqrt_3, -sqrt_3, sqrt_3))
extents = sphere.bounding_rect_for_plane(self.parallel_slanted)
N.testing.assert_array_almost_equal(extents, \
(-sqrt_3, sqrt_3, -sqrt_3, sqrt_3))
def test_sphere_moved(self):
"""For a bounding sphere at 1,0,0 the right bounding rects are returned"""
sphere = BoundarySphere(radius=2., location=N.r_[1,0,0])
extents = sphere.bounding_rect_for_plane(self.at_origin_xy)
self.failUnlessEqual(extents, (-1., 3., -2., 2.))
sqrt_3 = N.sqrt(3)
extents = sphere.bounding_rect_for_plane(self.at_origin_yz)
self.failUnlessEqual(extents, (-sqrt_3, sqrt_3, -sqrt_3, sqrt_3))
sqrt_h = N.sqrt(0.5)
sqrt_35 = N.sqrt(3.5)
extents = sphere.bounding_rect_for_plane(self.at_origin_slant)
self.failUnlessEqual(extents, \
(sqrt_h - sqrt_35, sqrt_h + sqrt_35, -sqrt_35, sqrt_35))
extents = sphere.bounding_rect_for_plane(self.parallel_xy)
self.failUnlessEqual(extents, (1 - sqrt_3, 1 + sqrt_3, -sqrt_3, sqrt_3))
extents = sphere.bounding_rect_for_plane(self.parallel_yz)
self.failUnlessEqual(extents, (-2., 2., -2., 2.))
Reff = N.sqrt(4 - (1 - N.sqrt(0.5))**2)
extents = sphere.bounding_rect_for_plane(self.parallel_slanted)
N.testing.assert_array_almost_equal(extents, \
(sqrt_h - Reff, sqrt_h + Reff, -Reff, Reff))
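if __name__ == '__main__':
    # Standard unittest entry point (assumed; not present in this excerpt).
    unittest.main()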
| yosefm/tracer | tests/test_boundary_surface.py | Python | gpl-3.0 | 4,024 | 0.012177 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os, MySQLdb, csv
table_sql = """drop table if exists hotel;
create table hotel(
id int primary key,
Name varchar(255),
CardNo varchar(255),
Descriot varchar(255),
CtfTp varchar(255),
CtfId varchar(255),
Gender varchar(255),
Birthday varchar(255),
Address varchar(255),
Zip varchar(255),
Dirty varchar(255),
District1 varchar(255),
District2 varchar(255),
District3 varchar(255),
District4 varchar(255),
District5 varchar(255),
District6 varchar(255),
FirstNm varchar(255),
LastNm varchar(255),
Duty varchar(255),
Mobile varchar(255),
Tel varchar(255),
Fax varchar(255),
EMail varchar(255),
Nation varchar(255),
Taste varchar(255),
Education varchar(255),
Company varchar(255),
CTel varchar(255),
CAddress varchar(255),
CZip varchar(255),
Family varchar(255),
Version varchar(255)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;"""
def main():
db = MySQLdb.connect(user='root', passwd='oseasy_db', db='test', use_unicode=True)
insert_sql = "insert into hotel('Name', 'CardNo', 'Descriot', 'CtfTp', 'CtfId', 'Gender', 'Birthday', 'Address', 'Zip', 'Dirty', 'District1', 'District2', 'District3', 'District4', 'District5', 'District6', 'FirstNm', 'LastNm', 'Duty', 'Mobile', 'Tel', 'Fax', 'EMail', 'Nation', 'Taste', 'Education', 'Company', 'CTel', 'CAddress', 'CZip', 'Family', 'Version', 'id') values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
cursor = db.cursor()
cursor.execute(table_sql)
#db.commit()
cursor = db.cursor()
reader = csv.reader(open('D:\\wanquanban.csv'))
reader.next() #ignore first row
for row in reader:
for i, val in enumerate(row):
if isinstance(val, basestring):
row[i] = val.decode('utf-8')
row[-1] = int(row[-1])
cursor.execute(insert_sql, row)
db.commit()
db.close()
if __name__ == "__main__":
main() | aaronzhang1990/workshare | test/python/restore_hotel_2000w_data.py | Python | gpl-2.0 | 2,060 | 0.004369 |
"""umongo fields"""
import collections
import datetime as dt
from bson import DBRef, ObjectId, Decimal128
import marshmallow as ma
# from .registerer import retrieve_document
from .document import DocumentImplementation
from .exceptions import NotRegisteredDocumentError, DocumentDefinitionError
from .template import get_template
from .data_objects import Reference, List, Dict
from . import marshmallow_bonus as ma_bonus_fields
from .abstract import BaseField, I18nErrorDict
from .i18n import gettext as _
__all__ = (
# 'RawField',
# 'MappingField',
# 'TupleField',
'StringField',
'UUIDField',
'NumberField',
'IntegerField',
'DecimalField',
'BooleanField',
'FloatField',
'DateTimeField',
'NaiveDateTimeField',
'AwareDateTimeField',
# 'TimeField',
'DateField',
# 'TimeDeltaField',
'UrlField',
'URLField',
'EmailField',
'StrField',
'BoolField',
'IntField',
'DictField',
'ListField',
'ConstantField',
# 'PluckField'
'ObjectIdField',
'ReferenceField',
'GenericReferenceField',
'EmbeddedField'
)
# Republish supported marshmallow fields
# class RawField(BaseField, ma.fields.Raw):
# pass
class StringField(BaseField, ma.fields.String):
pass
class UUIDField(BaseField, ma.fields.UUID):
pass
class NumberField(BaseField, ma.fields.Number):
pass
class IntegerField(BaseField, ma.fields.Integer):
pass
class DecimalField(BaseField, ma.fields.Decimal):
def _serialize_to_mongo(self, obj):
return Decimal128(obj)
def _deserialize_from_mongo(self, value):
return value.to_decimal()
class BooleanField(BaseField, ma.fields.Boolean):
pass
class FloatField(BaseField, ma.fields.Float):
pass
def _round_to_millisecond(datetime):
"""Round a datetime to millisecond precision
MongoDB stores datetimes with a millisecond precision.
For consistency, use the same precision in the object representation.
"""
microseconds = round(datetime.microsecond, -3)
if microseconds == 1000000:
return datetime.replace(microsecond=0) + dt.timedelta(seconds=1)
return datetime.replace(microsecond=microseconds)
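# Illustrative values for _round_to_millisecond above: microsecond=123456
# rounds to 123000 (millisecond precision), while microsecond=999600 rounds
# to 1000000 and the timedelta branch carries it over into the seconds field.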
class DateTimeField(BaseField, ma.fields.DateTime):
def _deserialize(self, value, attr, data, **kwargs):
if isinstance(value, dt.datetime):
ret = value
else:
ret = super()._deserialize(value, attr, data, **kwargs)
return _round_to_millisecond(ret)
class NaiveDateTimeField(BaseField, ma.fields.NaiveDateTime):
def _deserialize(self, value, attr, data, **kwargs):
if isinstance(value, dt.datetime):
ret = value
else:
ret = super()._deserialize(value, attr, data, **kwargs)
return _round_to_millisecond(ret)
class AwareDateTimeField(BaseField, ma.fields.AwareDateTime):
def _deserialize(self, value, attr, data, **kwargs):
if isinstance(value, dt.datetime):
ret = value
else:
ret = super()._deserialize(value, attr, data, **kwargs)
return _round_to_millisecond(ret)
def _deserialize_from_mongo(self, value):
value = value.replace(tzinfo=dt.timezone.utc)
if self.default_timezone is not None:
value = value.astimezone(self.default_timezone)
return value
# class TimeField(BaseField, ma.fields.Time):
# pass
class DateField(BaseField, ma.fields.Date):
"""This field converts a date to a datetime to store it as a BSON Date"""
def _deserialize(self, value, attr, data, **kwargs):
if isinstance(value, dt.date):
return value
return super()._deserialize(value, attr, data)
def _serialize_to_mongo(self, obj):
return dt.datetime(obj.year, obj.month, obj.day)
def _deserialize_from_mongo(self, value):
return value.date()
# class TimeDeltaField(BaseField, ma.fields.TimeDelta):
# pass
class UrlField(BaseField, ma.fields.Url):
pass
class EmailField(BaseField, ma.fields.Email):
pass
class ConstantField(BaseField, ma.fields.Constant):
pass
class DictField(BaseField, ma.fields.Dict):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def cast_value_or_callable(key_field, value_field, value):
if value is ma.missing:
return ma.missing
if callable(value):
return lambda: Dict(key_field, value_field, value())
return Dict(key_field, value_field, value)
self.default = cast_value_or_callable(self.key_field, self.value_field, self.default)
self.missing = cast_value_or_callable(self.key_field, self.value_field, self.missing)
def _deserialize(self, value, attr, data, **kwargs):
value = super()._deserialize(value, attr, data, **kwargs)
return Dict(self.key_field, self.value_field, value)
def _serialize_to_mongo(self, obj):
if obj is None:
return ma.missing
return {
self.key_field.serialize_to_mongo(k) if self.key_field else k:
self.value_field.serialize_to_mongo(v) if self.value_field else v
for k, v in obj.items()
}
def _deserialize_from_mongo(self, value):
if value:
return Dict(
self.key_field,
self.value_field,
{
self.key_field.deserialize_from_mongo(k) if self.key_field else k:
self.value_field.deserialize_from_mongo(v) if self.value_field else v
for k, v in value.items()
}
)
return Dict(self.key_field, self.value_field)
def as_marshmallow_field(self):
field_kwargs = self._extract_marshmallow_field_params()
if self.value_field:
inner_ma_field = self.value_field.as_marshmallow_field()
else:
inner_ma_field = None
m_field = ma.fields.Dict(
self.key_field, inner_ma_field, metadata=self.metadata, **field_kwargs)
m_field.error_messages = I18nErrorDict(m_field.error_messages)
return m_field
def _required_validate(self, value):
if not hasattr(self.value_field, '_required_validate'):
return
required_validate = self.value_field._required_validate
errors = collections.defaultdict(dict)
for key, val in value.items():
try:
required_validate(val)
except ma.ValidationError as exc:
errors[key]["value"] = exc.messages
if errors:
raise ma.ValidationError(errors)
class ListField(BaseField, ma.fields.List):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def cast_value_or_callable(inner, value):
if value is ma.missing:
return ma.missing
if callable(value):
return lambda: List(inner, value())
return List(inner, value)
self.default = cast_value_or_callable(self.inner, self.default)
self.missing = cast_value_or_callable(self.inner, self.missing)
def _deserialize(self, value, attr, data, **kwargs):
value = super()._deserialize(value, attr, data, **kwargs)
return List(self.inner, value)
def _serialize_to_mongo(self, obj):
if obj is None:
return ma.missing
return [self.inner.serialize_to_mongo(each) for each in obj]
def _deserialize_from_mongo(self, value):
if value:
return List(
self.inner,
[self.inner.deserialize_from_mongo(each) for each in value]
)
return List(self.inner)
def map_to_field(self, mongo_path, path, func):
"""Apply a function to every field in the schema
"""
func(mongo_path, path, self.inner)
if hasattr(self.inner, 'map_to_field'):
self.inner.map_to_field(mongo_path, path, func)
def as_marshmallow_field(self):
field_kwargs = self._extract_marshmallow_field_params()
inner_ma_field = self.inner.as_marshmallow_field()
m_field = ma.fields.List(inner_ma_field, metadata=self.metadata, **field_kwargs)
m_field.error_messages = I18nErrorDict(m_field.error_messages)
return m_field
def _required_validate(self, value):
if not hasattr(self.inner, '_required_validate'):
return
required_validate = self.inner._required_validate
errors = {}
for i, sub_value in enumerate(value):
try:
required_validate(sub_value)
except ma.ValidationError as exc:
errors[i] = exc.messages
if errors:
raise ma.ValidationError(errors)
# Aliases
URLField = UrlField
StrField = StringField
BoolField = BooleanField
IntField = IntegerField
class ObjectIdField(BaseField, ma_bonus_fields.ObjectId):
pass
class ReferenceField(BaseField, ma_bonus_fields.Reference):
def __init__(self, document, *args, reference_cls=Reference, **kwargs):
"""
:param document: Can be a :class:`umongo.embedded_document.DocumentTemplate`,
another instance's :class:`umongo.embedded_document.DocumentImplementation` or
the embedded document class name.
.. warning:: The referenced document's _id must be an `ObjectId`.
"""
super().__init__(*args, **kwargs)
# TODO : check document_cls is implementation or string
self.reference_cls = reference_cls
# Can be the Template, Template's name or another Implementation
if not isinstance(document, str):
self.document = get_template(document)
else:
self.document = document
self._document_cls = None
self._document_implementation_cls = DocumentImplementation
@property
def document_cls(self):
"""
Return the instance's :class:`umongo.embedded_document.DocumentImplementation`
implementing the `document` attribute.
"""
if not self._document_cls:
self._document_cls = self.instance.retrieve_document(self.document)
return self._document_cls
def _deserialize(self, value, attr, data, **kwargs):
if value is None:
return None
if isinstance(value, DBRef):
if self.document_cls.collection.name != value.collection:
raise ma.ValidationError(_("DBRef must be on collection `{collection}`.").format(
self.document_cls.collection.name))
value = value.id
elif isinstance(value, Reference):
if value.document_cls != self.document_cls:
raise ma.ValidationError(_("`{document}` reference expected.").format(
document=self.document_cls.__name__))
if not isinstance(value, self.reference_cls):
value = self.reference_cls(value.document_cls, value.pk)
return value
elif isinstance(value, self.document_cls):
if not value.is_created:
raise ma.ValidationError(
_("Cannot reference a document that has not been created yet."))
value = value.pk
elif isinstance(value, self._document_implementation_cls):
raise ma.ValidationError(_("`{document}` reference expected.").format(
document=self.document_cls.__name__))
value = super()._deserialize(value, attr, data, **kwargs)
return self.reference_cls(self.document_cls, value)
def _serialize_to_mongo(self, obj):
return obj.pk
def _deserialize_from_mongo(self, value):
return self.reference_cls(self.document_cls, value)
class GenericReferenceField(BaseField, ma_bonus_fields.GenericReference):
def __init__(self, *args, reference_cls=Reference, **kwargs):
super().__init__(*args, **kwargs)
self.reference_cls = reference_cls
self._document_implementation_cls = DocumentImplementation
def _document_cls(self, class_name):
try:
return self.instance.retrieve_document(class_name)
except NotRegisteredDocumentError:
raise ma.ValidationError(_('Unknown document `{document}`.').format(
document=class_name))
def _serialize(self, value, attr, obj):
if value is None:
return None
return {'id': str(value.pk), 'cls': value.document_cls.__name__}
def _deserialize(self, value, attr, data, **kwargs):
if value is None:
return None
if isinstance(value, Reference):
if not isinstance(value, self.reference_cls):
value = self.reference_cls(value.document_cls, value.pk)
return value
if isinstance(value, self._document_implementation_cls):
if not value.is_created:
raise ma.ValidationError(
_("Cannot reference a document that has not been created yet."))
return self.reference_cls(value.__class__, value.pk)
if isinstance(value, dict):
if value.keys() != {'cls', 'id'}:
raise ma.ValidationError(_("Generic reference must have `id` and `cls` fields."))
try:
_id = ObjectId(value['id'])
except ValueError:
raise ma.ValidationError(_("Invalid `id` field."))
document_cls = self._document_cls(value['cls'])
return self.reference_cls(document_cls, _id)
raise ma.ValidationError(_("Invalid value for generic reference field."))
def _serialize_to_mongo(self, obj):
return {'_id': obj.pk, '_cls': obj.document_cls.__name__}
def _deserialize_from_mongo(self, value):
document_cls = self._document_cls(value['_cls'])
return self.reference_cls(document_cls, value['_id'])
class EmbeddedField(BaseField, ma.fields.Nested):
def __init__(self, embedded_document, *args, **kwargs):
"""
:param embedded_document: Can be a
:class:`umongo.embedded_document.EmbeddedDocumentTemplate`,
another instance's :class:`umongo.embedded_document.EmbeddedDocumentImplementation`
or the embedded document class name.
"""
# Don't need to pass `nested` attribute given it is overloaded
super().__init__(None, *args, **kwargs)
# Try to retrieve the template if possible for consistency
if not isinstance(embedded_document, str):
self.embedded_document = get_template(embedded_document)
else:
self.embedded_document = embedded_document
self._embedded_document_cls = None
@property
def nested(self):
# Overload `nested` attribute to be able to fetch it lazily
return self.embedded_document_cls.Schema
@nested.setter
def nested(self, value):
pass
@property
def embedded_document_cls(self):
"""
Return the instance's :class:`umongo.embedded_document.EmbeddedDocumentImplementation`
implementing the `embedded_document` attribute.
"""
if not self._embedded_document_cls:
embedded_document_cls = self.instance.retrieve_embedded_document(self.embedded_document)
if embedded_document_cls.opts.abstract:
raise DocumentDefinitionError(
"EmbeddedField doesn't accept abstract embedded document"
)
self._embedded_document_cls = embedded_document_cls
return self._embedded_document_cls
def _serialize(self, value, attr, obj):
if value is None:
return None
return value.dump()
def _deserialize(self, value, attr, data, **kwargs):
embedded_document_cls = self.embedded_document_cls
if isinstance(value, embedded_document_cls):
return value
if not isinstance(value, dict):
raise ma.ValidationError({'_schema': ['Invalid input type.']})
# Handle inheritance deserialization here using `cls` field as hint
if embedded_document_cls.opts.offspring and 'cls' in value:
to_use_cls_name = value.pop('cls')
if not any(o for o in embedded_document_cls.opts.offspring
if o.__name__ == to_use_cls_name):
raise ma.ValidationError(_('Unknown document `{document}`.').format(
document=to_use_cls_name))
try:
to_use_cls = embedded_document_cls.opts.instance.retrieve_embedded_document(
to_use_cls_name)
except NotRegisteredDocumentError as exc:
raise ma.ValidationError(str(exc))
return to_use_cls(**value)
return embedded_document_cls(**value)
def _serialize_to_mongo(self, obj):
return obj.to_mongo()
def _deserialize_from_mongo(self, value):
return self.embedded_document_cls.build_from_mongo(value)
def _validate_missing(self, value):
# Overload default to handle recursive check
super()._validate_missing(value)
errors = {}
if value is ma.missing:
def get_sub_value(_):
return ma.missing
elif isinstance(value, dict):
# value is a dict for deserialization
def get_sub_value(key):
return value.get(key, ma.missing)
elif isinstance(value, self.embedded_document_cls):
# value is a valid EmbeddedDocument
def get_sub_value(key):
return value._data.get(key)
else:
# value is invalid, just return and let `_deserialize`
# raises an error about this
return
for name, field in self.embedded_document_cls.schema.fields.items():
sub_value = get_sub_value(name)
# `_validate_missing` doesn't check for required fields here, so we
# can safely skip missing values
if sub_value is ma.missing:
continue
try:
field._validate_missing(sub_value)
except ma.ValidationError as exc:
errors[name] = exc.messages
if errors:
raise ma.ValidationError(errors)
def map_to_field(self, mongo_path, path, func):
"""Apply a function to every field in the schema"""
for name, field in self.embedded_document_cls.schema.fields.items():
cur_path = '%s.%s' % (path, name)
cur_mongo_path = '%s.%s' % (mongo_path, field.attribute or name)
func(cur_mongo_path, cur_path, field)
if hasattr(field, 'map_to_field'):
field.map_to_field(cur_mongo_path, cur_path, func)
def as_marshmallow_field(self):
# Overwrite default `as_marshmallow_field` to handle nesting
field_kwargs = self._extract_marshmallow_field_params()
nested_ma_schema = self.embedded_document_cls.schema.as_marshmallow_schema()
m_field = ma.fields.Nested(nested_ma_schema, metadata=self.metadata, **field_kwargs)
m_field.error_messages = I18nErrorDict(m_field.error_messages)
return m_field
def _required_validate(self, value):
value.required_validate()
| Scille/umongo | umongo/fields.py | Python | mit | 19,389 | 0.001238 |
import pygame
from pygame.locals import *
from math import sin
import states
class EndEvent(object):
text = [
"Ah, hello there. Welcome to the center of the moon!",
"Oh, me? I'm just the man in the moon. I live here.",
"Don't act so shocked! It's rude you know.",
"I don't get a lot of visitors down here, what with the moon rabbits.",
"How did you befriend them? . . . You did befriend them, didn't you?",
"I really don't want to have to clean up another set of blood stains.",
"Hey, I think I hear them coming. They must really like you!"
]
texture = None
font = None
def __init__(self, pos):
self.pos = pos
self.start_time = None
self.time = 0.0
self.fade = None
def update(self, delta, pos, player_pos):
self.time += delta
pos = (pos[0] + self.pos[0], pos[1] + self.pos[1])
distance = abs(player_pos[0] - pos[0]) + abs(player_pos[1] - pos[1])
if not self.start_time and distance < 5.0:
self.start_time = self.time
if self.fade != None:
self.fade += delta / 4.0
if self.fade > 1.0:
raise states.StateChange(states.MainMenuState())
elif self.start_time:
count = int((self.time - self.start_time) / 0.05)
i = 0
while count > len(EndEvent.text[i]) + 50:
count -= len(EndEvent.text[i]) + 50
i += 1
if i >= len(EndEvent.text):
self.fade = 0.0
break
def render(self, screen, camera, pos):
if not EndEvent.texture:
EndEvent.texture = pygame.image.load("data/art/maninthemoon.png")
EndEvent.texture.set_colorkey((255, 0, 255))
pos = (pos[0] + self.pos[0], pos[1] + self.pos[1])
spos = (pos[0], pos[1] + sin(self.time * 8) / 8)
spos = camera.screen_pos(spos)
spos = (
spos[0] - EndEvent.texture.get_width() / 2,
spos[1] - EndEvent.texture.get_height() / 2
)
screen.blit(self.texture, spos)
if self.start_time:
if not EndEvent.font:
EndEvent.font = pygame.font.Font("data/fonts/Prototype.ttf", 12)
count = int((self.time - self.start_time) / 0.05)
i = 0
while count > len(EndEvent.text[i]) + 50 and i < len(EndEvent.text) - 1:
count -= len(EndEvent.text[i]) + 50
i += 1
words = EndEvent.text[i][:count].split()
lines = [""]
for word in words:
if len(lines[-1]) > 32:
lines.append(word)
else:
lines[-1] += " " + word
for i in range(len(lines)):
texture = EndEvent.font.render(lines[i], 1, (255, 255, 255))
spos = camera.screen_pos(pos)
spos = (
spos[0] - texture.get_width() / 2,
spos[1] - texture.get_height() / 2 + i * 20 - 40
)
screen.blit(texture, spos)
if self.fade != None:
a = 255.0 - self.fade * 255.0
screen.fill((a, a, a), special_flags=BLEND_MULT)
| Cynerva/jttcotm | end.py | Python | bsd-3-clause | 3,326 | 0.001503 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This file is part of the prometeo project.
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
"""
__author__ = 'Emanuele Bertoldi <[email protected]>'
__copyright__ = 'Copyright (c) 2011 Emanuele Bertoldi'
__version__ = '0.0.5'
from django.utils.translation import ugettext_lazy as _
from django.db import models
class ActivationToken(models.Model):
"""Activation token for user account.
"""
profile = models.ForeignKey('auth.UserProfile', null=True, verbose_name=_("user profile"))
activation_key = models.CharField(_("activation key"), max_length=40, blank=True, null=True)
key_expiration = models.DateTimeField(_("key expiration"), blank=True, null=True)
| zuck/prometeo-erp | core/registration/models.py | Python | lgpl-3.0 | 1,316 | 0.003799 |
from data_importers.ems_importers import BaseHalaroseCsvImporter
FAL_ADMINAREAS = (
"Bo'ness",
"Bonnybridge",
"Denny",
"Falkirk Ward 1",
"Falkirk Ward 2",
"Falkirk Ward 3",
"Falkirk Ward 4",
"Falkirk Ward 5",
"Falkirk Ward 6",
"Falkirk Ward 7",
"Falkirk Ward 8",
"Falkirk Ward 9",
"Falkirk",
"Grangemouth",
"Larbert",
)
FAL_INCLUDE_STATIONS = [
"Maddiston Old Folks Hall",
"Deanburn Primary School",
"Blackness Community Hall",
]
FAL_EXCLUDE_STATIONS = [
"Cowie Community Centre",
"Balfour Centre",
"Bannockburn Community Centre",
]
class Command(BaseHalaroseCsvImporter):
council_id = "FAL"
addresses_name = "2021-04-01T20:39:48.865108/Central Scotland polling_station_export-2021-03-31.csv"
stations_name = "2021-04-01T20:39:48.865108/Central Scotland polling_station_export-2021-03-31.csv"
elections = ["2021-05-06"]
def station_record_to_dict(self, record):
if (
(record.adminarea not in FAL_ADMINAREAS)
and (record.pollingstationname not in FAL_INCLUDE_STATIONS)
or (record.pollingstationname in FAL_EXCLUDE_STATIONS)
):
return None
return super().station_record_to_dict(record)
def address_record_to_dict(self, record):
if (
(record.adminarea not in FAL_ADMINAREAS)
and (record.pollingstationname not in FAL_INCLUDE_STATIONS)
or (record.pollingstationname in FAL_EXCLUDE_STATIONS)
):
return None
if record.housepostcode in [
"FK2 7FG",
"FK6 5EP",
"FK2 7YN",
]:
return None
return super().address_record_to_dict(record)
| DemocracyClub/UK-Polling-Stations | polling_stations/apps/data_importers/management/commands/import_falkirk.py | Python | bsd-3-clause | 1,748 | 0.001144 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Testing topi conv2d operator for VTA"""
import json
import os
import numpy as np
from collections import namedtuple
import tvm
from tvm import autotvm
from tvm.contrib import util
from tvm.contrib.pickle_memoize import memoize
import topi
import topi.testing
import vta
from vta import program_fpga, reconfig_runtime
import vta.testing
from vta.testing import simulator
Workload = namedtuple("Conv2DWorkload",
['batch', 'height', 'width', 'in_filter', 'out_filter',
'hkernel', 'wkernel', 'hpad', 'wpad', 'hstride', 'wstride'])
# Get batch info from env
env = vta.get_env()
# ResNet18 workloads
resnet_wkls = [
# Workloads of resnet18 on imagenet
# ('resnet-18.C1', Workload(env.BATCH, 224, 224, 3, 64, 7, 7, 3, 3, 2, 2)),
('resnet-18.C2', Workload(env.BATCH, 56, 56, 64, 64, 3, 3, 1, 1, 1, 1)),
('resnet-18.C3', Workload(env.BATCH, 56, 56, 64, 128, 3, 3, 1, 1, 2, 2)),
('resnet-18.C4', Workload(env.BATCH, 56, 56, 64, 128, 1, 1, 0, 0, 2, 2)),
('resnet-18.C5', Workload(env.BATCH, 28, 28, 128, 128, 3, 3, 1, 1, 1, 1)),
('resnet-18.C6', Workload(env.BATCH, 28, 28, 128, 256, 3, 3, 1, 1, 2, 2)),
('resnet-18.C7', Workload(env.BATCH, 28, 28, 128, 256, 1, 1, 0, 0, 2, 2)),
('resnet-18.C8', Workload(env.BATCH, 14, 14, 256, 256, 3, 3, 1, 1, 1, 1)),
('resnet-18.C9', Workload(env.BATCH, 14, 14, 256, 512, 3, 3, 1, 1, 2, 2)),
('resnet-18.C10', Workload(env.BATCH, 14, 14, 256, 512, 1, 1, 0, 0, 2, 2)),
('resnet-18.C11', Workload(env.BATCH, 7, 7, 512, 512, 3, 3, 1, 1, 1, 1)),
]
# FIXME: we need a custom clip operator to circumvent a pattern detection limitation
@tvm.tag_scope(tag=topi.tag.ELEMWISE)
def my_clip(x, a_min, a_max):
"""Unlike topi's current clip, put min and max into two stages."""
const_min = tvm.const(a_min, x.dtype)
const_max = tvm.const(a_max, x.dtype)
x = tvm.compute(x.shape, lambda *i: tvm.min(x(*i), const_max), name="clipA")
x = tvm.compute(x.shape, lambda *i: tvm.max(x(*i), const_min), name="clipB")
return x
def run_conv2d(env, remote, wl, target,
check_correctness=True, print_ir=False,
samples=4):
# Workload assertions
assert wl.hpad == wl.wpad
# Perform packing only if we are targeting the accelerator
if "arm_cpu" in target.keys:
data_pack = False
layout = "NCHW"
elif "vta" in target.keys:
data_pack = True
layout = "NCHW%dn%dc" % (env.BATCH, env.BLOCK_IN)
# Derive shapes depending upon packing
a_shape = (wl.batch, wl.in_filter, wl.height, wl.width)
w_shape = (wl.out_filter, wl.in_filter, wl.hkernel, wl.wkernel)
b_shape = (wl.batch, wl.out_filter, 1, 1)
if data_pack:
data_shape = (wl.batch//env.BATCH, wl.in_filter//env.BLOCK_IN,
wl.height, wl.width, env.BATCH, env.BLOCK_IN)
kernel_shape = (wl.out_filter//env.BLOCK_OUT, wl.in_filter//env.BLOCK_IN,
wl.hkernel, wl.wkernel, env.BLOCK_OUT, env.BLOCK_IN)
bias_shape = (wl.batch//env.BATCH, wl.out_filter//env.BLOCK_OUT,
1, 1, env.BATCH, env.BLOCK_OUT)
else:
data_shape = a_shape
kernel_shape = w_shape
bias_shape = b_shape
data = tvm.placeholder(data_shape, name="data", dtype=env.inp_dtype)
kernel = tvm.placeholder(kernel_shape, name="kernel", dtype=env.wgt_dtype)
bias = tvm.placeholder(bias_shape, name="bias", dtype=env.acc_dtype)
# Define base computation schedule
with target:
res = topi.nn.conv2d(
data, kernel, (wl.hstride, wl.wstride), (wl.hpad, wl.wpad), (1, 1),
layout, env.acc_dtype)
res = topi.right_shift(res, 8)
res = topi.add(res, bias)
res = my_clip(res, 0, (1 << env.OUT_WIDTH - 1) - 1)
res = topi.cast(res, env.out_dtype)
# Derive base schedule
s = topi.generic.schedule_conv2d_nchw([res])
if print_ir:
print(vta.lower(s, [data, kernel, bias, res], simple_mode=True))
# Derive number of ops
fout_height = (wl.height + 2 * wl.hpad - wl.hkernel) // wl.hstride + 1
fout_width = (wl.width + 2 * wl.wpad - wl.wkernel) // wl.wstride + 1
num_ops = 2 * wl.batch * fout_height * fout_width * wl.hkernel * wl.wkernel * wl.out_filter * wl.in_filter
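    # e.g. for resnet-18.C11 with env.BATCH == 1:
    #   fout_height = fout_width = (7 + 2*1 - 3) // 1 + 1 = 7
    #   num_ops = 2 * 1 * 7 * 7 * 3 * 3 * 512 * 512 ~= 2.31e8
    # (the factor of 2 counts each multiply-accumulate as two operations)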
# @memoize("vta.tests.test_benchmark_topi.conv2d.verify_nhwc")
def get_ref_data():
# derive min max for act, wgt, and bias types (max non inclusive)
a_min, a_max = 0 - (1 << (env.INP_WIDTH - 1)), (1 << (env.INP_WIDTH - 1))
w_min, w_max = 0 - (1 << (env.WGT_WIDTH - 1)), (1 << (env.WGT_WIDTH - 1))
        b_min, b_max = 0 - (1 << (env.INP_WIDTH + env.WGT_WIDTH - 2)), 1 << (env.INP_WIDTH + env.WGT_WIDTH - 2)
a_np = np.random.randint(a_min, a_max, size=a_shape).astype(data.dtype)
w_np = np.random.randint(w_min, w_max, size=w_shape).astype(kernel.dtype)
b_np = np.random.randint(b_min, b_max, size=b_shape).astype(env.acc_dtype)
r_np = topi.testing.conv2d_nchw_python(
a_np.astype(env.acc_dtype), w_np.astype(env.acc_dtype), (wl.hstride, wl.wstride), wl.hpad).astype(env.acc_dtype)
return a_np, w_np, b_np, r_np
# Data in original format
data_np, kernel_np, bias_np, res_ref = get_ref_data()
if data_pack:
data_np = data_np.reshape(
wl.batch//env.BATCH, env.BATCH,
wl.in_filter//env.BLOCK_IN, env.BLOCK_IN,
wl.height, wl.width).transpose((0, 2, 4, 5, 1, 3))
kernel_np = kernel_np.reshape(
wl.out_filter//env.BLOCK_OUT, env.BLOCK_OUT,
wl.in_filter//env.BLOCK_IN, env.BLOCK_IN,
wl.hkernel, wl.wkernel).transpose((0, 2, 4, 5, 1, 3))
bias_np = bias_np.reshape(
wl.batch//env.BATCH, wl.out_filter//env.BLOCK_OUT,
1, 1, env.BATCH, env.BLOCK_OUT)
# Build
if "vta" in target.keys:
mod = vta.build(s, [data, kernel, bias, res],
target=target,
target_host=env.target_host,
name="conv2d")
else:
mod = tvm.build(s, [data, kernel, bias, res],
target=target,
target_host=env.target_host,
name="conv2d")
temp = util.tempdir()
mod.save(temp.relpath("conv2d.o"))
remote.upload(temp.relpath("conv2d.o"))
f = remote.load_module("conv2d.o")
ctx = remote.context(str(target))
res_np = np.zeros(topi.util.get_const_tuple(res.shape)).astype(res.dtype)
data_arr = tvm.nd.array(data_np, ctx)
kernel_arr = tvm.nd.array(kernel_np, ctx)
bias_arr = tvm.nd.array(bias_np, ctx)
res_arr = tvm.nd.array(res_np, ctx)
time_f = f.time_evaluator("conv2d", ctx, number=samples)
# In vta sim mode, collect simulator runtime statistics
stats = {}
cost = None
if env.TARGET in ["sim", "tsim"]:
# Check if we're in local RPC mode (allows us to rebuild the
# runtime on the fly when varying the VTA designs)
local_rpc = int(os.environ.get("VTA_LOCAL_SIM_RPC", "0"))
if local_rpc:
if env.TARGET == "sim":
remote.get_function("vta.simulator.profiler_clear")()
else:
remote.get_function("vta.tsim.profiler_clear")()
cost = time_f(data_arr, kernel_arr, bias_arr, res_arr)
if env.TARGET == "sim":
stats = json.loads(remote.get_function("vta.simulator.profiler_status")())
else:
stats = json.loads(remote.get_function("vta.tsim.profiler_status")())
else:
simulator.clear_stats()
cost = time_f(data_arr, kernel_arr, bias_arr, res_arr)
stats = simulator.stats()
else:
cost = time_f(data_arr, kernel_arr, bias_arr, res_arr)
# Check correctness
correct = False
if check_correctness:
res_orig = res_arr.asnumpy()
if data_pack:
res_orig = res_orig.transpose(
(0, 4, 1, 5, 2, 3)).reshape(wl.batch, wl.out_filter, fout_height, fout_width)
bias_np = bias_np.transpose(
(0, 4, 1, 5, 2, 3)).reshape(wl.batch, wl.out_filter, 1, 1)
res_ref = res_ref >> env.WGT_WIDTH
res_ref += bias_np
res_ref = np.clip(res_ref, 0, (1 << env.OUT_WIDTH - 1) - 1)
res_ref = res_ref.astype(env.out_dtype)
correct = np.allclose(res_orig, res_ref)
gops = (num_ops / cost.mean) / float(10 ** 9)
status = "PASSED" if correct else "FAILED"
if "arm_cpu" in target.keys:
device = "CPU"
elif "vta" in target.keys:
device = "VTA"
print("%s CONV2D TEST %s: Time cost = %g sec/op, %g GOPS" % (device, status, cost.mean, gops))
return correct, cost, stats
def test_conv2d(device="vta"):
def _run(env, remote):
if device == "vta":
target = env.target
if env.TARGET not in ["sim", "tsim"]:
assert tvm.module.enabled("rpc")
program_fpga(remote, bitstream=None)
reconfig_runtime(remote)
elif device == "arm_cpu":
target = env.target_vta_cpu
with autotvm.tophub.context(target): # load pre-tuned schedule parameters
for _, wl in resnet_wkls:
print(wl)
run_conv2d(env, remote, wl, target)
vta.testing.run(_run)
if __name__ == "__main__":
test_conv2d(device="arm_cpu")
test_conv2d(device="vta")
| Huyuwei/tvm | vta/tests/python/integration/test_benchmark_topi_conv2d.py | Python | apache-2.0 | 10,351 | 0.003188 |
import sys
import os
import getpass
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS
from django.core import exceptions
from django.utils.text import capfirst
from django.utils.encoding import force_str
from django.contrib.auth.password_validation import validate_password
from ...models import Person, PersonEmail
class NotRunningInTTYException(Exception):
pass
class Command(BaseCommand):
help = "Used to create a superuser."
requires_migrations_checks = True
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
self.email_field = PersonEmail._meta.get_field("address")
def add_arguments(self, parser):
parser.add_argument(
"--email",
dest="email",
default=None,
help="Specifies the login for the superuser.",
)
parser.add_argument(
"--noinput",
"--no-input",
action="store_false",
dest="interactive",
default=True,
help=(
"Tells Django to NOT prompt the user for input of any kind. "
"You must use --email with --noinput, along with an option for "
"any other required field. Superusers created with --noinput will "
"not be able to log in until they're given a valid password."
),
)
parser.add_argument(
"--database",
action="store",
dest="database",
default=DEFAULT_DB_ALIAS,
help='Specifies the database to use. Default is "default".',
)
def handle(self, *args, **options):
email = options["email"]
database = options["database"]
# If not provided, create the user with an unusable password
password = None
user_data = {}
# Same as user_data but with foreign keys as fake model instances
# instead of raw IDs.
fake_user_data = {}
# Do quick and dirty validation if --noinput
if not options["interactive"]:
try:
if not email:
raise CommandError("You must use --email with --noinput.")
email = self.email_field.clean(email, None)
except exceptions.ValidationError as e:
raise CommandError("; ".join(e.messages))
password = os.environ.get("SUPERPERSON_PASSWORD", None)
else:
try:
if hasattr(sys.stdin, "isatty") and not sys.stdin.isatty():
raise NotRunningInTTYException("Not running in a TTY")
# Get a username
verbose_field_name = self.email_field.verbose_name
while email is None:
input_msg = capfirst(verbose_field_name)
username_rel = self.email_field.remote_field
input_msg = force_str(
"%s%s: "
% (
input_msg,
" (%s.%s)"
% (
username_rel.model._meta.object_name,
username_rel.field_name,
)
if username_rel
else "",
)
)
email = self.get_input_data(self.email_field, input_msg)
if not email:
continue
if self.email_field.unique:
try:
Person._default_manager.db_manager(
database
).get_by_natural_key(email)
except Person.DoesNotExist:
pass
else:
self.stderr.write(
"Error: That %s is already taken." % verbose_field_name
)
email = None
# Get a password
while password is None:
password = getpass.getpass()
password2 = getpass.getpass(force_str("Password (again): "))
if password != password2:
self.stderr.write("Error: Your passwords didn't match.")
password = None
# Don't validate passwords that don't match.
continue
if password.strip() == "":
self.stderr.write("Error: Blank passwords aren't allowed.")
password = None
# Don't validate blank passwords.
continue
try:
validate_password(password2, Person(**fake_user_data))
except exceptions.ValidationError as err:
self.stderr.write("\n".join(err.messages))
password = None
except KeyboardInterrupt:
self.stderr.write("\nOperation cancelled.")
sys.exit(1)
except NotRunningInTTYException:
self.stdout.write(
"Superuser creation skipped due to not running in a TTY. "
"You can run `manage.py createsuperuser` in your project "
"to create one manually."
)
if email:
user_data["email"] = email
user_data["password"] = password
Person._default_manager.db_manager(database).create_superperson(**user_data)
if options["verbosity"] >= 1:
self.stdout.write("Superuser created successfully.")
def get_input_data(self, field, message, default=None):
"""
Override this method if you want to customize data inputs or
validation exceptions.
"""
raw_value = input(message)
if default and raw_value == "":
raw_value = default
try:
val = field.clean(raw_value, None)
except exceptions.ValidationError as e:
self.stderr.write("Error: %s" % "; ".join(e.messages))
val = None
return val
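# Invocation sketch: with --noinput, the password is read from the
# SUPERPERSON_PASSWORD environment variable handled above, e.g.
#
#   SUPERPERSON_PASSWORD=s3cret ./manage.py createsuperperson \
#       --noinput --email [email protected]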
| lafranceinsoumise/api-django | agir/people/management/commands/createsuperperson.py | Python | agpl-3.0 | 6,404 | 0.001093 |
from SimpleCV.Tracking.TrackClass import Track, CAMShiftTrack, SURFTrack, LKTrack, MFTrack
from SimpleCV.Tracking.CAMShiftTracker import camshiftTracker
from SimpleCV.Tracking.LKTracker import lkTracker
from SimpleCV.Tracking.SURFTracker import surfTracker
from SimpleCV.Tracking.MFTracker import mfTracker
from SimpleCV.Tracking.TrackSet import TrackSet
| tpltnt/SimpleCV | SimpleCV/Tracking/__init__.py | Python | bsd-3-clause | 355 | 0.002817 |
import chardet
from vint.ast.node_type import NodeType
from vint.ast.traversing import traverse, SKIP_CHILDREN
from vint.linting.level import Level
from vint.linting.lint_target import AbstractLintTarget
from vint.linting.policy.abstract_policy import AbstractPolicy
from vint.linting.policy_registry import register_policy
@register_policy
class ProhibitMissingScriptEncoding(AbstractPolicy):
description = 'Use scriptencoding when multibyte char exists'
reference = ':help :scriptencoding'
level = Level.WARNING
has_scriptencoding = False
def listen_node_types(self):
return [NodeType.TOPLEVEL]
def is_valid(self, node, lint_context):
""" Whether the specified node is valid.
This policy prohibit scriptencoding missing when multibyte char exists.
"""
traverse(node, on_enter=self._check_scriptencoding)
if self.has_scriptencoding:
return True
return not _has_multibyte_char(lint_context)
def _check_scriptencoding(self, node):
# TODO: Use BREAK when implemented
if self.has_scriptencoding:
return SKIP_CHILDREN
node_type = NodeType(node['type'])
if node_type is not NodeType.EXCMD:
return
self.has_scriptencoding = node['str'].startswith('scripte')
def _has_multibyte_char(lint_context):
lint_target = lint_context['lint_target'] # type: AbstractLintTarget
byte_seq = lint_target.read()
return len(byte_seq) > 0 and chardet.detect(byte_seq)['encoding'] != 'ascii'
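# Example: a Vim script containing multibyte characters passes this policy
# only if it declares an encoding near the top, e.g.
#
#   scriptencoding utf-8
#   echo 'こんにちは'
#
# _check_scriptencoding accepts any command starting with 'scripte', the
# shortest abbreviation Vim allows for :scriptencoding.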
| Kuniwak/vint | vint/linting/policy/prohibit_missing_scriptencoding.py | Python | mit | 1,557 | 0.001927 |
import inspect
import sys
import pytest
from numpy.core import arange
from numpy.testing import assert_, assert_equal, assert_raises_regex
from numpy.lib import deprecate
import numpy.lib.utils as utils
from io import StringIO
@pytest.mark.skipif(sys.flags.optimize == 2, reason="Python running -OO")
def test_lookfor():
out = StringIO()
utils.lookfor('eigenvalue', module='numpy', output=out,
import_modules=False)
out = out.getvalue()
assert_('numpy.linalg.eig' in out)
@deprecate
def old_func(self, x):
return x
@deprecate(message="Rather use new_func2")
def old_func2(self, x):
return x
def old_func3(self, x):
return x
new_func3 = deprecate(old_func3, old_name="old_func3", new_name="new_func3")
def old_func4(self, x):
"""Summary.
Further info.
"""
return x
new_func4 = deprecate(old_func4)
def old_func5(self, x):
"""Summary.
Bizarre indentation.
"""
return x
new_func5 = deprecate(old_func5, message="This function is\ndeprecated.")
def old_func6(self, x):
"""
Also in PEP-257.
"""
return x
new_func6 = deprecate(old_func6)
def test_deprecate_decorator():
assert_('deprecated' in old_func.__doc__)
def test_deprecate_decorator_message():
assert_('Rather use new_func2' in old_func2.__doc__)
def test_deprecate_fn():
assert_('old_func3' in new_func3.__doc__)
assert_('new_func3' in new_func3.__doc__)
@pytest.mark.skipif(sys.flags.optimize == 2, reason="-OO discards docstrings")
@pytest.mark.parametrize('old_func, new_func', [
(old_func4, new_func4),
(old_func5, new_func5),
(old_func6, new_func6),
])
def test_deprecate_help_indentation(old_func, new_func):
_compare_docs(old_func, new_func)
# Ensure we don't mess up the indentation
for knd, func in (('old', old_func), ('new', new_func)):
for li, line in enumerate(func.__doc__.split('\n')):
if li == 0:
                assert line.startswith('    ') or not line.startswith(' '), knd
elif line:
assert line.startswith(' '), knd
def _compare_docs(old_func, new_func):
old_doc = inspect.getdoc(old_func)
new_doc = inspect.getdoc(new_func)
index = new_doc.index('\n\n') + 2
assert_equal(new_doc[index:], old_doc)
@pytest.mark.skipif(sys.flags.optimize == 2, reason="-OO discards docstrings")
def test_deprecate_preserve_whitespace():
assert_('\n Bizarre' in new_func5.__doc__)
def test_safe_eval_nameconstant():
# Test if safe_eval supports Python 3.4 _ast.NameConstant
utils.safe_eval('None')
class TestByteBounds:
def test_byte_bounds(self):
# pointer difference matches size * itemsize
# due to contiguity
a = arange(12).reshape(3, 4)
low, high = utils.byte_bounds(a)
assert_equal(high - low, a.size * a.itemsize)
def test_unusual_order_positive_stride(self):
a = arange(12).reshape(3, 4)
b = a.T
low, high = utils.byte_bounds(b)
assert_equal(high - low, b.size * b.itemsize)
def test_unusual_order_negative_stride(self):
a = arange(12).reshape(3, 4)
b = a.T[::-1]
low, high = utils.byte_bounds(b)
assert_equal(high - low, b.size * b.itemsize)
def test_strided(self):
a = arange(12)
b = a[::2]
low, high = utils.byte_bounds(b)
# the largest pointer address is lost (even numbers only in the
# stride), and compensate addresses for striding by 2
assert_equal(high - low, b.size * 2 * b.itemsize - b.itemsize)
def test_assert_raises_regex_context_manager():
with assert_raises_regex(ValueError, 'no deprecation warning'):
raise ValueError('no deprecation warning')
| abalkin/numpy | numpy/lib/tests/test_utils.py | Python | bsd-3-clause | 3,769 | 0.001061 |
import morepath
from more.jinja2 import Jinja2App
class App(Jinja2App):
pass
@App.path(path='')
class Root(object):
pass
@App.view(model=Root)
def hello_world(self, request):
return "Hello world!"
@App.html(template='base.html')
def main(request):
return {'name': 'matt'}
if __name__ == '__main__':
config = morepath.setup()
config.scan()
config.commit()
morepath.run(App())
| makaimc/pycontacts | morepath_jinja_sqlalchemy/app.py | Python | mit | 417 | 0 |
'''
Created by auto_sdk on 2015.09.17
'''
from top.api.base import RestApi
class AreasGetRequest(RestApi):
def __init__(self,domain='gw.api.taobao.com',port=80):
RestApi.__init__(self,domain, port)
self.fields = None
def getapiname(self):
return 'taobao.areas.get'
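# Illustrative call sketch (appkey/secret are placeholders; set_app_info and
# getResponse are assumed from this SDK's RestApi base class):
#
#   req = AreasGetRequest()
#   req.set_app_info(top.appinfo('appkey', 'appsecret'))
#   req.fields = 'id,type,name,parent_id,zip'
#   areas = req.getResponse()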
| colaftc/webtool | top/api/rest/AreasGetRequest.py | Python | mit | 285 | 0.031579 |
"""
Copyright (c) 2004-Present Pivotal Software, Inc.
This program and the accompanying materials are made available under
the terms of the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from contextlib import closing
from datetime import datetime
from StringIO import StringIO
from unittest2.runner import _WritelnDecorator
import unittest2 as unittest
import tinctest
import os
from mpp.models import MPPTestCase
from mpp.lib.PSQL import PSQL
@unittest.skip('mock')
class MockMPPTestCaseWithClassVariable(MPPTestCase):
db_name = 'dataset_0.01_heap'
def test_do_stuff(self):
self.assertTrue(True)
@unittest.skip('mock')
class MockMPPTestCase(MPPTestCase):
"""
@db_name dataset_0.01_heap
"""
def test_do_stuff(self):
self.assertTrue(True)
def test_failure(self):
self.assertTrue(False)
def test_gather_logs(self):
"""
@gather_logs_on_failure True
@restart_on_failure True
"""
PSQL.run_sql_command("select pg_sleep(5)")
self.assertTrue(False)
def test_restart_on_failure(self):
"""
@gather_logs_on_failure True
@restart_on_failure True
"""
PSQL.run_sql_command("select * from some_table_that_does_not_exist_to_generate_log_errors")
self.assertTrue(False)
class MPPTestCaseRegressionTests(unittest.TestCase):
def test_sanity_run(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromName('mpp.models.regress.mpp_tc.regress_mpp_test_case.MockMPPTestCase.test_do_stuff')
self.assertIsNotNone(test_suite)
        self.assertEqual(len(test_suite._tests), 1)
for test in test_suite._tests:
test.__class__.__unittest_skip__ = False
with closing(_WritelnDecorator(StringIO())) as buffer:
test_result = tinctest.TINCTextTestResult(buffer, True, 1)
test_suite.run(test_result)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 0)
def test_sanity_failure(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromName('mpp.models.regress.mpp_tc.regress_mpp_test_case.MockMPPTestCase.test_failure')
self.assertIsNotNone(test_suite)
        self.assertEqual(len(test_suite._tests), 1)
for test in test_suite._tests:
test.__class__.__unittest_skip__ = False
with closing(_WritelnDecorator(StringIO())) as buffer:
test_result = tinctest.TINCTextTestResult(buffer, True, 1)
test_suite.run(test_result)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 1)
def test_failure_gather_logs(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromName('mpp.models.regress.mpp_tc.regress_mpp_test_case.MockMPPTestCase.test_gather_logs')
self.assertIsNotNone(test_suite)
        self.assertEqual(len(test_suite._tests), 1)
for test in test_suite._tests:
test.__class__.__unittest_skip__ = False
with closing(_WritelnDecorator(StringIO())) as buffer:
tinc_test_runner = tinctest.TINCTestRunner(stream = buffer, descriptions = True, verbosity = 1)
test_result = tinc_test_runner.run(test_suite)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 1)
expected_log_file = os.path.join(MockMPPTestCase.get_out_dir(), test._testMethodName + '.logs')
self.assertTrue(os.path.exists(expected_log_file))
self.assertTrue(os.path.getsize(expected_log_file) > 0)
def test_restart_on_failure(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromName('mpp.models.regress.mpp_tc.regress_mpp_test_case.MockMPPTestCase.test_restart_on_failure')
self.assertIsNotNone(test_suite)
        self.assertEqual(len(test_suite._tests), 1)
for test in test_suite._tests:
test.__class__.__unittest_skip__ = False
with closing(_WritelnDecorator(StringIO())) as buffer:
tinc_test_runner = tinctest.TINCTestRunner(stream = buffer, descriptions = True, verbosity = 1)
test_result = tinc_test_runner.run(test_suite)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 1)
# TODO may be add a check later on to see if we actually restart the cluster.
expected_log_file = os.path.join(MockMPPTestCase.get_out_dir(), test._testMethodName + '.logs')
self.assertTrue(os.path.exists(expected_log_file))
self.assertTrue(os.path.getsize(expected_log_file) > 0)
class MPPTestCaseTestSetUpClass(unittest.TestCase):
def test_setup(self):
test_case = MockMPPTestCase('test_do_stuff')
self.assertEqual(test_case.db_name, "dataset_0.01_heap")
class MPPTestCaseTestSetUpClassWithClassVariable(unittest.TestCase):
def test_setup(self):
test_case = MockMPPTestCaseWithClassVariable('test_do_stuff')
self.assertEqual(test_case.db_name, "dataset_0.01_heap")
| edespino/gpdb | src/test/tinc/tincrepo/mpp/models/regress/mpp_tc/regress_mpp_test_case.py | Python | apache-2.0 | 6,282 | 0.004935 |
import unittest
from src.Analyzer import plot_word_embeddings
from src.Labels import Labels
class AnalyzerTests(unittest.TestCase):
def test_plot_word_embeddings(self):
plot_word_embeddings("Doc2Vec", "Test", "Objective", "Subjective", [[-3.99940408, -1.43488923],[ 3.51106635, -2.01347499], [-1.14695001, -2.10861514]],
[Labels.strong_pos, Labels.strong_pos, Labels.strong_neg])
| xyryx/SentimentAnalysis | tests/AnalyzerTests.py | Python | mit | 425 | 0.009412 |
#!/usr/bin/env python
#
# Freesound is (c) MUSIC TECHNOLOGY GROUP, UNIVERSITAT POMPEU FABRA
#
# Freesound is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Freesound is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# See AUTHORS file.
#
# 03/10/2013: Modified from original code
import sys
from compmusic.extractors.imagelib.MelSpectrogramImage import create_wave_images
from processing import AudioProcessingException
'''
parser = optparse.OptionParser("usage: %prog [options] input-filename", conflict_handler="resolve")
parser.add_option("-a", "--waveout", action="store", dest="output_filename_w", type="string", help="output waveform image (default input filename + _w.png)")
parser.add_option("-s", "--specout", action="store", dest="output_filename_s", type="string", help="output spectrogram image (default input filename + _s.jpg)")
parser.add_option("-w", "--width", action="store", dest="image_width", type="int", help="image width in pixels (default %default)")
parser.add_option("-h", "--height", action="store", dest="image_height", type="int", help="image height in pixels (default %default)")
parser.add_option("-f", "--fft", action="store", dest="fft_size", type="int", help="fft size, power of 2 for increased performance (default %default)")
parser.add_option("-p", "--profile", action="store_true", dest="profile", help="run profiler and output profiling information")
parser.set_defaults(output_filename_w=None, output_filename_s=None, image_width=500, image_height=171, fft_size=2048)
(options, args) = parser.parse_args()
if len(args) == 0:
parser.print_help()
parser.error("not enough arguments")
if len(args) > 1 and (options.output_filename_w != None or options.output_filename_s != None):
parser.error("when processing multiple files you can't define the output filename!")
'''
def progress_callback(percentage):
sys.stdout.write(str(percentage) + "% ")
sys.stdout.flush()
# process all files so the user can use wildcards like *.wav
def genimages(input_file, output_file_w, output_file_s, output_file_m, options):
args = (input_file, output_file_w, output_file_s, output_file_m, options.image_width, options.image_height,
options.fft_size, progress_callback, options.f_min, options.f_max, options.scale_exp, options.pallete)
print("processing file %s:\n\t" % input_file, end="")
try:
create_wave_images(*args)
except AudioProcessingException as e:
print("Error running wav2png: ", e)
| MTG/pycompmusic | compmusic/extractors/imagelib/wav2png.py | Python | agpl-3.0 | 3,050 | 0.005246 |
import pytest
import numpy as np
import matplotlib.pyplot as plt
from neupy import plots
from base import BaseTestCase
class HintonDiagramTestCase(BaseTestCase):
single_thread = True
@pytest.mark.mpl_image_compare
def test_simple_hinton(self):
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
plt.sca(ax) # To test the case when ax=None
weight = np.random.randn(20, 20)
plots.hinton(weight, add_legend=True)
return fig
@pytest.mark.mpl_image_compare
def test_max_weight(self):
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
weight = 100 * np.random.randn(20, 20)
plots.hinton(weight, ax=ax, max_weight=10, add_legend=True)
return fig
@pytest.mark.mpl_image_compare
def test_hinton_without_legend(self):
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
weight = np.random.randn(20, 20)
plots.hinton(weight, ax=ax, add_legend=False)
return fig
@pytest.mark.mpl_image_compare
def test_hinton_only_positive(self):
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
weight = np.random.random((20, 20))
plots.hinton(weight, ax=ax)
return fig
@pytest.mark.mpl_image_compare
def test_hinton_only_negative(self):
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
weight = -np.random.random((20, 20))
plots.hinton(weight, ax=ax)
return fig
@pytest.mark.mpl_image_compare
def test_hinton_1darray(self):
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
weight = -np.random.randn(20)
plots.hinton(weight, ax=ax)
return fig
| itdxer/neupy | tests/plots/test_hinton.py | Python | mit | 1,732 | 0 |
#!/usr/bin/env python
# Copyright (C) 2006-2021 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
from essentia.standard import MonoLoader, SuperFluxExtractor
from numpy import *
class TestSuperFluxExtractor(TestCase):
def testInvalidParam(self):
        # All the parameter ranges are theoretically from 0 to infinity.
# Hence, we use neg. value for invalid checks.
self.assertConfigureFails(SuperFluxExtractor(), { 'combine': -1})
self.assertConfigureFails(SuperFluxExtractor(), { 'frameSize': -1})
self.assertConfigureFails(SuperFluxExtractor(), { 'hopSize': -1})
self.assertConfigureFails(SuperFluxExtractor(), { 'ratioThreshold': -1})
self.assertConfigureFails(SuperFluxExtractor(), { 'sampleRate': -1})
self.assertConfigureFails(SuperFluxExtractor(), { 'threshold': -1})
def testRegressionDubstep(self):
audio = MonoLoader(filename=join(testdata.audio_dir, 'recorded', 'dubstep.wav'))()
        # This test case uses the documented default parameters on the recording dubstep.wav
onsets = SuperFluxExtractor(combine=30,frameSize=2048,hopSize=256,ratioThreshold=16,
sampleRate=44100,threshold=0.5)(audio)
# This commented out code was used to obtain reference samples for storing in a file.
# save('superfluxdub', onsets)
# Reference samples are loaded as expected values
expected_superflux = load(join(filedir(), 'superflux/superfluxdub.npy'))
self.assertAlmostEqualVector(onsets, expected_superflux, 1e-5)
def testRegressionTechnoloop(self):
audio = MonoLoader(filename=join(testdata.audio_dir, 'recorded', 'techno_loop.wav'))()
        # This test case uses peak parameters slightly different from the defaults on the recording techno_loop.wav
onsets = SuperFluxExtractor(combine=20,frameSize=2048,hopSize=256,ratioThreshold=8,
sampleRate=44100,threshold=0.25)(audio)
# This commented out code was used to obtain reference samples for storing in a file.
# save('superfluxtechno', onsets)
# Reference samples are loaded as expected values
expected_superflux = load(join(filedir(), 'superflux/superfluxtechno.npy'))
self.assertAlmostEqualVector(onsets, expected_superflux, 1e-5)
def _assertVectorWithinVector(self, found, expected, precision=1e-5):
for i in range(len(found)):
for j in range(1,len(expected)):
if found[i] <= expected[j] and found[i] >= expected[j-1]:
if fabs(found[i] - expected[j-1]) < fabs(expected[j] - found[i]):
self.assertAlmostEqual(found[i], expected[j-1], precision)
else:
self.assertAlmostEqual(found[i], expected[j], precision)
def testSilence(self):
# zeros should return no onsets (empty array)
self.assertEqualVector(SuperFluxExtractor()(zeros(44100)), [])
def testEmpty(self):
# empty input should return no onsets (empty array)
self.assertEqualVector(SuperFluxExtractor()([]), [])
def testImpulse(self):
# Given an impulse should return its position
sampleRate = 44100
frameSize = 2048
hopSize = 256
signal = zeros(sampleRate * 2)
# impulses at 0:30 and 1:00
signal[22050] = 1.
signal[44100] = 1.
expected = [0.5, 1.]
result = SuperFluxExtractor(sampleRate=sampleRate, frameSize=frameSize,
hopSize=hopSize)(signal)
# SuperfluxPeaks has a parameter 'combine' which is a threshold that
# puts together consecutive peaks. This means that a peak will be
# detected as soon as it is seen by a frame. Thus, the frame size
# also influences the expected precision of the algorithm.
precision = (hopSize + frameSize) / sampleRate
self.assertAlmostEqualVectorAbs(result, expected, precision)
suite = allTests(TestSuperFluxExtractor)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)
| MTG/essentia | test/src/unittests/rhythm/test_superfluxextractor.py | Python | agpl-3.0 | 4,892 | 0.00879 |
# -*- coding: UTF-8 -*-
"""
Desc: django util.
Note:
---------------------------------------
# 2016/04/30 kangtian created
"""
from hashlib import md5
def gen_md5(content_str):
m = md5()
m.update(content_str)
return m.hexdigest()
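# Example (Python 2 str input, as this module assumes):
#   gen_md5('abc') -> '900150983cd24fb0d6963f7d28e17f72'
# On Python 3 the input would need encoding first, since md5 hashes bytes.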
| tiankangkan/paper_plane | k_util/hash_util.py | Python | gpl-3.0 | 257 | 0 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
from front.views import *
from front.views import views as front_views
from django.views.decorators.csrf import csrf_exempt
if not settings.DEBUG:
s = {'SSL': settings.ENABLE_SSL}
else:
s = {}
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'DisSoNet.views.home', name='home'),
url(r'^$', HomeView.as_view(), name='home'),
url(r'^$', HomeView.as_view(), name='home'),
url(r'^test/', front_views.test, name='test'),
url(r'^privacy/', front_views.privacy, name='privacy'),
url(r'^stream_debug/', front_views.stream_debug, name='stream'),
url(r'^admin/', include(admin.site.urls)),
url(r'^github/setup/', GitHubView.as_view(), s, name='initGithub'),
url(r'^accounts/login/', LoginView.as_view(), s, name='login'),
url(r'^accounts/logout/', LogoutView.as_view(), s, name='logout'),
url(r'^accounts/view/', UserView.as_view(), s, name='user_view'),
url(r'^accounts/register/', RegisterView.as_view(), s, name='register'),
url(r'^accounts/reset/$', front_views.reset, s, name='reset'),
url(r'^accounts/reset/e/(?P<email>[\w-]+)/$', front_views.reset,
s, name='reset'),
url(r'^accounts/reset/done/$', front_views.reset_done,
s, name='reset_done'),
url(r'^accounts/reset/confirm/(?P<uidb64>[0-9A-Za-z]+)-(?P<token>[\w-]+)/$',
front_views.reset_confirm, s, name='reset_confirm'),
url(r'^accounts/reset/complete/$', front_views.reset_complete,
name='reset_complete'),
# urls for post(s)/
url(r'^post/?$', PublicPosts.as_view(), name='public_posts'),
url(r'^posts/?$', PublicPosts.as_view(), name='public_posts'),
# urls for post(s)/<post_id>/
url(r'^post/(?P<post_id>[\w-]+)/?$', PostResource.as_view(), name='post_resource'),
url(r'^posts/(?P<post_id>[\w-]+)/?$', PostResource.as_view(), name='post_resource'),
# urls for post(s)/<post_id>/comments/
url(r'^post/(?P<post_id>[\w-]+)/comments/?$',
csrf_exempt(PostComments.as_view()), name='post_comments'),
url(r'^posts/(?P<post_id>[\w-]+)/comments/?$',
csrf_exempt(PostComments.as_view()), name='post_comments'),
url(r'^author/posts/?$', AuthorStream.as_view(), name='author_posts'),
url(r'^author/(?P<author_id>[\w-]+)/posts/?$', VisiblePostToUser.as_view(),
name='visibile_posts'),
url(r'^author/(?P<author_id>[\w-]+)/?$', AuthorProfile.as_view(),
name='author_profile'),
url(r'^friendrequest/$', csrf_exempt(FriendRequestView.as_view()),
name='friend_request'),
url(r'^friends/(?P<user_id_1>[\w-]+)/(?P<user_id_2>[\w-]+)/$',
AreFriends.as_view(), name='are_friends'),
url(r'^friends/?', FriendsView.as_view(),
s, name='friends_view'),
url(r'^test_rest/(?P<id>[\w-]+)/?$', front_views.test_rest, name="test_rest"),
)
| Solanar/CMPUT410-Project | DisSoNet/DisSoNet/urls.py | Python | apache-2.0 | 2,954 | 0.003385 |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from funcy import partial
from canopen_301_402.constants import *
from canopen_301_402.async.async_chain import AsyncChain
from canopen_301_402.async.sdo_write_object import SdoWriteObject
from canopen_301_402.can402.ops.notify_new_target import NotifyNewTarget
import struct
class SetTarget(AsyncChain):
"""docstring for SetTarget"""
def __init__(self, node, value, relative=False, immediatly=False, target_type="Position", *args, **kwargs):
parameter_name = "Target " + target_type
self.node = node
self.value = value
self.relative = relative
self.immediatly = immediatly
set_target = partial(SdoWriteObject,
node = node,
parameter_name = parameter_name,
value = self.value)
notify_new_target = partial(NotifyNewTarget,
node = node,
relative = self.relative,
immediatly = self.immediatly)
operations = [set_target, notify_new_target]
super(SetTarget, self).__init__(node, operations, *args, **kwargs)
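# Illustrative usage (the node object is assumed to be a configured CiA 402
# node; argument names follow this constructor):
#
#   op = SetTarget(node, value=10000, relative=True, target_type="Position")
#   # writes the "Target Position" object via SDO, then signals the new
#   # set-point through NotifyNewTarget with the relative/immediately flags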
| xaedes/canopen_301_402 | src/canopen_301_402/can402/ops/set_target.py | Python | mit | 1,247 | 0.022454 |
"""
This module contains tools used to get data using tourinsoft's API.
Request outputs JSON which is converted to a dictionary.
"""
import requests
URL = 'http://wcf.tourinsoft.com/Syndication/3.0/cdt72/'
EVENT_URL = 'e9a8e2bf-c933-4831-9ebb-87eec559a21a/'
PLACES_URL = '969e24f9-75a2-4cc6-a46c-db1f6ebbfe97/'
# shortcuts
def request_places():
return request(event=False)
def request_events():
return request(event=True)
def flatten_dictionary(dictionary):
"""
    Input: a request's JSON output dictionary, possibly containing nested dictionaries
Output: a flattened dictionary (format: key1.key2 = value2)
"""
flattenedDictionary = dict()
for key, value in dictionary.items():
if isinstance(value, dict):
for subkey, subvalue in value.items():
flattenedDictionary[key + '.' + subkey] = subvalue
else:
flattenedDictionary[key] = value
return flattenedDictionary
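# Example: flatten_dictionary({'a': 1, 'b': {'c': 2}})
# -> {'a': 1, 'b.c': 2} (one level of nesting, matching the API's JSON shape)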
def request(collection='Objects', filters=dict(), metadata=True,
event=True):
"""
General request function.
Input:
- collection: general API folder (Objects contains events + places)
    - filters: dictionary of field/value equality filters
- metadata: boolean to get metadata instead of values
(doesn't seem to matter in practice)
- event: search in events DB (if False, will search in places DB)
Output: a tuple of dictionaries (one dictionary for one object) containing
the properties of each object.
"""
filters_str = ('&$filter=' + key + " eq '" + value + "'"
for key, value in filters.items())
url = URL + ''.join((EVENT_URL if event else PLACES_URL,
collection,
'?$format=json',
''.join(filters_str),
'&metadata' if metadata else ''))
# Values field contains a list of all objects (other fields are useless)
# Flatten dictionary formats nested dictionaries (see module parser)
jsonOutput = requests.get(url).json()['value']
return tuple(flatten_dictionary(tobject) for tobject in jsonOutput)
# Usage example
# test = request(filters={"Commune": "LE MANS"}, metadata=False)
| Aluriak/24hducode2016 | src/source_tourinsoft/request_data.py | Python | unlicense | 2,237 | 0.000447 |
#!/usr/bin/env ../../jazzshell
"""
Supplies a set of admin operations for trained models.
============================== License ========================================
Copyright (C) 2008, 2010-12 University of Edinburgh, Mark Granroth-Wilding
This file is part of The Jazz Parser.
The Jazz Parser is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The Jazz Parser is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with The Jazz Parser. If not, see <http://www.gnu.org/licenses/>.
============================ End license ======================================
"""
__author__ = "Mark Granroth-Wilding <[email protected]>"
import sys, math, os, re
from optparse import OptionParser
from jazzparser.utils.interface import boolean_input
from jazzparser.misc.chordlabel import HPChordLabeler, ModelLoadError
def main():
usage = "%prog <command> [<command-arg> ...]"
description = "Provides administrative operations for trained "\
"chord labeler models. Use the 'help' command to get a list "\
"of commands or command usage."
parser = OptionParser(usage=usage, description=description)
options, arguments = parser.parse_args()
if len(arguments) < 1:
print >>sys.stderr, "You must specify a command"
sys.exit(1)
command = arguments[0].lower()
command_args = arguments[1:]
def _load_model(name):
# Load the model from its file
return HPChordLabeler.load_model(name)
# Define behaviour for each command
list_help = "Lists all the trained models available"
def _list(args):
# List the available models for the given model type
models = HPChordLabeler.list_models()
print "Available models:"
print ", ".join(list(sorted(models)))
desc_help = "Outputs the descriptive text associated with the model at training time"
def _desc(args):
if len(args) == 0:
raise CommandError, "desc requires a model name as an argument"
try:
model = _load_model(args[0])
except ModelLoadError, err:
print >>sys.stderr, "No model %s\n" % (args[0])
raise err
print "Model descriptor"
print "================"
print model.description
del_help = "Deletes a model and all its associated files"
def _del(args):
if len(args) != 1:
raise CommandError, "del requires a model name as an argument"
models = HPChordLabeler.list_models()
model_name = args[0]
if model_name not in models:
print >>sys.stderr, "No model %s" % model_name
model = _load_model(model_name)
print "Deleting %s" % model_name
model.delete()
params_help = "Outputs the model's parameters in a human-readable format "\
"(not available for all model types)"
def _params(args):
if len(args) == 0:
raise CommandError, "params requires a model name as an argument"
try:
model = _load_model(args[0])
except ModelLoadError, err:
print >>sys.stderr, "No model %s\n" % (args[0])
raise err
print "Model parameters"
print "================"
print model.readable_parameters
# Add commands by adding an entry to this dictionary
# The key is the command name
# The value is a tuple of a function to call and the help text for the command
commands = {
'list' : (_list, list_help),
'desc' : (_desc, desc_help),
'del' : (_del, del_help),
'params' : (_params, params_help),
}
all_commands = commands.keys() + ['help']
try:
if command == "help":
if len(command_args) == 0:
print "Available commands: %s" % ", ".join(all_commands)
print "Use 'help' followed by the command name to get command-specific usage."
sys.exit(0)
elif len(command_args) > 1:
raise CommandError, "to get command help, use the command 'help' followed by the command name"
if command_args[0] not in commands:
raise CommandError, "unknown command '%s'. Available commands are: %s" % \
(command_args[0], ", ".join(all_commands))
# Print out the help text given for the command
print "Help for command '%s':" % command_args[0]
print commands[command_args[0]][1]
sys.exit(0)
elif command in commands:
# Run the command
commands[command][0](command_args)
else:
# The command wasn't found in our defined commands
raise CommandError, "unknown command '%s'. Available "\
"commands are: %s" % (command, ", ".join(all_commands))
except CommandError, err:
print "Error running command: %s" % err
sys.exit(1)
except ModelLoadError, err:
print "Error loading the model: %s" % err
sys.exit(1)
class CommandError(Exception):
pass
if __name__ == "__main__":
main()
| markgw/jazzparser | bin/models/chordlabel/admin.py | Python | gpl-3.0 | 5,587 | 0.00698 |
print("Salutations, Universe!")
| KuttKatrea/sublime-toolrunner | test/print.py | Python | mit | 32 | 0 |
class Solution:
    @staticmethod
    def moveZeroes(nums):
        # Two pointers: i scans for zeros, j scans ahead for the next
        # non-zero to swap into place; relative order of non-zeros is kept.
        j = 1
        for i in range(len(nums) - 1):
            if nums[i] == 0:
                j = max(j, i + 1)  # j must always look ahead of i
                while j < len(nums):
                    if nums[j] == 0:
                        j += 1
                    else:
                        # swap: nums[i] is known to be 0 here
                        nums[i], nums[j] = nums[j], 0
                        break
        return nums
print(Solution.moveZeroes([0, 1, 2, 3]))
| shobhitmishra/CodingProblems | LeetCode/Session3/testMoveZero.py | Python | mit | 422 | 0.021327 |
# Generated by Django 1.11.2 on 2017-08-08 03:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("institutions", "0010_auto_20170808_0252")]
operations = []
| watchdogpolska/feder | feder/institutions/migrations/0011_auto_20170808_0308.py | Python | mit | 211 | 0 |
#
import os
import unittest
class TestBase(unittest.TestCase):
@classmethod
def datafilename(cls, name):
fname = os.path.join(
os.path.dirname(__file__),
"data",
name,
)
assert os.path.exists(fname)
return fname
| deets/pyCamBam | tests/base.py | Python | gpl-2.0 | 295 | 0.00678 |
import wash
class Site(wash.Site):
def __init__(self, *args, **kwargs):
super(Site, self).__init__(*args, **kwargs)
self.court_id = self.__module__
self.courtLevel = 'C'
self.pubStatus = 'PUB'
self._set_parameters()
def _get_case_names(self):
path = "{base}/td[4]/text()".format(base=self.base)
return list(self.html.xpath(path))
| Andr3iC/juriscraper | opinions/united_states/state/washctapp_p.py | Python | bsd-2-clause | 397 | 0 |
#!/usr/bin/python3
# This code is available for use under CC0 (Creative Commons 0 - universal).
# You can copy, modify, distribute and perform the work, even for commercial
# purposes, all without asking permission. For more information, see LICENSE.md or
# https://creativecommons.org/publicdomain/zero/1.0/
# usage:
# opts = Picker(
# title = 'Delete all files',
# options = ["Yes", "No"]
# ).getSelected()
# returns a simple list
# cancel returns False
import curses
import shutil
import signal
from curses import wrapper
class Picker:
"""Allows you to select from a list with curses"""
stdscr = None
win = None
title = ""
arrow = ""
footer = ""
more = ""
c_selected = ""
c_empty = ""
cursor = 0
offset = 0
selected = 0
selcount = 0
aborted = False
window_height = shutil.get_terminal_size().lines - 10
window_width = shutil.get_terminal_size().columns - 20
all_options = []
length = 0
def curses_start(self):
self.stdscr = curses.initscr()
curses.noecho()
curses.cbreak()
self.win = curses.newwin(
5 + self.window_height,
self.window_width,
2,
4
)
def sigwinch_handler(self, n, frame):
self.window_height = shutil.get_terminal_size().lines - 10
self.window_width = shutil.get_terminal_size().columns - 20
curses.endwin()
self.stdscr.clear()
self.stdscr = curses.initscr()
self.win = curses.newwin(
5 + self.window_height,
self.window_width,
2,
4
)
def curses_stop(self):
curses.nocbreak()
self.stdscr.keypad(0)
curses.echo()
curses.endwin()
def getSelected(self):
if self.aborted == True:
return( False )
ret_s = [x for x in self.all_options if x["selected"]]
ret = [x["label"] for x in ret_s]
return(ret)
def redraw(self):
self.win.clear()
self.win.border(
self.border[0], self.border[1],
self.border[2], self.border[3],
self.border[4], self.border[5],
self.border[6], self.border[7]
)
self.win.addstr(
self.window_height + 4, 5, " " + self.footer + " "
)
position = 0
        visible = self.all_options[self.offset:self.offset + self.window_height + 1]
        for option in visible:
if option["selected"] == True:
line_label = self.c_selected + " "
else:
line_label = self.c_empty + " "
if len(option["label"]) > (self.window_width - 20):
reduced = option["label"][:self.window_width - 20] + "..."
else:
reduced = option["label"]
self.win.addstr(position + 2, 5, line_label + reduced)
position = position + 1
# hint for more content above
if self.offset > 0:
self.win.addstr(1, 5, self.more)
# hint for more content below
if self.offset + self.window_height <= self.length - 2:
self.win.addstr(self.window_height + 3, 5, self.more)
self.win.addstr(0, 5, " " + self.title + " ")
self.win.addstr(
0, self.window_width - 8,
" " + str(self.selcount) + "/" + str(self.length) + " "
)
self.win.addstr(self.cursor + 2,1, self.arrow)
self.win.refresh()
def check_cursor_up(self):
if self.cursor < 0:
self.cursor = 0
if self.offset > 0:
self.offset = self.offset - 1
def check_cursor_down(self):
if self.cursor >= self.length:
self.cursor = self.cursor - 1
if self.cursor > self.window_height:
self.cursor = self.window_height
self.offset = self.offset + 1
if self.offset + self.cursor >= self.length:
self.offset = self.offset - 1
def curses_loop(self, stdscr):
while 1:
self.redraw()
c = stdscr.getch()
if c == ord('q') or c == ord('Q'):
self.aborted = True
break
elif c == curses.KEY_UP:
self.cursor = self.cursor - 1
elif c == curses.KEY_DOWN:
self.cursor = self.cursor + 1
#elif c == curses.KEY_PPAGE:
#elif c == curses.KEY_NPAGE:
elif c == ord(' '):
self.all_options[self.selected]["selected"] = \
not self.all_options[self.selected]["selected"]
elif c == 10:
break
# deal with interaction limits
self.check_cursor_up()
self.check_cursor_down()
# compute selected position only after dealing with limits
self.selected = self.cursor + self.offset
temp = self.getSelected()
self.selcount = len(temp)
    def __init__(self, options, title='Select', arrow="-->",
                 footer="Space = toggle, Enter = accept, q = cancel",
                 more="...", border="||--++++", c_selected="[X]", c_empty="[ ]",
                 checked=()):  # checked: iterable of option labels to pre-select
self.title = title
self.arrow = arrow
self.footer = footer
self.more = more
self.border = border
self.c_selected = c_selected
self.c_empty = c_empty
self.all_options = []
for option in options:
self.all_options.append({
"label": option,
"selected": True if (option in checked) else False
})
self.length = len(self.all_options)
self.curses_start()
signal.signal(signal.SIGWINCH, self.sigwinch_handler)
curses.wrapper( self.curses_loop )
self.curses_stop()
| MSchuwalow/StudDP | studdp/picker.py | Python | mit | 5,867 | 0.002045 |