| repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated |
|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 5 to 100 | stringlengths 4 to 299 | stringclasses 990 values | stringlengths 4 to 7 | stringlengths 666 to 1.03M | stringclasses 15 values | int64 -9,223,351,895,964,839,000 to 9,223,297,778B | float64 3.17 to 100 | int64 7 to 1k | float64 0.25 to 0.98 | bool 1 class |
hosseinmh/Django_learning | djmod/.venv/lib/python3.5/site-packages/setuptools/site-patch.py | 356 | 2307 |
def __boot():
    import sys
    import os
    PYTHONPATH = os.environ.get('PYTHONPATH')
    if PYTHONPATH is None or (sys.platform == 'win32' and not PYTHONPATH):
        PYTHONPATH = []
    else:
        PYTHONPATH = PYTHONPATH.split(os.pathsep)

    pic = getattr(sys, 'path_importer_cache', {})
    stdpath = sys.path[len(PYTHONPATH):]
    mydir = os.path.dirname(__file__)

    for item in stdpath:
        if item == mydir or not item:
            continue  # skip if current dir. on Windows, or my own directory
        importer = pic.get(item)
        if importer is not None:
            loader = importer.find_module('site')
            if loader is not None:
                # This should actually reload the current module
                loader.load_module('site')
                break
        else:
            try:
                import imp  # Avoid import loop in Python >= 3.3
                stream, path, descr = imp.find_module('site', [item])
            except ImportError:
                continue
            if stream is None:
                continue
            try:
                # This should actually reload the current module
                imp.load_module('site', stream, path, descr)
            finally:
                stream.close()
            break
    else:
        raise ImportError("Couldn't find the real 'site' module")

    known_paths = dict([(makepath(item)[1], 1) for item in sys.path])  # 2.2 comp

    oldpos = getattr(sys, '__egginsert', 0)  # save old insertion position
    sys.__egginsert = 0  # and reset the current one

    for item in PYTHONPATH:
        addsitedir(item)

    sys.__egginsert += oldpos  # restore effective old position

    d, nd = makepath(stdpath[0])
    insert_at = None
    new_path = []

    for item in sys.path:
        p, np = makepath(item)

        if np == nd and insert_at is None:
            # We've hit the first 'system' path entry, so added entries go here
            insert_at = len(new_path)

        if np in known_paths or insert_at is None:
            new_path.append(item)
        else:
            # new path after the insert point, back-insert it
            new_path.insert(insert_at, item)
            insert_at += 1

    sys.path[:] = new_path


if __name__ == 'site':
    __boot()
    del __boot
| mit | -7,169,448,315,799,882,000 | 30.175676 | 81 | 0.550065 | false |
eayunstack/horizon | openstack_dashboard/dashboards/identity/projects/workflows.py | 6 | 37160 |
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from openstack_auth import utils as auth_utils
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon.utils import memoized
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.api import base
from openstack_dashboard.api import cinder
from openstack_dashboard.api import keystone
from openstack_dashboard.api import nova
from openstack_dashboard.usage import quotas
INDEX_URL = "horizon:identity:projects:index"
ADD_USER_URL = "horizon:identity:projects:create_user"
PROJECT_GROUP_ENABLED = keystone.VERSIONS.active >= 3
PROJECT_USER_MEMBER_SLUG = "update_members"
PROJECT_GROUP_MEMBER_SLUG = "update_group_members"
COMMON_HORIZONTAL_TEMPLATE = "identity/projects/_common_horizontal_form.html"
class ProjectQuotaAction(workflows.Action):
ifcb_label = _("Injected File Content (Bytes)")
metadata_items = forms.IntegerField(min_value=-1,
label=_("Metadata Items"))
cores = forms.IntegerField(min_value=-1, label=_("VCPUs"))
instances = forms.IntegerField(min_value=-1, label=_("Instances"))
injected_files = forms.IntegerField(min_value=-1,
label=_("Injected Files"))
injected_file_content_bytes = forms.IntegerField(min_value=-1,
label=ifcb_label)
volumes = forms.IntegerField(min_value=-1, label=_("Volumes"))
snapshots = forms.IntegerField(min_value=-1, label=_("Volume Snapshots"))
gigabytes = forms.IntegerField(
min_value=-1, label=_("Total Size of Volumes and Snapshots (GB)"))
ram = forms.IntegerField(min_value=-1, label=_("RAM (MB)"))
floating_ips = forms.IntegerField(min_value=-1, label=_("Floating IPs"))
fixed_ips = forms.IntegerField(min_value=-1, label=_("Fixed IPs"))
security_groups = forms.IntegerField(min_value=-1,
label=_("Security Groups"))
security_group_rules = forms.IntegerField(min_value=-1,
label=_("Security Group Rules"))
# Neutron
security_group = forms.IntegerField(min_value=-1,
label=_("Security Groups"))
security_group_rule = forms.IntegerField(min_value=-1,
label=_("Security Group Rules"))
floatingip = forms.IntegerField(min_value=-1, label=_("Floating IPs"))
network = forms.IntegerField(min_value=-1, label=_("Networks"))
port = forms.IntegerField(min_value=-1, label=_("Ports"))
router = forms.IntegerField(min_value=-1, label=_("Routers"))
subnet = forms.IntegerField(min_value=-1, label=_("Subnets"))
def __init__(self, request, *args, **kwargs):
super(ProjectQuotaAction, self).__init__(request,
*args,
**kwargs)
disabled_quotas = quotas.get_disabled_quotas(request)
for field in disabled_quotas:
if field in self.fields:
self.fields[field].required = False
self.fields[field].widget = forms.HiddenInput()
class UpdateProjectQuotaAction(ProjectQuotaAction):
def clean(self):
cleaned_data = super(UpdateProjectQuotaAction, self).clean()
usages = quotas.tenant_quota_usages(
self.request, tenant_id=self.initial['project_id'])
# Validate the quota values before updating quotas.
bad_values = []
for key, value in cleaned_data.items():
used = usages[key].get('used', 0)
if value is not None and value >= 0 and used > value:
bad_values.append(_('%(used)s %(key)s used') %
{'used': used,
'key': quotas.QUOTA_NAMES.get(key, key)})
if bad_values:
value_str = ", ".join(bad_values)
msg = (_('Quota value(s) cannot be less than the current usage '
'value(s): %s.') %
value_str)
raise forms.ValidationError(msg)
return cleaned_data
class Meta(object):
name = _("Quota")
slug = 'update_quotas'
help_text = _("Set maximum quotas for the project.")
class CreateProjectQuotaAction(ProjectQuotaAction):
class Meta(object):
name = _("Quota")
slug = 'create_quotas'
help_text = _("Set maximum quotas for the project.")
class UpdateProjectQuota(workflows.Step):
action_class = UpdateProjectQuotaAction
template_name = COMMON_HORIZONTAL_TEMPLATE
depends_on = ("project_id",)
contributes = quotas.QUOTA_FIELDS
class CreateProjectQuota(workflows.Step):
action_class = CreateProjectQuotaAction
template_name = COMMON_HORIZONTAL_TEMPLATE
depends_on = ("project_id",)
contributes = quotas.QUOTA_FIELDS
class CreateProjectInfoAction(workflows.Action):
# Hide the domain_id and domain_name by default
domain_id = forms.CharField(label=_("Domain ID"),
required=False,
widget=forms.HiddenInput())
domain_name = forms.CharField(label=_("Domain Name"),
required=False,
widget=forms.HiddenInput())
name = forms.CharField(label=_("Name"),
max_length=64)
description = forms.CharField(widget=forms.widgets.Textarea(
attrs={'rows': 4}),
label=_("Description"),
required=False)
enabled = forms.BooleanField(label=_("Enabled"),
required=False,
initial=True)
def __init__(self, request, *args, **kwargs):
super(CreateProjectInfoAction, self).__init__(request,
*args,
**kwargs)
# For keystone V3, display the two fields in read-only
if keystone.VERSIONS.active >= 3:
readonlyInput = forms.TextInput(attrs={'readonly': 'readonly'})
self.fields["domain_id"].widget = readonlyInput
self.fields["domain_name"].widget = readonlyInput
class Meta(object):
name = _("Project Information")
help_text = _("Create a project to organize users.")
class CreateProjectInfo(workflows.Step):
action_class = CreateProjectInfoAction
template_name = COMMON_HORIZONTAL_TEMPLATE
contributes = ("domain_id",
"domain_name",
"project_id",
"name",
"description",
"enabled")
class UpdateProjectMembersAction(workflows.MembershipAction):
def __init__(self, request, *args, **kwargs):
super(UpdateProjectMembersAction, self).__init__(request,
*args,
**kwargs)
err_msg = _('Unable to retrieve user list. Please try again later.')
# Use the domain_id from the project
domain_id = self.initial.get("domain_id", None)
project_id = ''
if 'project_id' in self.initial:
project_id = self.initial['project_id']
# Get the default role
try:
default_role = api.keystone.get_default_role(self.request)
# Default role is necessary to add members to a project
if default_role is None:
default = getattr(settings,
"OPENSTACK_KEYSTONE_DEFAULT_ROLE", None)
msg = (_('Could not find default role "%s" in Keystone') %
default)
raise exceptions.NotFound(msg)
except Exception:
exceptions.handle(self.request,
err_msg,
redirect=reverse(INDEX_URL))
default_role_name = self.get_default_role_field_name()
self.fields[default_role_name] = forms.CharField(required=False)
self.fields[default_role_name].initial = default_role.id
# Get list of available users
all_users = []
try:
all_users = api.keystone.user_list(request,
domain=domain_id)
except Exception:
exceptions.handle(request, err_msg)
users_list = [(user.id, user.name) for user in all_users]
# Get list of roles
role_list = []
try:
role_list = api.keystone.role_list(request)
except Exception:
exceptions.handle(request,
err_msg,
redirect=reverse(INDEX_URL))
for role in role_list:
field_name = self.get_member_field_name(role.id)
label = role.name
self.fields[field_name] = forms.MultipleChoiceField(required=False,
label=label)
self.fields[field_name].choices = users_list
self.fields[field_name].initial = []
# Figure out users & roles
if project_id:
try:
users_roles = api.keystone.get_project_users_roles(request,
project_id)
except Exception:
exceptions.handle(request,
err_msg,
redirect=reverse(INDEX_URL))
for user_id in users_roles:
roles_ids = users_roles[user_id]
for role_id in roles_ids:
field_name = self.get_member_field_name(role_id)
self.fields[field_name].initial.append(user_id)
class Meta(object):
name = _("Project Members")
slug = PROJECT_USER_MEMBER_SLUG
class UpdateProjectMembers(workflows.UpdateMembersStep):
action_class = UpdateProjectMembersAction
available_list_title = _("All Users")
members_list_title = _("Project Members")
no_available_text = _("No users found.")
no_members_text = _("No users.")
def contribute(self, data, context):
if data:
try:
roles = api.keystone.role_list(self.workflow.request)
except Exception:
exceptions.handle(self.workflow.request,
_('Unable to retrieve user list.'))
post = self.workflow.request.POST
for role in roles:
field = self.get_member_field_name(role.id)
context[field] = post.getlist(field)
return context
class UpdateProjectGroupsAction(workflows.MembershipAction):
def __init__(self, request, *args, **kwargs):
super(UpdateProjectGroupsAction, self).__init__(request,
*args,
**kwargs)
err_msg = _('Unable to retrieve group list. Please try again later.')
# Use the domain_id from the project
domain_id = self.initial.get("domain_id", None)
project_id = ''
if 'project_id' in self.initial:
project_id = self.initial['project_id']
# Get the default role
try:
default_role = api.keystone.get_default_role(self.request)
# Default role is necessary to add members to a project
if default_role is None:
default = getattr(settings,
"OPENSTACK_KEYSTONE_DEFAULT_ROLE", None)
msg = (_('Could not find default role "%s" in Keystone') %
default)
raise exceptions.NotFound(msg)
except Exception:
exceptions.handle(self.request,
err_msg,
redirect=reverse(INDEX_URL))
default_role_name = self.get_default_role_field_name()
self.fields[default_role_name] = forms.CharField(required=False)
self.fields[default_role_name].initial = default_role.id
# Get list of available groups
all_groups = []
try:
all_groups = api.keystone.group_list(request,
domain=domain_id)
except Exception:
exceptions.handle(request, err_msg)
groups_list = [(group.id, group.name) for group in all_groups]
# Get list of roles
role_list = []
try:
role_list = api.keystone.role_list(request)
except Exception:
exceptions.handle(request,
err_msg,
redirect=reverse(INDEX_URL))
for role in role_list:
field_name = self.get_member_field_name(role.id)
label = role.name
self.fields[field_name] = forms.MultipleChoiceField(required=False,
label=label)
self.fields[field_name].choices = groups_list
self.fields[field_name].initial = []
# Figure out groups & roles
if project_id:
try:
groups_roles = api.keystone.get_project_groups_roles(
request, project_id)
except Exception:
exceptions.handle(request,
err_msg,
redirect=reverse(INDEX_URL))
for group_id in groups_roles:
roles_ids = groups_roles[group_id]
for role_id in roles_ids:
field_name = self.get_member_field_name(role_id)
self.fields[field_name].initial.append(group_id)
class Meta(object):
name = _("Project Groups")
slug = PROJECT_GROUP_MEMBER_SLUG
class UpdateProjectGroups(workflows.UpdateMembersStep):
action_class = UpdateProjectGroupsAction
available_list_title = _("All Groups")
members_list_title = _("Project Groups")
no_available_text = _("No groups found.")
no_members_text = _("No groups.")
def contribute(self, data, context):
if data:
try:
roles = api.keystone.role_list(self.workflow.request)
except Exception:
exceptions.handle(self.workflow.request,
_('Unable to retrieve role list.'))
post = self.workflow.request.POST
for role in roles:
field = self.get_member_field_name(role.id)
context[field] = post.getlist(field)
return context
class CommonQuotaWorkflow(workflows.Workflow):
def _update_project_quota(self, request, data, project_id):
# Update the project quota.
nova_data = dict(
[(key, data[key]) for key in quotas.NOVA_QUOTA_FIELDS])
nova.tenant_quota_update(request, project_id, **nova_data)
if base.is_service_enabled(request, 'volume'):
cinder_data = dict([(key, data[key]) for key in
quotas.CINDER_QUOTA_FIELDS])
cinder.tenant_quota_update(request,
project_id,
**cinder_data)
if api.base.is_service_enabled(request, 'network') and \
api.neutron.is_quotas_extension_supported(request):
neutron_data = {}
disabled_quotas = quotas.get_disabled_quotas(request)
for key in quotas.NEUTRON_QUOTA_FIELDS:
if key not in disabled_quotas:
neutron_data[key] = data[key]
api.neutron.tenant_quota_update(request,
project_id,
**neutron_data)
class CreateProject(CommonQuotaWorkflow):
slug = "create_project"
name = _("Create Project")
finalize_button_name = _("Create Project")
success_message = _('Created new project "%s".')
failure_message = _('Unable to create project "%s".')
success_url = "horizon:identity:projects:index"
default_steps = (CreateProjectInfo,
UpdateProjectMembers,
CreateProjectQuota)
def __init__(self, request=None, context_seed=None, entry_point=None,
*args, **kwargs):
if PROJECT_GROUP_ENABLED:
self.default_steps = (CreateProjectInfo,
UpdateProjectMembers,
UpdateProjectGroups,
CreateProjectQuota)
super(CreateProject, self).__init__(request=request,
context_seed=context_seed,
entry_point=entry_point,
*args,
**kwargs)
def format_status_message(self, message):
return message % self.context.get('name', 'unknown project')
def _create_project(self, request, data):
# create the project
domain_id = data['domain_id']
try:
desc = data['description']
self.object = api.keystone.tenant_create(request,
name=data['name'],
description=desc,
enabled=data['enabled'],
domain=domain_id)
return self.object
except Exception:
exceptions.handle(request, ignore=True)
return
def _update_project_members(self, request, data, project_id):
# update project members
users_to_add = 0
try:
available_roles = api.keystone.role_list(request)
member_step = self.get_step(PROJECT_USER_MEMBER_SLUG)
# count how many users are to be added
for role in available_roles:
field_name = member_step.get_member_field_name(role.id)
role_list = data[field_name]
users_to_add += len(role_list)
# add new users to project
for role in available_roles:
field_name = member_step.get_member_field_name(role.id)
role_list = data[field_name]
users_added = 0
for user in role_list:
api.keystone.add_tenant_user_role(request,
project=project_id,
user=user,
role=role.id)
users_added += 1
users_to_add -= users_added
except Exception:
if PROJECT_GROUP_ENABLED:
group_msg = _(", add project groups")
else:
group_msg = ""
exceptions.handle(request,
_('Failed to add %(users_to_add)s project '
'members%(group_msg)s and set project quotas.')
% {'users_to_add': users_to_add,
'group_msg': group_msg})
finally:
auth_utils.remove_project_cache(request.user.token.id)
def _update_project_groups(self, request, data, project_id):
# update project groups
groups_to_add = 0
try:
available_roles = api.keystone.role_list(request)
member_step = self.get_step(PROJECT_GROUP_MEMBER_SLUG)
# count how many groups are to be added
for role in available_roles:
field_name = member_step.get_member_field_name(role.id)
role_list = data[field_name]
groups_to_add += len(role_list)
# add new groups to project
for role in available_roles:
field_name = member_step.get_member_field_name(role.id)
role_list = data[field_name]
groups_added = 0
for group in role_list:
api.keystone.add_group_role(request,
role=role.id,
group=group,
project=project_id)
groups_added += 1
groups_to_add -= groups_added
except Exception:
exceptions.handle(request,
_('Failed to add %s project groups '
'and update project quotas.')
% groups_to_add)
def _update_project_quota(self, request, data, project_id):
try:
super(CreateProject, self)._update_project_quota(
request, data, project_id)
except Exception:
exceptions.handle(request, _('Unable to set project quotas.'))
def handle(self, request, data):
project = self._create_project(request, data)
if not project:
return False
project_id = project.id
self._update_project_members(request, data, project_id)
if PROJECT_GROUP_ENABLED:
self._update_project_groups(request, data, project_id)
self._update_project_quota(request, data, project_id)
return True
class UpdateProjectInfoAction(CreateProjectInfoAction):
enabled = forms.BooleanField(required=False, label=_("Enabled"))
def __init__(self, request, initial, *args, **kwargs):
super(UpdateProjectInfoAction, self).__init__(
request, initial, *args, **kwargs)
if initial['project_id'] == request.user.project_id:
self.fields['enabled'].widget.attrs['disabled'] = True
self.fields['enabled'].help_text = _(
'You cannot disable your current project')
def clean(self):
cleaned_data = super(UpdateProjectInfoAction, self).clean()
# NOTE(tsufiev): in case the current project is being edited, its
# 'enabled' field is disabled to prevent changing the field value
# which is always `True` for the current project (because the user
# logged in it). Since Django treats disabled checkbox as providing
# `False` value even if its initial value is `True`, we need to
# restore the original `True` value of 'enabled' field here.
if self.fields['enabled'].widget.attrs.get('disabled', False):
cleaned_data['enabled'] = True
return cleaned_data
class Meta(object):
name = _("Project Information")
slug = 'update_info'
help_text = _("Edit the project details.")
class UpdateProjectInfo(workflows.Step):
action_class = UpdateProjectInfoAction
template_name = COMMON_HORIZONTAL_TEMPLATE
depends_on = ("project_id",)
contributes = ("domain_id",
"domain_name",
"name",
"description",
"enabled")
class UpdateProject(CommonQuotaWorkflow):
slug = "update_project"
name = _("Edit Project")
finalize_button_name = _("Save")
success_message = _('Modified project "%s".')
failure_message = _('Unable to modify project "%s".')
success_url = "horizon:identity:projects:index"
default_steps = (UpdateProjectInfo,
UpdateProjectMembers,
UpdateProjectQuota)
def __init__(self, request=None, context_seed=None, entry_point=None,
*args, **kwargs):
if PROJECT_GROUP_ENABLED:
self.default_steps = (UpdateProjectInfo,
UpdateProjectMembers,
UpdateProjectGroups,
UpdateProjectQuota)
super(UpdateProject, self).__init__(request=request,
context_seed=context_seed,
entry_point=entry_point,
*args,
**kwargs)
def format_status_message(self, message):
return message % self.context.get('name', 'unknown project')
@memoized.memoized_method
def _get_available_roles(self, request):
return api.keystone.role_list(request)
def _update_project(self, request, data):
# update project info
try:
project_id = data['project_id']
return api.keystone.tenant_update(
request,
project_id,
name=data['name'],
description=data['description'],
enabled=data['enabled'])
except Exception:
exceptions.handle(request, ignore=True)
return
def _add_roles_to_users(self, request, data, project_id, user_id,
role_ids, available_roles):
member_step = self.get_step(PROJECT_USER_MEMBER_SLUG)
current_role_ids = list(role_ids)
for role in available_roles:
field_name = member_step.get_member_field_name(role.id)
# Check if the user is in the list of users with this role.
if user_id in data[field_name]:
# Add it if necessary
if role.id not in current_role_ids:
# user role has changed
api.keystone.add_tenant_user_role(
request,
project=project_id,
user=user_id,
role=role.id)
else:
# User role is unchanged, so remove it from the
# remaining roles list to avoid removing it later.
index = current_role_ids.index(role.id)
current_role_ids.pop(index)
return current_role_ids
def _remove_roles_from_user(self, request, project_id, user_id,
current_role_ids):
for id_to_delete in current_role_ids:
api.keystone.remove_tenant_user_role(
request,
project=project_id,
user=user_id,
role=id_to_delete)
def _is_removing_self_admin_role(self, request, project_id, user_id,
available_roles, current_role_ids):
is_current_user = user_id == request.user.id
is_current_project = project_id == request.user.tenant_id
available_admin_role_ids = [role.id for role in available_roles
if role.name.lower() == 'admin']
admin_roles = [role for role in current_role_ids
if role in available_admin_role_ids]
if len(admin_roles):
removing_admin = any([role in current_role_ids
for role in admin_roles])
else:
removing_admin = False
if is_current_user and is_current_project and removing_admin:
# Cannot remove "admin" role on current(admin) project
msg = _('You cannot revoke your administrative privileges '
'from the project you are currently logged into. '
'Please switch to another project with '
'administrative privileges or remove the '
'administrative role manually via the CLI.')
messages.warning(request, msg)
return True
else:
return False
def _update_project_members(self, request, data, project_id):
# update project members
users_to_modify = 0
# Project-user member step
member_step = self.get_step(PROJECT_USER_MEMBER_SLUG)
try:
# Get our role options
available_roles = self._get_available_roles(request)
# Get the users currently associated with this project so we
# can diff against it.
users_roles = api.keystone.get_project_users_roles(
request, project=project_id)
users_to_modify = len(users_roles)
for user_id in users_roles.keys():
# Check if there have been any changes in the roles of
# Existing project members.
current_role_ids = list(users_roles[user_id])
modified_role_ids = self._add_roles_to_users(
request, data, project_id, user_id,
current_role_ids, available_roles)
# Prevent admins from doing stupid things to themselves.
removing_admin = self._is_removing_self_admin_role(
request, project_id, user_id, available_roles,
modified_role_ids)
# Otherwise go through and revoke any removed roles.
if not removing_admin:
self._remove_roles_from_user(request, project_id, user_id,
modified_role_ids)
users_to_modify -= 1
# Grant new roles on the project.
for role in available_roles:
field_name = member_step.get_member_field_name(role.id)
# Count how many users may be added for exception handling.
users_to_modify += len(data[field_name])
for role in available_roles:
users_added = 0
field_name = member_step.get_member_field_name(role.id)
for user_id in data[field_name]:
if user_id not in users_roles:
api.keystone.add_tenant_user_role(request,
project=project_id,
user=user_id,
role=role.id)
users_added += 1
users_to_modify -= users_added
return True
except Exception:
if PROJECT_GROUP_ENABLED:
group_msg = _(", update project groups")
else:
group_msg = ""
exceptions.handle(request,
_('Failed to modify %(users_to_modify)s'
' project members%(group_msg)s and '
'update project quotas.')
% {'users_to_modify': users_to_modify,
'group_msg': group_msg})
return False
finally:
auth_utils.remove_project_cache(request.user.token.id)
def _update_project_groups(self, request, data, project_id, domain_id):
# update project groups
groups_to_modify = 0
member_step = self.get_step(PROJECT_GROUP_MEMBER_SLUG)
try:
available_roles = self._get_available_roles(request)
# Get the groups currently associated with this project so we
# can diff against it.
project_groups = api.keystone.group_list(request,
domain=domain_id,
project=project_id)
groups_to_modify = len(project_groups)
for group in project_groups:
# Check if there have been any changes in the roles of
# Existing project members.
current_roles = api.keystone.roles_for_group(
self.request,
group=group.id,
project=project_id)
current_role_ids = [role.id for role in current_roles]
for role in available_roles:
# Check if the group is in the list of groups with
# this role.
field_name = member_step.get_member_field_name(role.id)
if group.id in data[field_name]:
# Add it if necessary
if role.id not in current_role_ids:
# group role has changed
api.keystone.add_group_role(
request,
role=role.id,
group=group.id,
project=project_id)
else:
# Group role is unchanged, so remove it from
# the remaining roles list to avoid removing it
# later.
index = current_role_ids.index(role.id)
current_role_ids.pop(index)
# Revoke any removed roles.
for id_to_delete in current_role_ids:
api.keystone.remove_group_role(request,
role=id_to_delete,
group=group.id,
project=project_id)
groups_to_modify -= 1
# Grant new roles on the project.
for role in available_roles:
field_name = member_step.get_member_field_name(role.id)
# Count how many groups may be added for error handling.
groups_to_modify += len(data[field_name])
for role in available_roles:
groups_added = 0
field_name = member_step.get_member_field_name(role.id)
for group_id in data[field_name]:
if not filter(lambda x: group_id == x.id,
project_groups):
api.keystone.add_group_role(request,
role=role.id,
group=group_id,
project=project_id)
groups_added += 1
groups_to_modify -= groups_added
return True
except Exception:
exceptions.handle(request,
_('Failed to modify %s project '
'members, update project groups '
'and update project quotas.')
% groups_to_modify)
return False
def _update_project_quota(self, request, data, project_id):
try:
super(UpdateProject, self)._update_project_quota(
request, data, project_id)
return True
except Exception:
exceptions.handle(request, _('Modified project information and '
'members, but unable to modify '
'project quotas.'))
return False
def handle(self, request, data):
# FIXME(gabriel): This should be refactored to use Python's built-in
# sets and do this all in a single "roles to add" and "roles to remove"
# pass instead of the multi-pass thing happening now.
project = self._update_project(request, data)
if not project:
return False
project_id = data['project_id']
# Use the domain_id from the project if available
domain_id = getattr(project, "domain_id", '')
ret = self._update_project_members(request, data, project_id)
if not ret:
return False
if PROJECT_GROUP_ENABLED:
ret = self._update_project_groups(request, data,
project_id, domain_id)
if not ret:
return False
ret = self._update_project_quota(request, data, project_id)
if not ret:
return False
return True
| apache-2.0 | 1,162,863,208,443,927,600 | 42.615023 | 79 | 0.523924 | false |
savi-dev/nova | nova/scheduler/chance.py | 6 | 4430 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 OpenStack, LLC.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Chance (Random) Scheduler implementation
"""
import random
from nova import exception
from nova import flags
from nova.scheduler import driver
FLAGS = flags.FLAGS
class ChanceScheduler(driver.Scheduler):
    """Implements Scheduler as a random node selector."""

    def _filter_hosts(self, request_spec, hosts, filter_properties):
        """Filter a list of hosts based on request_spec."""
        ignore_hosts = filter_properties.get('ignore_hosts', [])
        hosts = [host for host in hosts if host not in ignore_hosts]
        return hosts

    def _schedule(self, context, topic, request_spec, filter_properties):
        """Picks a host that is up at random."""
        elevated = context.elevated()
        hosts = self.hosts_up(elevated, topic)
        if not hosts:
            msg = _("Is the appropriate service running?")
            raise exception.NoValidHost(reason=msg)

        hosts = self._filter_hosts(request_spec, hosts, filter_properties)
        if not hosts:
            msg = _("Could not find another compute")
            raise exception.NoValidHost(reason=msg)

        return hosts[int(random.random() * len(hosts))]

    def schedule_run_instance(self, context, request_spec,
                              admin_password, injected_files,
                              requested_networks, is_first_time,
                              filter_properties):
        """Create and run an instance or instances"""
        instance_uuids = request_spec.get('instance_uuids')
        for num, instance_uuid in enumerate(instance_uuids):
            request_spec['instance_properties']['launch_index'] = num
            try:
                host = self._schedule(context, 'compute', request_spec,
                                      filter_properties)
                updated_instance = driver.instance_update_db(context,
                                                             instance_uuid,
                                                             host)
                self.compute_rpcapi.run_instance(context,
                        instance=updated_instance, host=host,
                        requested_networks=requested_networks,
                        injected_files=injected_files,
                        admin_password=admin_password,
                        is_first_time=is_first_time,
                        request_spec=request_spec,
                        filter_properties=filter_properties)
            except Exception as ex:
                # NOTE(vish): we don't reraise the exception here to make sure
                #             that all instances in the request get set to
                #             error properly
                driver.handle_schedule_error(context, ex, instance_uuid,
                                             request_spec)

    def schedule_prep_resize(self, context, image, request_spec,
                             filter_properties, instance, instance_type,
                             reservations):
        """Select a target for resize."""
        host = self._schedule(context, 'compute', request_spec,
                              filter_properties)
        self.compute_rpcapi.prep_resize(context, image, instance,
                                        instance_type, host, reservations)

    def schedule_create_volume(self, context, volume_id, snapshot_id,
                               image_id):
        """Picks a host that is up at random."""
        host = self._schedule(context, FLAGS.volume_topic, None, {})
        driver.cast_to_host(context, FLAGS.volume_topic, host, 'create_volume',
                            volume_id=volume_id, snapshot_id=snapshot_id,
                            image_id=image_id)
| apache-2.0 | 5,369,135,336,323,449,000 | 42.431373 | 79 | 0.595711 | false |
dasseclab/dasseclab | clones/routersploit/tests/exploits/routers/mikrotik/test_winbox_auth_bypass_creds_disclosure.py | 1 | 3375 |
from routersploit.modules.exploits.routers.mikrotik.winbox_auth_bypass_creds_disclosure import Exploit
def test_check_success(tcp_target):
command_mock1 = tcp_target.get_command_mock(
b"\x68\x01\x00\x66\x4d\x32\x05\x00\xff\x01\x06\x00\xff\x09\x05\x07"
b"\x00\xff\x09\x07\x01\x00\x00\x21\x35\x2f\x2f\x2f\x2f\x2f\x2e\x2f"
b"\x2e\x2e\x2f\x2f\x2f\x2f\x2f\x2f\x2e\x2f\x2e\x2e\x2f\x2f\x2f\x2f"
b"\x2f\x2f\x2e\x2f\x2e\x2e\x2f\x66\x6c\x61\x73\x68\x2f\x72\x77\x2f"
b"\x73\x74\x6f\x72\x65\x2f\x75\x73\x65\x72\x2e\x64\x61\x74\x02\x00"
b"\xff\x88\x02\x00\x00\x00\x00\x00\x08\x00\x00\x00\x01\x00\xff\x88"
b"\x02\x00\x02\x00\x00\x00\x02\x00\x00\x00"
)
command_mock1.return_value = (
b"\x37\x01\x00\x35\x4d\x32\x01\x00\xff\x88\x02\x00\x00\x00\x00\x00"
b"\x08\x00\x00\x00\x02\x00\xff\x88\x02\x00\x02\x00\x00\x00\x02\x00"
b"\x00\x00\x01\x00\xfe\x09\x1b\x03\x00\xff\x09\x02\x02\x00\x00\x08"
b"\x36\x01\x00\x00\x06\x00\xff\x09\x05"
)
command_mock2 = tcp_target.get_command_mock(
b"\x3b\x01\x00\x39\x4d\x32\x05\x00\xff\x01\x06\x00\xff\x09\x06\x01"
b"\x00\xfe\x09\x1b\x02\x00\x00\x08\x00\x80\x00\x00\x07\x00\xff\x09"
b"\x04\x02\x00\xff\x88\x02\x00\x00\x00\x00\x00\x08\x00\x00\x00\x01"
b"\x00\xff\x88\x02\x00\x02\x00\x00\x00\x02\x00\x00\x00"
)
command_mock2.return_value = (
b"\xff\x01\x01\x68\x4d\x32\x01\x00\xff\x88\x02\x00\x00\x00\x00\x00"
b"\x08\x00\x00\x00\x02\x00\xff\x88\x02\x00\x02\x00\x00\x00\x02\x00"
b"\x00\x00\x04\x00\x00\x01\x03\x00\xff\x09\x02\x06\x00\xff\x09\x06"
b"\x03\x00\x00\x30\x36\x01\x57\x00\x4d\x32\x10\x00\x00\xa8\x00\x00"
b"\x1c\x00\x00\x01\x0a\x00\xfe\x00\x05\x00\x00\x09\x00\x06\x00\x00"
b"\x09\x00\x0b\x00\x00\x08\xfe\xff\x07\x00\x12\x00\x00\x09\x02\x01"
b"\x00\xfe\x09\x02\x02\x00\x00\x09\x03\x09\x00\xfe\x21\x00\x11\x00"
b"\x00\x21\x10\x76\x08\xc6\x04\x66\xa6\x3d\x2a\xb7\xcd\xec\x68\xe2"
b"\x6e\x44\x0e\x01\x00\x00\x21\x05\x75\x73\x65\x72\x31\x6d\x69\x6e"
b"\x6a\x00\x4d\x32\x10\x00\x00\xa8\x00\x00\x1c\x00\x00\x01\x0a\x00"
b"\xfe\x00\x05\x00\x00\x09\x00\x06\x00\x00\x09\x00\x0b\x00\x00\x08"
b"\xfe\xff\x07\x00\x12\x00\x00\x09\x02\x01\x00\xfe\x09\x01\x02\x00"
b"\x00\x09\x03\x09\x00\xfe\x21\x13\x73\x79\x73\x74\x65\x6d\x20\x64"
b"\x65\x66\x61\x75\x6c\x74\x20\x75\x73\x65\x72\x11\x00\x00\x21\x10"
b"\x29\xdb\xb3\x6f\x27\x5a\x0e\x2d\x09\xd5\xfb\x27\xb1\x44\xec\x93"
b"\x01\x00\x00\x21\x05\x61\x64\x6d\x69\x6e\x72\x00\x4d\x32\x10\x00"
b"\x00\x6b\xff\xa8\x00\x00\x1c\x00\x00\x01\x0a\x00\xfe\x00\x05\x00"
b"\x00\x09\x00\x06\x00\x00\x09\x00\x1f\x00\x00\x08\x36\x2b\x35\x5b"
b"\x0b\x00\x00\x08\xfe\xff\x07\x00\x12\x00\x00\x09\x02\x01\x00\xfe"
b"\x09\x01\x02\x00\x00\x09\x03\x09\x00\xfe\x21\x13\x73\x79\x73\x74"
b"\x65\x6d\x20\x64\x65\x66\x61\x75\x6c\x74\x20\x75\x73\x65\x72\x11"
b"\x00\x00\x21\x10\x29\xdb\xb3\x6f\x27\x5a\x0e\x2d\x09\xd5\xfb\x27"
b"\xb1\x44\xec\x93\x01\x00\x00\x21\x05\x61\x64\x6d\x69\x6e"
)
exploit = Exploit()
assert exploit.target == ""
assert exploit.port == 8291
exploit.target = tcp_target.host
exploit.port = tcp_target.port
assert exploit.check()
assert exploit.run() is None
| gpl-2.0 | 680,103,342,078,552,800 | 52.571429 | 102 | 0.647704 | false |
dwks/silvius-backend | kaldigstserver/decoder2.py | 1 | 8962 |
"""
Created on May 17, 2013
@author: tanel
"""
import gi
gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst
GObject.threads_init()
Gst.init(None)
import logging
import thread
import os
logger = logging.getLogger(__name__)
import pdb
class DecoderPipeline2(object):
def __init__(self, conf={}):
logger.info("Creating decoder using conf: %s" % conf)
self.create_pipeline(conf)
self.outdir = conf.get("out-dir", None)
if not os.path.exists(self.outdir):
os.makedirs(self.outdir)
elif not os.path.isdir(self.outdir):
raise Exception("Output directory %s already exists as a file" % self.outdir)
self.result_handler = None
self.full_result_handler = None
self.eos_handler = None
self.error_handler = None
self.request_id = "<undefined>"
def create_pipeline(self, conf):
self.appsrc = Gst.ElementFactory.make("appsrc", "appsrc")
self.decodebin = Gst.ElementFactory.make("decodebin", "decodebin")
self.audioconvert = Gst.ElementFactory.make("audioconvert", "audioconvert")
self.audioresample = Gst.ElementFactory.make("audioresample", "audioresample")
self.tee = Gst.ElementFactory.make("tee", "tee")
self.queue1 = Gst.ElementFactory.make("queue", "queue1")
self.filesink = Gst.ElementFactory.make("filesink", "filesink")
self.queue2 = Gst.ElementFactory.make("queue", "queue2")
self.asr = Gst.ElementFactory.make("kaldinnet2onlinedecoder", "asr")
self.fakesink = Gst.ElementFactory.make("fakesink", "fakesink")
# This needs to be set first
if "use-threaded-decoder" in conf["decoder"]:
self.asr.set_property("use-threaded-decoder", conf["decoder"]["use-threaded-decoder"])
decoder_config = conf.get("decoder", {})
if 'nnet-mode' in decoder_config:
logger.info("Setting decoder property: %s = %s" % ('nnet-mode', decoder_config['nnet-mode']))
self.asr.set_property('nnet-mode', decoder_config['nnet-mode'])
del decoder_config['nnet-mode']
for (key, val) in decoder_config.iteritems():
if key != "use-threaded-decoder":
logger.info("Setting decoder property: %s = %s" % (key, val))
self.asr.set_property(key, val)
self.appsrc.set_property("is-live", True)
self.filesink.set_property("location", "/dev/null")
logger.info('Created GStreamer elements')
self.pipeline = Gst.Pipeline()
for element in [self.appsrc, self.decodebin, self.audioconvert, self.audioresample, self.tee,
self.queue1, self.filesink,
self.queue2, self.asr, self.fakesink]:
logger.debug("Adding %s to the pipeline" % element)
self.pipeline.add(element)
logger.info('Linking GStreamer elements')
self.appsrc.link(self.decodebin)
#self.appsrc.link(self.audioconvert)
self.decodebin.connect('pad-added', self._connect_decoder)
self.audioconvert.link(self.audioresample)
self.audioresample.link(self.tee)
self.tee.link(self.queue1)
self.queue1.link(self.filesink)
self.tee.link(self.queue2)
self.queue2.link(self.asr)
self.asr.link(self.fakesink)
# Create bus and connect several handlers
self.bus = self.pipeline.get_bus()
self.bus.add_signal_watch()
self.bus.enable_sync_message_emission()
self.bus.connect('message::eos', self._on_eos)
self.bus.connect('message::error', self._on_error)
#self.bus.connect('message::cutter', self._on_cutter)
self.asr.connect('partial-result', self._on_partial_result)
self.asr.connect('final-result', self._on_final_result)
self.asr.connect('full-final-result', self._on_full_final_result)
logger.info("Setting pipeline to READY")
self.pipeline.set_state(Gst.State.READY)
logger.info("Set pipeline to READY")
def _connect_decoder(self, element, pad):
logger.info("%s: Connecting audio decoder" % self.request_id)
pad.link(self.audioconvert.get_static_pad("sink"))
logger.info("%s: Connected audio decoder" % self.request_id)
def _on_partial_result(self, asr, hyp):
logger.info("%s: Got partial result: %s" % (self.request_id, hyp.decode('utf8')))
if self.result_handler:
self.result_handler(hyp.decode('utf8'), False)
def _on_final_result(self, asr, hyp):
logger.info("%s: Got final result: %s" % (self.request_id, hyp.decode('utf8')))
if self.result_handler:
self.result_handler(hyp.decode('utf8'), True)
def _on_full_final_result(self, asr, result_json):
logger.info("%s: Got full final result: %s" % (self.request_id, result_json.decode('utf8')))
if self.full_result_handler:
self.full_result_handler(result_json)
def _on_error(self, bus, msg):
self.error = msg.parse_error()
logger.error(self.error)
self.finish_request()
if self.error_handler:
self.error_handler(self.error[0].message)
def _on_eos(self, bus, msg):
logger.info('%s: Pipeline received eos signal' % self.request_id)
#self.decodebin.unlink(self.audioconvert)
self.finish_request()
if self.eos_handler:
self.eos_handler[0](self.eos_handler[1])
def get_adaptation_state(self):
return self.asr.get_property("adaptation-state")
def set_adaptation_state(self, adaptation_state):
"""Sets the adaptation state to a certian value, previously retrieved using get_adaptation_state()
Should be called after init_request(..)
"""
return self.asr.set_property("adaptation-state", adaptation_state)
def finish_request(self):
logger.info("%s: Resetting decoder state" % self.request_id)
if self.outdir:
self.filesink.set_state(Gst.State.NULL)
self.filesink.set_property('location', "/dev/null")
self.filesink.set_state(Gst.State.PLAYING)
self.pipeline.set_state(Gst.State.NULL)
self.request_id = "<undefined>"
def init_request(self, id, caps_str):
self.request_id = id
logger.info("%s: Initializing request" % (self.request_id))
if caps_str and len(caps_str) > 0:
logger.info("%s: Setting caps to %s" % (self.request_id, caps_str))
caps = Gst.caps_from_string(caps_str)
self.appsrc.set_property("caps", caps)
else:
#caps = Gst.caps_from_string("")
self.appsrc.set_property("caps", None)
#self.pipeline.set_state(Gst.State.READY)
pass
#self.appsrc.set_state(Gst.State.PAUSED)
if self.outdir:
self.pipeline.set_state(Gst.State.PAUSED)
self.filesink.set_state(Gst.State.NULL)
self.filesink.set_property('location', "%s/%s.raw" % (self.outdir, id))
self.filesink.set_state(Gst.State.PLAYING)
#self.filesink.set_state(Gst.State.PLAYING)
#self.decodebin.set_state(Gst.State.PLAYING)
self.pipeline.set_state(Gst.State.PLAYING)
self.filesink.set_state(Gst.State.PLAYING)
# push empty buffer (to avoid hang on client disconnect)
#buf = Gst.Buffer.new_allocate(None, 0, None)
#self.appsrc.emit("push-buffer", buf)
# reset adaptation state
self.set_adaptation_state("")
def process_data(self, data):
logger.debug('%s: Pushing buffer of size %d to pipeline' % (self.request_id, len(data)))
buf = Gst.Buffer.new_allocate(None, len(data), None)
buf.fill(0, data)
self.appsrc.emit("push-buffer", buf)
logger.debug('%s: Pushing buffer done' % self.request_id)
def end_request(self):
logger.info("%s: Pushing EOS to pipeline" % self.request_id)
self.appsrc.emit("end-of-stream")
def set_result_handler(self, handler):
self.result_handler = handler
def set_full_result_handler(self, handler):
self.full_result_handler = handler
def set_eos_handler(self, handler, user_data=None):
self.eos_handler = (handler, user_data)
def set_error_handler(self, handler):
self.error_handler = handler
def cancel(self):
logger.info("%s: Sending EOS to pipeline in order to cancel processing" % self.request_id)
self.appsrc.emit("end-of-stream")
#self.asr.set_property("silent", True)
#self.pipeline.set_state(Gst.State.NULL)
#if (self.pipeline.get_state() == Gst.State.PLAYING):
#logger.debug("Sending EOS to pipeline")
#self.pipeline.send_event(Gst.Event.new_eos())
#self.pipeline.set_state(Gst.State.READY)
logger.info("%s: Cancelled pipeline" % self.request_id)
| bsd-2-clause | -3,667,906,717,819,400,700 | 37.62931 | 106 | 0.625307 | false |
tklaus/ansible | lib/ansible/playbook/role/metadata.py | 80 | 3201 |
# (c) 2014 Michael DeHaan, <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from six import iteritems, string_types
from ansible.errors import AnsibleParserError
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.helpers import load_list_of_roles
from ansible.playbook.role.include import RoleInclude
__all__ = ['RoleMetadata']
class RoleMetadata(Base):
    '''
    This class wraps the parsing and validation of the optional metadata
    within each Role (meta/main.yml).
    '''

    _allow_duplicates = FieldAttribute(isa='bool', default=False)
    _dependencies = FieldAttribute(isa='list', default=[])
    _galaxy_info = FieldAttribute(isa='GalaxyInfo')

    def __init__(self, owner=None):
        self._owner = owner
        super(RoleMetadata, self).__init__()

    @staticmethod
    def load(data, owner, variable_manager=None, loader=None):
        '''
        Returns a new RoleMetadata object based on the datastructure passed in.
        '''

        if not isinstance(data, dict):
            raise AnsibleParserError("the 'meta/main.yml' for role %s is not a dictionary" % owner.get_name())

        m = RoleMetadata(owner=owner).load_data(data, variable_manager=variable_manager, loader=loader)
        return m

    def _load_dependencies(self, attr, ds):
        '''
        This is a helper loading function for the dependencies list,
        which returns a list of RoleInclude objects
        '''
        if ds is None:
            ds = []

        current_role_path = None
        if self._owner:
            current_role_path = os.path.dirname(self._owner._role_path)

        return load_list_of_roles(ds, play=self._owner._play, current_role_path=current_role_path, variable_manager=self._variable_manager, loader=self._loader)

    def _load_galaxy_info(self, attr, ds):
        '''
        This is a helper loading function for the galaxy info entry
        in the metadata, which returns a GalaxyInfo object rather than
        a simple dictionary.
        '''
        return ds

    def serialize(self):
        return dict(
            allow_duplicates = self._allow_duplicates,
            dependencies = self._dependencies,
        )

    def deserialize(self, data):
        setattr(self, 'allow_duplicates', data.get('allow_duplicates', False))
        setattr(self, 'dependencies', data.get('dependencies', []))
| gpl-3.0 | -6,819,741,198,179,548,000 | 33.053191 | 160 | 0.676976 | false |
agiliq/nginx-python-buildpack | vendor/pip-1.5.4/pip/_vendor/html5lib/treebuilders/etree.py | 915 | 12621 |
from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type
import re
from . import _base
from .. import ihatexml
from .. import constants
from ..constants import namespaces
from ..utils import moduleFactoryFactory
tag_regexp = re.compile("{([^}]*)}(.*)")
def getETreeBuilder(ElementTreeImplementation, fullTree=False):
ElementTree = ElementTreeImplementation
ElementTreeCommentType = ElementTree.Comment("asd").tag
class Element(_base.Node):
def __init__(self, name, namespace=None):
self._name = name
self._namespace = namespace
self._element = ElementTree.Element(self._getETreeTag(name,
namespace))
if namespace is None:
self.nameTuple = namespaces["html"], self._name
else:
self.nameTuple = self._namespace, self._name
self.parent = None
self._childNodes = []
self._flags = []
def _getETreeTag(self, name, namespace):
if namespace is None:
etree_tag = name
else:
etree_tag = "{%s}%s" % (namespace, name)
return etree_tag
def _setName(self, name):
self._name = name
self._element.tag = self._getETreeTag(self._name, self._namespace)
def _getName(self):
return self._name
name = property(_getName, _setName)
def _setNamespace(self, namespace):
self._namespace = namespace
self._element.tag = self._getETreeTag(self._name, self._namespace)
def _getNamespace(self):
return self._namespace
namespace = property(_getNamespace, _setNamespace)
def _getAttributes(self):
return self._element.attrib
def _setAttributes(self, attributes):
# Delete existing attributes first
# XXX - there may be a better way to do this...
for key in list(self._element.attrib.keys()):
del self._element.attrib[key]
for key, value in attributes.items():
if isinstance(key, tuple):
name = "{%s}%s" % (key[2], key[1])
else:
name = key
self._element.set(name, value)
attributes = property(_getAttributes, _setAttributes)
def _getChildNodes(self):
return self._childNodes
def _setChildNodes(self, value):
del self._element[:]
self._childNodes = []
for element in value:
self.insertChild(element)
childNodes = property(_getChildNodes, _setChildNodes)
def hasContent(self):
"""Return true if the node has children or text"""
return bool(self._element.text or len(self._element))
def appendChild(self, node):
self._childNodes.append(node)
self._element.append(node._element)
node.parent = self
def insertBefore(self, node, refNode):
index = list(self._element).index(refNode._element)
self._element.insert(index, node._element)
node.parent = self
def removeChild(self, node):
self._element.remove(node._element)
node.parent = None
def insertText(self, data, insertBefore=None):
if not(len(self._element)):
if not self._element.text:
self._element.text = ""
self._element.text += data
elif insertBefore is None:
# Insert the text as the tail of the last child element
if not self._element[-1].tail:
self._element[-1].tail = ""
self._element[-1].tail += data
else:
# Insert the text before the specified node
children = list(self._element)
index = children.index(insertBefore._element)
if index > 0:
if not self._element[index - 1].tail:
self._element[index - 1].tail = ""
self._element[index - 1].tail += data
else:
if not self._element.text:
self._element.text = ""
self._element.text += data
def cloneNode(self):
element = type(self)(self.name, self.namespace)
for name, value in self.attributes.items():
element.attributes[name] = value
return element
def reparentChildren(self, newParent):
if newParent.childNodes:
newParent.childNodes[-1]._element.tail += self._element.text
else:
if not newParent._element.text:
newParent._element.text = ""
if self._element.text is not None:
newParent._element.text += self._element.text
self._element.text = ""
_base.Node.reparentChildren(self, newParent)
class Comment(Element):
def __init__(self, data):
# Use the superclass constructor to set all properties on the
# wrapper element
self._element = ElementTree.Comment(data)
self.parent = None
self._childNodes = []
self._flags = []
def _getData(self):
return self._element.text
def _setData(self, value):
self._element.text = value
data = property(_getData, _setData)
class DocumentType(Element):
def __init__(self, name, publicId, systemId):
Element.__init__(self, "<!DOCTYPE>")
self._element.text = name
self.publicId = publicId
self.systemId = systemId
def _getPublicId(self):
return self._element.get("publicId", "")
def _setPublicId(self, value):
if value is not None:
self._element.set("publicId", value)
publicId = property(_getPublicId, _setPublicId)
def _getSystemId(self):
return self._element.get("systemId", "")
def _setSystemId(self, value):
if value is not None:
self._element.set("systemId", value)
systemId = property(_getSystemId, _setSystemId)
class Document(Element):
def __init__(self):
Element.__init__(self, "DOCUMENT_ROOT")
class DocumentFragment(Element):
def __init__(self):
Element.__init__(self, "DOCUMENT_FRAGMENT")
def testSerializer(element):
rv = []
def serializeElement(element, indent=0):
if not(hasattr(element, "tag")):
element = element.getroot()
if element.tag == "<!DOCTYPE>":
if element.get("publicId") or element.get("systemId"):
publicId = element.get("publicId") or ""
systemId = element.get("systemId") or ""
rv.append("""<!DOCTYPE %s "%s" "%s">""" %
(element.text, publicId, systemId))
else:
rv.append("<!DOCTYPE %s>" % (element.text,))
elif element.tag == "DOCUMENT_ROOT":
rv.append("#document")
if element.text is not None:
rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
if element.tail is not None:
raise TypeError("Document node cannot have tail")
if hasattr(element, "attrib") and len(element.attrib):
raise TypeError("Document node cannot have attributes")
elif element.tag == ElementTreeCommentType:
rv.append("|%s<!-- %s -->" % (' ' * indent, element.text))
else:
assert isinstance(element.tag, text_type), \
"Expected unicode, got %s, %s" % (type(element.tag), element.tag)
nsmatch = tag_regexp.match(element.tag)
if nsmatch is None:
name = element.tag
else:
ns, name = nsmatch.groups()
prefix = constants.prefixes[ns]
name = "%s %s" % (prefix, name)
rv.append("|%s<%s>" % (' ' * indent, name))
if hasattr(element, "attrib"):
attributes = []
for name, value in element.attrib.items():
nsmatch = tag_regexp.match(name)
if nsmatch is not None:
ns, name = nsmatch.groups()
prefix = constants.prefixes[ns]
attr_string = "%s %s" % (prefix, name)
else:
attr_string = name
attributes.append((attr_string, value))
for name, value in sorted(attributes):
rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
if element.text:
rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
indent += 2
for child in element:
serializeElement(child, indent)
if element.tail:
rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail))
serializeElement(element, 0)
return "\n".join(rv)
def tostring(element):
"""Serialize an element and its child nodes to a string"""
rv = []
filter = ihatexml.InfosetFilter()
def serializeElement(element):
if isinstance(element, ElementTree.ElementTree):
element = element.getroot()
if element.tag == "<!DOCTYPE>":
if element.get("publicId") or element.get("systemId"):
publicId = element.get("publicId") or ""
systemId = element.get("systemId") or ""
rv.append("""<!DOCTYPE %s PUBLIC "%s" "%s">""" %
(element.text, publicId, systemId))
else:
rv.append("<!DOCTYPE %s>" % (element.text,))
elif element.tag == "DOCUMENT_ROOT":
if element.text is not None:
rv.append(element.text)
if element.tail is not None:
raise TypeError("Document node cannot have tail")
if hasattr(element, "attrib") and len(element.attrib):
raise TypeError("Document node cannot have attributes")
for child in element:
serializeElement(child)
elif element.tag == ElementTreeCommentType:
rv.append("<!--%s-->" % (element.text,))
else:
# This is assumed to be an ordinary element
if not element.attrib:
rv.append("<%s>" % (filter.fromXmlName(element.tag),))
else:
attr = " ".join(["%s=\"%s\"" % (
filter.fromXmlName(name), value)
for name, value in element.attrib.items()])
rv.append("<%s %s>" % (element.tag, attr))
if element.text:
rv.append(element.text)
for child in element:
serializeElement(child)
rv.append("</%s>" % (element.tag,))
if element.tail:
rv.append(element.tail)
serializeElement(element)
return "".join(rv)
class TreeBuilder(_base.TreeBuilder):
documentClass = Document
doctypeClass = DocumentType
elementClass = Element
commentClass = Comment
fragmentClass = DocumentFragment
implementation = ElementTreeImplementation
def testSerializer(self, element):
return testSerializer(element)
def getDocument(self):
if fullTree:
return self.document._element
else:
if self.defaultNamespace is not None:
return self.document._element.find(
"{%s}html" % self.defaultNamespace)
else:
return self.document._element.find("html")
def getFragment(self):
return _base.TreeBuilder.getFragment(self)._element
return locals()
getETreeModule = moduleFactoryFactory(getETreeBuilder)
| mit | 6,283,369,525,505,267,000 | 36.451039 | 85 | 0.509785 | false |
pdubroy/kurt | build/MacOS/PyInstaller/pyinstaller-svn-r812/Build.py | 1 | 42370 |
#!/usr/bin/env python
#
# Build packages using spec files
#
# Copyright (C) 2005, Giovanni Bajo
# Based on previous work under copyright (c) 1999, 2002 McMillan Enterprises, Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import sys
import os
import shutil
import pprint
import time
import py_compile
import tempfile
try:
from hashlib import md5
except ImportError:
from md5 import new as md5
import UserList
import mf
import archive
import iu
import carchive
import bindepend
STRINGTYPE = type('')
TUPLETYPE = type((None,))
UNCOMPRESSED, COMPRESSED = range(2)
# todo: use pkg_resources here
HOMEPATH = os.path.dirname(sys.argv[0])
SPECPATH = None
BUILDPATH = None
WARNFILE = None
rthooks = {}
iswin = sys.platform[:3] == 'win'
cygwin = sys.platform == 'cygwin'
def system(cmd):
# This workaround is required because NT shell doesn't work with commands
# that start with double quotes (required if there are spaces inside the
# command path)
if iswin:
cmd = 'echo on && ' + cmd
os.system(cmd)
def _save_data(filename, data):
outf = open(filename, 'w')
pprint.pprint(data, outf)
outf.close()
def _load_data(filename):
return eval(open(filename, 'r').read().replace("\r\n","\n"))
def setupUPXFlags():
f = os.environ.get("UPX", "")
is24 = hasattr(sys, "version_info") and sys.version_info[:2] >= (2,4)
if iswin and is24:
# Binaries built with Visual Studio 7.1 require --strip-loadconf
# or they won't compress. Configure.py makes sure that UPX is new
# enough to support --strip-loadconf.
f = "--strip-loadconf " + f
# Do not compress any icon, so that additional icons in the executable
# can still be externally bound
f = "--compress-icons=0 " + f
f = "--best " + f
os.environ["UPX"] = f
def mtime(fnm):
try:
return os.stat(fnm)[8]
except:
return 0
def absnormpath(apath):
return os.path.abspath(os.path.normpath(apath))
def compile_pycos(toc):
"""Given a TOC or equivalent list of tuples, generates all the required
pyc/pyo files, writing in a local directory if required, and returns the
list of tuples with the updated pathnames.
"""
global BUILDPATH
# For those modules that need to be rebuilt, use the build directory
# PyInstaller creates during the build process.
basepath = "/".join([BUILDPATH, "localpycos"])
new_toc = []
for (nm, fnm, typ) in toc:
# Trim the terminal "c" or "o"
source_fnm = fnm[:-1]
# If the source is newer than the compiled, or the compiled doesn't
# exist, we need to perform a build ourselves.
if mtime(source_fnm) > mtime(fnm):
try:
py_compile.compile(source_fnm)
except IOError:
# If we're compiling on a system directory, probably we don't
# have write permissions; thus we compile to a local directory
# and change the TOC entry accordingly.
ext = os.path.splitext(fnm)[1]
if "__init__" not in fnm:
# If it's a normal module, use last part of the qualified
# name as module name and the first as leading path
leading, mod_name = nm.split(".")[:-1], nm.split(".")[-1]
else:
# In case of a __init__ module, use all the qualified name
# as leading path and use "__init__" as the module name
leading, mod_name = nm.split("."), "__init__"
leading.insert(0, basepath)
leading = "/".join(leading)
if not os.path.exists(leading):
os.makedirs(leading)
fnm = "/".join([leading, mod_name + ext])
py_compile.compile(source_fnm, fnm)
new_toc.append((nm, fnm, typ))
return new_toc
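# For instance (illustrative example only, not taken from a real run): an
# input entry such as
#   ('mymod', '/usr/lib/python2.5/site-packages/mymod.pyc', 'PYMODULE')
# may come back pointing into BUILDPATH/localpycos/... when the original
# location is not writable.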
#--- functions for checking guts ---
def _check_guts_eq(attr, old, new, last_build):
"""
rebuild is required if values differ
"""
if old != new:
print "building because %s changed" % attr
return True
return False
def _check_guts_toc_mtime(attr, old, toc, last_build, pyc=0):
"""
rebuild is required if mtimes of files listed in old toc are newer
    than last_build
if pyc=1, check for .py files, too
"""
for (nm, fnm, typ) in old:
if mtime(fnm) > last_build:
print "building because %s changed" % fnm
return True
elif pyc and mtime(fnm[:-1]) > last_build:
print "building because %s changed" % fnm[:-1]
return True
return False
def _check_guts_toc(attr, old, toc, last_build, pyc=0):
"""
    rebuild is required if either the toc content changed or the mtimes of
    files listed in the old toc are newer than last_build
if pyc=1, check for .py files, too
"""
return _check_guts_eq (attr, old, toc, last_build) \
or _check_guts_toc_mtime(attr, old, toc, last_build, pyc=pyc)
#--
class Target:
invcnum = 0
def __init__(self):
self.invcnum = Target.invcnum
Target.invcnum += 1
self.out = os.path.join(BUILDPATH, 'out%s%d.toc' % (self.__class__.__name__,
self.invcnum))
self.outnm = os.path.basename(self.out)
self.dependencies = TOC()
def __postinit__(self):
print "checking %s" % (self.__class__.__name__,)
if self.check_guts(mtime(self.out)):
self.assemble()
GUTS = []
def check_guts(self, last_build):
pass
def get_guts(self, last_build, missing ='missing or bad'):
"""
returns None if guts have changed
"""
try:
data = _load_data(self.out)
except:
print "building because", os.path.basename(self.out), missing
return None
if len(data) != len(self.GUTS):
print "building because %s is bad" % self.outnm
return None
for i in range(len(self.GUTS)):
attr, func = self.GUTS[i]
if func is None:
# no check for this value
continue
if func(attr, data[i], getattr(self, attr), last_build):
return None
return data
class Analysis(Target):
def __init__(self, scripts=None, pathex=None, hookspath=None, excludes=None):
Target.__init__(self)
self.inputs = scripts
for script in scripts:
if not os.path.exists(script):
raise ValueError, "script '%s' not found" % script
self.pathex = []
if pathex:
for path in pathex:
self.pathex.append(absnormpath(path))
sys.pathex = self.pathex[:]
self.hookspath = hookspath
self.excludes = excludes
self.scripts = TOC()
self.pure = TOC()
self.binaries = TOC()
self.zipfiles = TOC()
self.datas = TOC()
self.__postinit__()
GUTS = (('inputs', _check_guts_eq),
('pathex', _check_guts_eq),
('hookspath', _check_guts_eq),
('excludes', _check_guts_eq),
('scripts', _check_guts_toc_mtime),
('pure', lambda *args: apply(_check_guts_toc_mtime,
args, {'pyc': 1 } )),
('binaries', _check_guts_toc_mtime),
('zipfiles', _check_guts_toc_mtime),
('datas', _check_guts_toc_mtime),
)
def check_guts(self, last_build):
if last_build == 0:
print "building %s because %s non existent" % (self.__class__.__name__, self.outnm)
return True
for fnm in self.inputs:
if mtime(fnm) > last_build:
print "building because %s changed" % fnm
return True
data = Target.get_guts(self, last_build)
if not data:
return True
scripts, pure, binaries, zipfiles, datas = data[-5:]
self.scripts = TOC(scripts)
self.pure = TOC(pure)
self.binaries = TOC(binaries)
self.zipfiles = TOC(zipfiles)
self.datas = TOC(datas)
return False
def assemble(self):
print "running Analysis", os.path.basename(self.out)
# Reset seen variable to correctly discover dependencies
# if there are multiple Analysis in a single specfile.
bindepend.seen = {}
paths = self.pathex
for i in range(len(paths)):
# FIXME: isn't self.pathex already norm-abs-pathed?
paths[i] = absnormpath(paths[i])
###################################################
# Scan inputs and prepare:
dirs = {} # input directories
pynms = [] # python filenames with no extension
for script in self.inputs:
if not os.path.exists(script):
print "Analysis: script %s not found!" % script
sys.exit(1)
d, base = os.path.split(script)
if not d:
d = os.getcwd()
d = absnormpath(d)
pynm, ext = os.path.splitext(base)
dirs[d] = 1
pynms.append(pynm)
###################################################
# Initialize analyzer and analyze scripts
analyzer = mf.ImportTracker(dirs.keys()+paths, self.hookspath,
self.excludes,
target_platform=target_platform)
#print analyzer.path
scripts = [] # will contain scripts to bundle
for i in range(len(self.inputs)):
script = self.inputs[i]
print "Analyzing:", script
analyzer.analyze_script(script)
scripts.append((pynms[i], script, 'PYSOURCE'))
###################################################
        # Fill the pure, binaries, zipfiles, datas and rthooks lists
pure = [] # pure python modules
binaries = [] # binaries to bundle
zipfiles = [] # zipfiles to bundle
datas = [] # datafiles to bundle
rthooks = [] # rthooks if needed
for modnm, mod in analyzer.modules.items():
# FIXME: why can we have a mod == None here?
if mod is not None:
hooks = findRTHook(modnm) #XXX
if hooks:
rthooks.extend(hooks)
datas.extend(mod.datas)
if isinstance(mod, mf.BuiltinModule):
pass
else:
fnm = mod.__file__
if isinstance(mod, mf.ExtensionModule):
binaries.append((mod.__name__, fnm, 'EXTENSION'))
elif isinstance(mod, (mf.PkgInZipModule, mf.PyInZipModule)):
zipfiles.append(("eggs/" + os.path.basename(str(mod.owner)),
str(mod.owner), 'ZIPFILE'))
else:
# mf.PyModule instances expose a list of binary
# dependencies, most probably shared libraries accessed
# via ctypes. Add them to the overall required binaries.
binaries.extend(mod.binaries)
if modnm != '__main__':
pure.append((modnm, fnm, 'PYMODULE'))
binaries.extend(bindepend.Dependencies(binaries,
platform=target_platform))
self.fixMissingPythonLib(binaries)
if zipfiles:
scripts[-1:-1] = [("_pyi_egg_install.py", os.path.join(HOMEPATH, "support/_pyi_egg_install.py"), 'PYSOURCE')]
# Add realtime hooks just before the last script (which is
# the entrypoint of the application).
scripts[-1:-1] = rthooks
self.scripts = TOC(scripts)
self.pure = TOC(pure)
self.binaries = TOC(binaries)
self.zipfiles = TOC(zipfiles)
self.datas = TOC(datas)
try: # read .toc
oldstuff = _load_data(self.out)
except:
oldstuff = None
self.pure = TOC(compile_pycos(self.pure))
newstuff = (self.inputs, self.pathex, self.hookspath, self.excludes,
self.scripts, self.pure, self.binaries, self.zipfiles, self.datas)
if oldstuff != newstuff:
_save_data(self.out, newstuff)
wf = open(WARNFILE, 'w')
for ln in analyzer.getwarnings():
wf.write(ln+'\n')
wf.close()
print "Warnings written to %s" % WARNFILE
return 1
print self.out, "no change!"
return 0
def fixMissingPythonLib(self, binaries):
"""Add the Python library if missing from the binaries.
        Some linux distributions (e.g. debian-based) statically link the
        Python executable against libpython, so bindepend doesn't include
it in its output.
"""
# minimal patch for OSX. Loader expects "Python" framework lib to be bundled
if target_platform == "darwin":
lib = os.path.join(sys.exec_prefix,'Python')
try:
exists = os.stat(lib)
binaries.append(('Python', lib, 'BINARY'))
except:
print 'Warning: could not find python framework to bundle'
if target_platform != 'linux2': return
name = 'libpython%d.%d.so' % sys.version_info[:2]
for (nm, fnm, typ) in binaries:
if typ == 'BINARY' and name in fnm:
# lib found
return
lib = bindepend.findLibrary(name)
if lib is None:
raise IOError("Python library not found!")
binaries.append((os.path.split(lib)[1], lib, 'BINARY'))
def findRTHook(modnm):
hooklist = rthooks.get(modnm)
if hooklist:
rslt = []
for script in hooklist:
nm = os.path.basename(script)
nm = os.path.splitext(nm)[0]
if os.path.isabs(script):
path = script
else:
path = os.path.join(HOMEPATH, script)
rslt.append((nm, path, 'PYSOURCE'))
return rslt
return None
class PYZ(Target):
typ = 'PYZ'
def __init__(self, toc, name=None, level=9, crypt=None):
Target.__init__(self)
self.toc = toc
self.name = name
if name is None:
self.name = self.out[:-3] + 'pyz'
if config['useZLIB']:
self.level = level
else:
self.level = 0
if config['useCrypt'] and crypt is not None:
self.crypt = archive.Keyfile(crypt).key
else:
self.crypt = None
self.dependencies = compile_pycos(config['PYZ_dependencies'])
self.__postinit__()
GUTS = (('name', _check_guts_eq),
('level', _check_guts_eq),
('crypt', _check_guts_eq),
('toc', _check_guts_toc), # todo: pyc=1
)
def check_guts(self, last_build):
if not os.path.exists(self.name):
print "rebuilding %s because %s is missing" % (self.outnm, os.path.basename(self.name))
return True
data = Target.get_guts(self, last_build)
if not data:
return True
return False
def assemble(self):
print "building PYZ", os.path.basename(self.out)
pyz = archive.ZlibArchive(level=self.level, crypt=self.crypt)
toc = self.toc - config['PYZ_dependencies']
pyz.build(self.name, toc)
_save_data(self.out, (self.name, self.level, self.crypt, self.toc))
return 1
def cacheDigest(fnm):
data = open(fnm, "rb").read()
digest = md5(data).digest()
return digest
def checkCache(fnm, strip, upx):
    # On darwin a cache is required anyway to keep the libraries
# with relative install names
if not strip and not upx and sys.platform != 'darwin':
return fnm
if strip:
strip = 1
else:
strip = 0
if upx:
upx = 1
else:
upx = 0
# Load cache index
cachedir = os.path.join(HOMEPATH, 'bincache%d%d' % (strip, upx))
if not os.path.exists(cachedir):
os.makedirs(cachedir)
cacheindexfn = os.path.join(cachedir, "index.dat")
if os.path.exists(cacheindexfn):
cache_index = _load_data(cacheindexfn)
else:
cache_index = {}
# Verify if the file we're looking for is present in the cache.
basenm = os.path.normcase(os.path.basename(fnm))
digest = cacheDigest(fnm)
cachedfile = os.path.join(cachedir, basenm)
cmd = None
if cache_index.has_key(basenm):
if digest != cache_index[basenm]:
os.remove(cachedfile)
else:
return cachedfile
if upx:
if strip:
fnm = checkCache(fnm, 1, 0)
bestopt = "--best"
# FIXME: Linux builds of UPX do not seem to contain LZMA (they assert out)
# A better configure-time check is due.
if config["hasUPX"] >= (3,) and os.name == "nt":
bestopt = "--lzma"
upx_executable = "upx"
if config.get('upx_dir'):
upx_executable = os.path.join(config['upx_dir'], upx_executable)
cmd = '"' + upx_executable + '" ' + bestopt + " -q \"%s\"" % cachedfile
else:
if strip:
cmd = "strip \"%s\"" % cachedfile
shutil.copy2(fnm, cachedfile)
os.chmod(cachedfile, 0755)
if cmd: system(cmd)
# update cache index
cache_index[basenm] = digest
_save_data(cacheindexfn, cache_index)
return cachedfile
UNCOMPRESSED, COMPRESSED, ENCRYPTED = range(3)
class PKG(Target):
typ = 'PKG'
xformdict = {'PYMODULE' : 'm',
'PYSOURCE' : 's',
'EXTENSION' : 'b',
'PYZ' : 'z',
'PKG' : 'a',
'DATA': 'x',
'BINARY': 'b',
'ZIPFILE': 'Z',
'EXECUTABLE': 'b'}
def __init__(self, toc, name=None, cdict=None, exclude_binaries=0,
strip_binaries=0, upx_binaries=0, crypt=0):
Target.__init__(self)
self.toc = toc
self.cdict = cdict
self.name = name
self.exclude_binaries = exclude_binaries
self.strip_binaries = strip_binaries
self.upx_binaries = upx_binaries
self.crypt = crypt
if name is None:
self.name = self.out[:-3] + 'pkg'
if self.cdict is None:
if config['useZLIB']:
self.cdict = {'EXTENSION':COMPRESSED,
'DATA':COMPRESSED,
'BINARY':COMPRESSED,
'EXECUTABLE':COMPRESSED,
'PYSOURCE':COMPRESSED,
'PYMODULE':COMPRESSED }
if self.crypt:
self.cdict['PYSOURCE'] = ENCRYPTED
self.cdict['PYMODULE'] = ENCRYPTED
else:
self.cdict = { 'PYSOURCE':UNCOMPRESSED }
self.__postinit__()
GUTS = (('name', _check_guts_eq),
('cdict', _check_guts_eq),
('toc', _check_guts_toc_mtime),
('exclude_binaries', _check_guts_eq),
('strip_binaries', _check_guts_eq),
('upx_binaries', _check_guts_eq),
('crypt', _check_guts_eq),
)
def check_guts(self, last_build):
if not os.path.exists(self.name):
print "rebuilding %s because %s is missing" % (self.outnm, os.path.basename(self.name))
return 1
data = Target.get_guts(self, last_build)
if not data:
return True
# todo: toc equal
return False
def assemble(self):
print "building PKG", os.path.basename(self.name)
trash = []
mytoc = []
toc = TOC()
for item in self.toc:
inm, fnm, typ = item
if typ == 'EXTENSION':
binext = os.path.splitext(fnm)[1]
if not os.path.splitext(inm)[1] == binext:
inm = inm + binext
toc.append((inm, fnm, typ))
seen = {}
for inm, fnm, typ in toc:
if typ in ('BINARY', 'EXTENSION'):
if self.exclude_binaries:
self.dependencies.append((inm, fnm, typ))
else:
fnm = checkCache(fnm, self.strip_binaries,
self.upx_binaries and ( iswin or cygwin )
and config['hasUPX'])
# Avoid importing the same binary extension twice. This might
# happen if they come from different sources (eg. once from
# binary dependence, and once from direct import).
if typ == 'BINARY' and seen.has_key(fnm):
continue
seen[fnm] = 1
mytoc.append((inm, fnm, self.cdict.get(typ,0),
self.xformdict.get(typ,'b')))
elif typ == 'OPTION':
mytoc.append((inm, '', 0, 'o'))
else:
mytoc.append((inm, fnm, self.cdict.get(typ,0), self.xformdict.get(typ,'b')))
archive = carchive.CArchive()
archive.build(self.name, mytoc)
_save_data(self.out,
(self.name, self.cdict, self.toc, self.exclude_binaries,
self.strip_binaries, self.upx_binaries, self.crypt))
for item in trash:
os.remove(item)
return 1
class EXE(Target):
typ = 'EXECUTABLE'
exclude_binaries = 0
append_pkg = 1
def __init__(self, *args, **kws):
Target.__init__(self)
self.console = kws.get('console',1)
self.debug = kws.get('debug',0)
self.name = kws.get('name',None)
self.icon = kws.get('icon',None)
self.versrsrc = kws.get('version',None)
self.strip = kws.get('strip',None)
self.upx = kws.get('upx',None)
self.crypt = kws.get('crypt', 0)
self.exclude_binaries = kws.get('exclude_binaries',0)
self.append_pkg = kws.get('append_pkg', self.append_pkg)
if self.name is None:
self.name = self.out[:-3] + 'exe'
if not os.path.isabs(self.name):
self.name = os.path.join(SPECPATH, self.name)
if target_iswin or cygwin:
self.pkgname = self.name[:-3] + 'pkg'
else:
self.pkgname = self.name + '.pkg'
self.toc = TOC()
for arg in args:
if isinstance(arg, TOC):
self.toc.extend(arg)
elif isinstance(arg, Target):
self.toc.append((os.path.basename(arg.name), arg.name, arg.typ))
self.toc.extend(arg.dependencies)
else:
self.toc.extend(arg)
self.toc.extend(config['EXE_dependencies'])
self.pkg = PKG(self.toc, cdict=kws.get('cdict',None), exclude_binaries=self.exclude_binaries,
strip_binaries=self.strip, upx_binaries=self.upx, crypt=self.crypt)
self.dependencies = self.pkg.dependencies
self.__postinit__()
GUTS = (('name', _check_guts_eq),
('console', _check_guts_eq),
('debug', _check_guts_eq),
('icon', _check_guts_eq),
('versrsrc', _check_guts_eq),
('strip', _check_guts_eq),
('upx', _check_guts_eq),
('crypt', _check_guts_eq),
            ('mtm', None,), # checked below
)
def check_guts(self, last_build):
if not os.path.exists(self.name):
print "rebuilding %s because %s missing" % (self.outnm, os.path.basename(self.name))
return 1
if not self.append_pkg and not os.path.exists(self.pkgname):
print "rebuilding because %s missing" % (
os.path.basename(self.pkgname),)
return 1
data = Target.get_guts(self, last_build)
if not data:
return True
icon, versrsrc = data[3:5]
if (icon or versrsrc) and not config['hasRsrcUpdate']:
# todo: really ignore :-)
            print "ignoring icon and version resources - platform not capable"
mtm = data[-1]
crypt = data[-2]
if crypt != self.crypt:
            print "rebuilding %s because crypt option changed" % self.outnm
return 1
if mtm != mtime(self.name):
print "rebuilding", self.outnm, "because mtimes don't match"
return True
if mtm < mtime(self.pkg.out):
print "rebuilding", self.outnm, "because pkg is more recent"
return True
return False
def _bootloader_postfix(self, exe):
if target_iswin:
exe = exe + "_"
is24 = hasattr(sys, "version_info") and sys.version_info[:2] >= (2,4)
exe = exe + "67"[is24]
exe = exe + "rd"[self.debug]
exe = exe + "wc"[self.console]
else:
if not self.console:
exe = exe + 'w'
if self.debug:
exe = exe + '_d'
return exe
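    # Illustrative note, derived from the code above: on Windows the suffix
    # encodes the Python version plus the debug and console flags, e.g. a
    # Python 2.4+ release console bootloader is "run_7rc.exe".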
def assemble(self):
print "building EXE from", os.path.basename(self.out)
trash = []
if not os.path.exists(os.path.dirname(self.name)):
os.makedirs(os.path.dirname(self.name))
outf = open(self.name, 'wb')
exe = self._bootloader_postfix('support/loader/run')
exe = os.path.join(HOMEPATH, exe)
if target_iswin or cygwin:
exe = exe + '.exe'
if config['hasRsrcUpdate']:
if self.icon:
tmpnm = tempfile.mktemp()
shutil.copy2(exe, tmpnm)
os.chmod(tmpnm, 0755)
icon.CopyIcons(tmpnm, self.icon)
trash.append(tmpnm)
exe = tmpnm
if self.versrsrc:
tmpnm = tempfile.mktemp()
shutil.copy2(exe, tmpnm)
os.chmod(tmpnm, 0755)
versionInfo.SetVersion(tmpnm, self.versrsrc)
trash.append(tmpnm)
exe = tmpnm
exe = checkCache(exe, self.strip, self.upx and config['hasUPX'])
self.copy(exe, outf)
if self.append_pkg:
print "Appending archive to EXE", self.name
self.copy(self.pkg.name, outf)
else:
print "Copying archive to", self.pkgname
shutil.copy2(self.pkg.name, self.pkgname)
outf.close()
os.chmod(self.name, 0755)
_save_data(self.out,
(self.name, self.console, self.debug, self.icon,
self.versrsrc, self.strip, self.upx, self.crypt, mtime(self.name)))
for item in trash:
os.remove(item)
return 1
def copy(self, fnm, outf):
inf = open(fnm, 'rb')
while 1:
data = inf.read(64*1024)
if not data:
break
outf.write(data)
class DLL(EXE):
def assemble(self):
print "building DLL", os.path.basename(self.out)
outf = open(self.name, 'wb')
dll = self._bootloader_postfix('support/loader/inprocsrvr')
dll = os.path.join(HOMEPATH, dll) + '.dll'
self.copy(dll, outf)
self.copy(self.pkg.name, outf)
outf.close()
os.chmod(self.name, 0755)
_save_data(self.out,
(self.name, self.console, self.debug, self.icon,
self.versrsrc, self.strip, self.upx, mtime(self.name)))
return 1
class COLLECT(Target):
def __init__(self, *args, **kws):
Target.__init__(self)
self.name = kws.get('name',None)
if self.name is None:
self.name = 'dist_' + self.out[:-4]
self.strip_binaries = kws.get('strip',0)
self.upx_binaries = kws.get('upx',0)
if not os.path.isabs(self.name):
self.name = os.path.join(SPECPATH, self.name)
self.toc = TOC()
for arg in args:
if isinstance(arg, TOC):
self.toc.extend(arg)
elif isinstance(arg, Target):
self.toc.append((os.path.basename(arg.name), arg.name, arg.typ))
if isinstance(arg, EXE) and not arg.append_pkg:
self.toc.append((os.path.basename(arg.pkgname), arg.pkgname, 'PKG'))
self.toc.extend(arg.dependencies)
else:
self.toc.extend(arg)
self.__postinit__()
GUTS = (('name', _check_guts_eq),
('strip_binaries', _check_guts_eq),
('upx_binaries', _check_guts_eq),
('toc', _check_guts_eq), # additional check below
)
def check_guts(self, last_build):
data = Target.get_guts(self, last_build)
if not data:
return True
toc = data[-1]
for inm, fnm, typ in self.toc:
if typ == 'EXTENSION':
ext = os.path.splitext(fnm)[1]
test = os.path.join(self.name, inm+ext)
else:
test = os.path.join(self.name, os.path.basename(fnm))
if not os.path.exists(test):
print "building %s because %s is missing" % (self.outnm, test)
return 1
if mtime(fnm) > mtime(test):
print "building %s because %s is more recent" % (self.outnm, fnm)
return 1
return 0
def assemble(self):
print "building COLLECT", os.path.basename(self.out)
if not os.path.exists(self.name):
os.makedirs(self.name)
toc = TOC()
for inm, fnm, typ in self.toc:
if typ == 'EXTENSION':
binext = os.path.splitext(fnm)[1]
if not os.path.splitext(inm)[1] == binext:
inm = inm + binext
toc.append((inm, fnm, typ))
for inm, fnm, typ in toc:
tofnm = os.path.join(self.name, inm)
todir = os.path.dirname(tofnm)
if not os.path.exists(todir):
os.makedirs(todir)
if typ in ('EXTENSION', 'BINARY'):
fnm = checkCache(fnm, self.strip_binaries,
self.upx_binaries and ( iswin or cygwin )
and config['hasUPX'])
shutil.copy2(fnm, tofnm)
if typ in ('EXTENSION', 'BINARY'):
os.chmod(tofnm, 0755)
_save_data(self.out,
(self.name, self.strip_binaries, self.upx_binaries, self.toc))
return 1
class BUNDLE(Target):
def __init__(self, *args, **kws):
Target.__init__(self)
self.appname = kws.get("appname", None)
self.version = kws.get("version", "0.0.0")
self.toc = TOC()
for arg in args:
if isinstance(arg, EXE):
if self.appname is None:
self.appname = "Mac%s" % (arg.name,)
self.name = os.path.join(os.path.dirname(SPECPATH), self.appname + ".app")
self.exename = arg.name
self.toc.append((os.path.basename(arg.name), arg.name, arg.typ))
self.toc.extend(arg.dependencies)
else:
                print "unsupported entry %s" % arg.__class__.__name__
self.__postinit__()
GUTS = (('toc', _check_guts_eq), # additional check below
)
def check_guts(self, last_build):
data = Target.get_guts(self, last_build)
if not data:
return True
toc = data[-1]
for inm, fnm, typ in self.toc:
test = os.path.join(self.name, os.path.basename(fnm))
if not os.path.exists(test):
print "building %s because %s is missing" % (self.outnm, test)
return 1
if mtime(fnm) > mtime(test):
print "building %s because %s is more recent" % (self.outnm, fnm)
return 1
return 0
def assemble(self):
print "building BUNDLE", os.path.basename(self.out)
if os.path.exists(self.name):
shutil.rmtree(self.name)
# Create a minimal Mac bundle structure
os.makedirs(self.name)
os.makedirs(os.path.join(self.name, "Contents"))
os.makedirs(os.path.join(self.name, "Contents", "MacOS"))
os.makedirs(os.path.join(self.name, "Contents", "Resources"))
os.makedirs(os.path.join(self.name, "Contents", "Frameworks"))
# Key/values for a minimal Info.plist file
info_plist_dict = {"CFBundleDisplayName": self.appname,
"CFBundleName": self.appname,
"CFBundleExecutable": os.path.basename(self.exename),
"CFBundleIconFile": "App.icns",
"CFBundleInfoDictionaryVersion": "6.0",
"CFBundlePackageType": "APPL",
"CFBundleShortVersionString": self.version,
# Setting this to 1 will cause Mac OS X *not* to show
# a dock icon for the PyInstaller process which
# decompresses the real executable's contents -
# actually, it's not clear why the real executable
# gets instead an icon doing so.
"LSBackgroundOnly": "1",
}
info_plist = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>"""
for k, v in info_plist_dict.items():
info_plist += "<key>%s</key>\n<string>%s</string>\n" % (k, v)
info_plist += """</dict>
</plist>"""
f = open(os.path.join(self.name, "Contents", "Info.plist"), "w")
f.write(info_plist)
f.close()
for inm, fnm, typ in self.toc:
tofnm = os.path.join(self.name, "Contents", "MacOS", inm)
todir = os.path.dirname(tofnm)
if not os.path.exists(todir):
os.makedirs(todir)
shutil.copy2(fnm, tofnm)
return 1
class TOC(UserList.UserList):
def __init__(self, initlist=None):
UserList.UserList.__init__(self)
self.fltr = {}
if initlist:
for tpl in initlist:
self.append(tpl)
def append(self, tpl):
try:
fn = tpl[0]
if tpl[2] == "BINARY":
# Normalize the case for binary files only (to avoid duplicates
# for different cases under Windows). We can't do that for
# Python files because the import semantic (even at runtime)
# depends on the case.
fn = os.path.normcase(fn)
if not self.fltr.get(fn):
self.data.append(tpl)
self.fltr[fn] = 1
except TypeError:
print "TOC found a %s, not a tuple" % tpl
raise
def insert(self, pos, tpl):
fn = tpl[0]
if tpl[2] == "BINARY":
fn = os.path.normcase(fn)
if not self.fltr.get(fn):
self.data.insert(pos, tpl)
self.fltr[fn] = 1
def __add__(self, other):
rslt = TOC(self.data)
rslt.extend(other)
return rslt
def __radd__(self, other):
rslt = TOC(other)
rslt.extend(self.data)
return rslt
def extend(self, other):
for tpl in other:
self.append(tpl)
def __sub__(self, other):
fd = self.fltr.copy()
# remove from fd if it's in other
for tpl in other:
if fd.get(tpl[0],0):
del fd[tpl[0]]
rslt = TOC()
# return only those things still in fd (preserve order)
for tpl in self.data:
if fd.get(tpl[0],0):
rslt.append(tpl)
return rslt
def __rsub__(self, other):
rslt = TOC(other)
return rslt.__sub__(self)
def intersect(self, other):
rslt = TOC()
for tpl in other:
if self.fltr.get(tpl[0],0):
rslt.append(tpl)
return rslt
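# Illustrative sketch (not part of the original module): TOC entries are
# (name, path, typecode) tuples and duplicate names are filtered on append,
# so spec files can combine collections with simple arithmetic, e.g.:
#   a = TOC([('mod', '/src/mod.py', 'PYMODULE')])
#   b = TOC([('mod', '/src/mod.py', 'PYMODULE'),
#            ('libfoo', '/usr/lib/libfoo.so', 'BINARY')])
#   both = a + b    # 'mod' appears only once
#   extra = b - a   # entries of b whose names are not in a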
class Tree(Target, TOC):
def __init__(self, root=None, prefix=None, excludes=None):
Target.__init__(self)
TOC.__init__(self)
self.root = root
self.prefix = prefix
self.excludes = excludes
if excludes is None:
self.excludes = []
self.__postinit__()
GUTS = (('root', _check_guts_eq),
('prefix', _check_guts_eq),
('excludes', _check_guts_eq),
('toc', None),
)
def check_guts(self, last_build):
data = Target.get_guts(self, last_build)
if not data:
return True
stack = [ data[0] ] # root
toc = data[3] # toc
while stack:
d = stack.pop()
if mtime(d) > last_build:
print "building %s because directory %s changed" % (self.outnm, d)
return True
for nm in os.listdir(d):
path = os.path.join(d, nm)
if os.path.isdir(path):
stack.append(path)
self.data = toc
return False
def assemble(self):
print "building Tree", os.path.basename(self.out)
stack = [(self.root, self.prefix)]
excludes = {}
xexcludes = {}
for nm in self.excludes:
if nm[0] == '*':
xexcludes[nm[1:]] = 1
else:
excludes[nm] = 1
rslt = []
while stack:
dir, prefix = stack.pop()
for fnm in os.listdir(dir):
if excludes.get(fnm, 0) == 0:
ext = os.path.splitext(fnm)[1]
if xexcludes.get(ext,0) == 0:
fullfnm = os.path.join(dir, fnm)
rfnm = prefix and os.path.join(prefix, fnm) or fnm
if os.path.isdir(fullfnm):
stack.append((fullfnm, rfnm))
else:
rslt.append((rfnm, fullfnm, 'DATA'))
self.data = rslt
try:
oldstuff = _load_data(self.out)
except:
oldstuff = None
newstuff = (self.root, self.prefix, self.excludes, self.data)
if oldstuff != newstuff:
_save_data(self.out, newstuff)
return 1
print self.out, "no change!"
return 0
def TkTree():
tclroot = config['TCL_root']
tclnm = os.path.join('_MEI', os.path.basename(tclroot))
tkroot = config['TK_root']
tknm = os.path.join('_MEI', os.path.basename(tkroot))
tcltree = Tree(tclroot, tclnm, excludes=['demos','encoding','*.lib'])
tktree = Tree(tkroot, tknm, excludes=['demos','encoding','*.lib'])
return tcltree + tktree
def TkPKG():
return PKG(TkTree(), name='tk.pkg')
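# A spec file executed by build() below typically chains the targets defined
# above, for example (illustrative sketch only, all names are made up):
#   a = Analysis(['myscript.py'], pathex=['.'])
#   pyz = PYZ(a.pure)
#   exe = EXE(pyz, a.scripts, exclude_binaries=1, name='myscript.exe', console=1)
#   coll = COLLECT(exe, a.binaries, a.zipfiles, a.datas, name='dist_myscript')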
#---
def build(spec):
global SPECPATH, BUILDPATH, WARNFILE, rthooks
rthooks = _load_data(os.path.join(HOMEPATH, 'rthooks.dat'))
SPECPATH, specnm = os.path.split(spec)
specnm = os.path.splitext(specnm)[0]
if SPECPATH == '':
SPECPATH = os.getcwd()
WARNFILE = os.path.join(SPECPATH, 'warn%s.txt' % specnm)
BUILDPATH = os.path.join(SPECPATH, 'build',
"pyi." + config['target_platform'], specnm)
if '-o' in sys.argv:
bpath = sys.argv[sys.argv.index('-o')+1]
if os.path.isabs(bpath):
BUILDPATH = bpath
else:
BUILDPATH = os.path.join(SPECPATH, bpath)
if not os.path.exists(BUILDPATH):
os.makedirs(BUILDPATH)
execfile(spec)
def main(specfile, configfilename):
global target_platform, target_iswin, config
global icon, versionInfo
try:
config = _load_data(configfilename)
except IOError:
print "You must run Configure.py before building!"
sys.exit(1)
target_platform = config.get('target_platform', sys.platform)
target_iswin = target_platform[:3] == 'win'
if target_platform == sys.platform:
# _not_ cross compiling
if config['pythonVersion'] != sys.version:
            print "The current version of Python is not the same as the one with which PyInstaller was configured."
print "Please re-run Configure.py with this version."
sys.exit(1)
if config.setdefault('pythonDebug', None) != __debug__:
print "python optimization flags changed: rerun Configure.py with the same [-O] option"
print "Configure.py optimize=%s, Build.py optimize=%s" % (not config['pythonDebug'], not __debug__)
sys.exit(1)
if config['hasRsrcUpdate']:
import icon, versionInfo
if config['hasUPX']:
setupUPXFlags()
if not config['useELFEXE']:
EXE.append_pkg = 0
build(specfile)
if __name__ == '__main__':
from pyi_optparse import OptionParser
parser = OptionParser('%prog [options] specfile')
parser.add_option('-C', '--configfile',
default=os.path.join(HOMEPATH, 'config.dat'),
help='Name of generated configfile (default: %default)')
opts, args = parser.parse_args()
if len(args) != 1:
parser.error('Requires exactly one .spec-file')
main(args[0], configfilename=opts.configfile)
| gpl-2.0 | -168,800,585,230,335,500 | 35.244654 | 121 | 0.529785 | false |
gdi2290/django | django/contrib/gis/gdal/tests/test_driver.py | 335 | 1253 | import unittest
from django.contrib.gis.gdal import HAS_GDAL
if HAS_GDAL:
from django.contrib.gis.gdal import Driver, GDALException
valid_drivers = (
# vector
'ESRI Shapefile', 'MapInfo File', 'TIGER', 'S57', 'DGN', 'Memory', 'CSV',
'GML', 'KML',
# raster
'GTiff', 'JPEG', 'MEM', 'PNG',
)
invalid_drivers = ('Foo baz', 'clucka', 'ESRI Shp', 'ESRI rast')
aliases = {
'eSrI': 'ESRI Shapefile',
'TigER/linE': 'TIGER',
'SHAPE': 'ESRI Shapefile',
'sHp': 'ESRI Shapefile',
'tiFf': 'GTiff',
'tIf': 'GTiff',
'jPEg': 'JPEG',
'jpG': 'JPEG',
}
@unittest.skipUnless(HAS_GDAL, "GDAL is required")
class DriverTest(unittest.TestCase):
def test01_valid_driver(self):
"Testing valid GDAL/OGR Data Source Drivers."
for d in valid_drivers:
dr = Driver(d)
self.assertEqual(d, str(dr))
def test02_invalid_driver(self):
"Testing invalid GDAL/OGR Data Source Drivers."
for i in invalid_drivers:
self.assertRaises(GDALException, Driver, i)
def test03_aliases(self):
"Testing driver aliases."
for alias, full_name in aliases.items():
dr = Driver(alias)
self.assertEqual(full_name, str(dr))
| bsd-3-clause | -6,406,123,687,512,764,000 | 24.571429 | 77 | 0.60016 | false |
neumerance/cloudloon2 | .venv/lib/python2.7/site-packages/django/forms/extras/widgets.py | 110 | 5251 | """
Extra HTML Widget classes
"""
from __future__ import unicode_literals
import datetime
import re
from django.forms.widgets import Widget, Select
from django.utils import datetime_safe
from django.utils.dates import MONTHS
from django.utils.safestring import mark_safe
from django.utils.formats import get_format
from django.utils import six
from django.conf import settings
__all__ = ('SelectDateWidget',)
RE_DATE = re.compile(r'(\d{4})-(\d\d?)-(\d\d?)$')
def _parse_date_fmt():
fmt = get_format('DATE_FORMAT')
escaped = False
output = []
for char in fmt:
if escaped:
escaped = False
elif char == '\\':
escaped = True
elif char in 'Yy':
output.append('year')
#if not self.first_select: self.first_select = 'year'
elif char in 'bEFMmNn':
output.append('month')
#if not self.first_select: self.first_select = 'month'
elif char in 'dj':
output.append('day')
#if not self.first_select: self.first_select = 'day'
return output
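# For example, a DATE_FORMAT of 'N j, Y' yields ['month', 'day', 'year'],
# which determines the order in which SelectDateWidget renders its three
# <select> boxes.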
class SelectDateWidget(Widget):
"""
A Widget that splits date input into three <select> boxes.
This also serves as an example of a Widget that has more than one HTML
element and hence implements value_from_datadict.
"""
none_value = (0, '---')
month_field = '%s_month'
day_field = '%s_day'
year_field = '%s_year'
def __init__(self, attrs=None, years=None, required=True):
# years is an optional list/tuple of years to use in the "year" select box.
self.attrs = attrs or {}
self.required = required
if years:
self.years = years
else:
this_year = datetime.date.today().year
self.years = range(this_year, this_year+10)
def render(self, name, value, attrs=None):
try:
year_val, month_val, day_val = value.year, value.month, value.day
except AttributeError:
year_val = month_val = day_val = None
if isinstance(value, six.string_types):
if settings.USE_L10N:
try:
input_format = get_format('DATE_INPUT_FORMATS')[0]
v = datetime.datetime.strptime(value, input_format)
year_val, month_val, day_val = v.year, v.month, v.day
except ValueError:
pass
else:
match = RE_DATE.match(value)
if match:
year_val, month_val, day_val = [int(v) for v in match.groups()]
choices = [(i, i) for i in self.years]
year_html = self.create_select(name, self.year_field, value, year_val, choices)
choices = list(six.iteritems(MONTHS))
month_html = self.create_select(name, self.month_field, value, month_val, choices)
choices = [(i, i) for i in range(1, 32)]
day_html = self.create_select(name, self.day_field, value, day_val, choices)
output = []
for field in _parse_date_fmt():
if field == 'year':
output.append(year_html)
elif field == 'month':
output.append(month_html)
elif field == 'day':
output.append(day_html)
return mark_safe('\n'.join(output))
def id_for_label(self, id_):
first_select = None
field_list = _parse_date_fmt()
if field_list:
first_select = field_list[0]
if first_select is not None:
return '%s_%s' % (id_, first_select)
else:
return '%s_month' % id_
def value_from_datadict(self, data, files, name):
y = data.get(self.year_field % name)
m = data.get(self.month_field % name)
d = data.get(self.day_field % name)
if y == m == d == "0":
return None
if y and m and d:
if settings.USE_L10N:
input_format = get_format('DATE_INPUT_FORMATS')[0]
try:
date_value = datetime.date(int(y), int(m), int(d))
except ValueError:
return '%s-%s-%s' % (y, m, d)
else:
date_value = datetime_safe.new_date(date_value)
return date_value.strftime(input_format)
else:
return '%s-%s-%s' % (y, m, d)
return data.get(name, None)
def create_select(self, name, field, value, val, choices):
if 'id' in self.attrs:
id_ = self.attrs['id']
else:
id_ = 'id_%s' % name
if not (self.required and val):
choices.insert(0, self.none_value)
local_attrs = self.build_attrs(id=field % id_)
s = Select(choices=choices)
select_html = s.render(field % name, val, local_attrs)
return select_html
def _has_changed(self, initial, data):
try:
input_format = get_format('DATE_INPUT_FORMATS')[0]
data = datetime_safe.datetime.strptime(data, input_format).date()
except (TypeError, ValueError):
pass
return super(SelectDateWidget, self)._has_changed(initial, data)
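# Example usage (illustrative only): the widget is normally attached to a
# DateField, e.g.
#   from django import forms
#   from django.forms.extras.widgets import SelectDateWidget
#   class BirthdayForm(forms.Form):
#       birthday = forms.DateField(widget=SelectDateWidget(years=range(1980, 2001)))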
| apache-2.0 | 2,235,405,766,932,765,700 | 35.213793 | 90 | 0.546182 | false |
ismailsunni/healthsites | django_project/localities/tests/test_model_DomainArchive.py | 2 | 1095 | # -*- coding: utf-8 -*-
from django.test import TestCase
from .model_factories import DomainF
from ..models import DomainArchive
class TestModelDomainArchive(TestCase):
def test_domainArchive_fields(self):
self.assertListEqual(
[fld.name for fld in DomainArchive._meta.fields], [
u'id', 'changeset', 'version', 'content_type', 'object_id',
'name', 'description', 'template_fragment'
]
)
def test_archiving(self):
domain = DomainF.create(name='A domain')
domain.name = 'test'
domain.save()
domain.description = 'a description'
domain.save()
# test save with no changes, should not trigger model archival
domain.save()
self.assertEqual(DomainArchive.objects.count(), 3)
self.assertListEqual(
[dom.name for dom in DomainArchive.objects.all()],
['A domain', 'test', 'test']
)
self.assertListEqual(
[dom.version for dom in DomainArchive.objects.all()],
[1, 2, 3]
)
| bsd-2-clause | 1,310,634,890,075,636,200 | 26.375 | 75 | 0.582648 | false |
ltiao/networkx | networkx/algorithms/tests/test_distance_measures.py | 6 | 2145 | #!/usr/bin/env python
from nose.tools import *
import networkx
class TestDistance:
def setUp(self):
G=networkx.Graph()
from networkx import convert_node_labels_to_integers as cnlti
G=cnlti(networkx.grid_2d_graph(4,4),first_label=1,ordering="sorted")
self.G=G
def test_eccentricity(self):
assert_equal(networkx.eccentricity(self.G,1),6)
e=networkx.eccentricity(self.G)
assert_equal(e[1],6)
sp = dict(networkx.shortest_path_length(self.G))
e=networkx.eccentricity(self.G,sp=sp)
assert_equal(e[1],6)
e=networkx.eccentricity(self.G,v=1)
assert_equal(e,6)
e=networkx.eccentricity(self.G,v=[1,1]) #This behavior changed in version 1.8 (ticket #739)
assert_equal(e[1],6)
e=networkx.eccentricity(self.G,v=[1,2])
assert_equal(e[1],6)
# test against graph with one node
G=networkx.path_graph(1)
e=networkx.eccentricity(G)
assert_equal(e[0],0)
e=networkx.eccentricity(G,v=0)
assert_equal(e,0)
assert_raises(networkx.NetworkXError, networkx.eccentricity, G, 1)
# test against empty graph
G=networkx.empty_graph()
e=networkx.eccentricity(G)
assert_equal(e,{})
def test_diameter(self):
assert_equal(networkx.diameter(self.G),6)
def test_radius(self):
assert_equal(networkx.radius(self.G),4)
def test_periphery(self):
assert_equal(set(networkx.periphery(self.G)),set([1, 4, 13, 16]))
def test_center(self):
assert_equal(set(networkx.center(self.G)),set([6, 7, 10, 11]))
def test_radius_exception(self):
G=networkx.Graph()
G.add_edge(1,2)
G.add_edge(3,4)
assert_raises(networkx.NetworkXError, networkx.diameter, G)
@raises(networkx.NetworkXError)
def test_eccentricity_infinite(self):
G=networkx.Graph([(1,2),(3,4)])
e = networkx.eccentricity(G)
@raises(networkx.NetworkXError)
def test_eccentricity_invalid(self):
G=networkx.Graph([(1,2),(3,4)])
e = networkx.eccentricity(G,sp=1)
| bsd-3-clause | 4,124,456,332,500,469,000 | 30.086957 | 100 | 0.61958 | false |
idea4bsd/idea4bsd | python/helpers/py2only/docutils/languages/cs.py | 148 | 1928 | # $Id: cs.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: Marek Blaha <[email protected]>
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
Czech-language mappings for language-dependent features of Docutils.
"""
__docformat__ = 'reStructuredText'
labels = {
# fixed: language-dependent
'author': u'Autor',
'authors': u'Auto\u0159i',
'organization': u'Organizace',
'address': u'Adresa',
'contact': u'Kontakt',
'version': u'Verze',
'revision': u'Revize',
'status': u'Stav',
'date': u'Datum',
'copyright': u'Copyright',
'dedication': u'V\u011Bnov\u00E1n\u00ED',
'abstract': u'Abstrakt',
'attention': u'Pozor!',
'caution': u'Opatrn\u011B!',
'danger': u'!NEBEZPE\u010C\u00CD!',
'error': u'Chyba',
'hint': u'Rada',
'important': u'D\u016Fle\u017Eit\u00E9',
'note': u'Pozn\u00E1mka',
'tip': u'Tip',
'warning': u'Varov\u00E1n\u00ED',
'contents': u'Obsah'}
"""Mapping of node class name to label text."""
bibliographic_fields = {
# language-dependent: fixed
u'autor': 'author',
u'auto\u0159i': 'authors',
u'organizace': 'organization',
u'adresa': 'address',
u'kontakt': 'contact',
u'verze': 'version',
u'revize': 'revision',
u'stav': 'status',
u'datum': 'date',
u'copyright': 'copyright',
u'v\u011Bnov\u00E1n\u00ED': 'dedication',
u'abstrakt': 'abstract'}
"""Czech (lowcased) to canonical name mapping for bibliographic fields."""
author_separators = [';', ',']
"""List of separator strings for the 'Authors' bibliographic field. Tried in
order."""
| apache-2.0 | -6,255,073,848,322,778,000 | 31.133333 | 76 | 0.617739 | false |
cigroup-ol/metaopt | metaopt/tests/integration/optimizer/singleinvoke.py | 1 | 1233 | # -*- coding: utf-8 -*-
"""
Tests for the single invoke invoker.
"""
# Future
from __future__ import absolute_import, division, print_function, \
unicode_literals, with_statement
# Third Party
import nose
from mock import Mock
# First Party
from metaopt.concurrent.invoker.singleprocess import SingleProcessInvoker
from metaopt.core.arg.util.creator import ArgsCreator
from metaopt.core.paramspec.util import param
from metaopt.core.returnspec.util.wrapper import ReturnValuesWrapper
from metaopt.optimizer.singleinvoke import SingleInvokeOptimizer
@param.int("a", interval=(2, 2))
@param.int("b", interval=(1, 1))
def f(a, b):
return -(a + b)
def test_optimize_returns_result():
optimizer = SingleInvokeOptimizer()
optimizer.on_result = Mock()
optimizer.on_error = Mock()
invoker = SingleProcessInvoker()
invoker.f = f
optimizer.optimize(invoker=invoker, param_spec=f.param_spec,
return_spec=None)
args = ArgsCreator(f.param_spec).args()
assert not optimizer.on_error.called
optimizer.on_result.assert_called_with(value=ReturnValuesWrapper(None, -3),
fargs=args)
if __name__ == '__main__':
nose.runmodule()
| bsd-3-clause | -9,073,260,673,667,865,000 | 26.4 | 79 | 0.689376 | false |
SuperJohn/scikit-class | grid_search.py | 6 | 1243 | import pandas as pd
import numpy as np
df = pd.read_csv('tweets.csv')
target = df['is_there_an_emotion_directed_at_a_brand_or_product']
text = df['tweet_text']
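# Drop rows with missing tweet text, keeping the labels aligned with the text.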
fixed_text = text[pd.notnull(text)]
fixed_target = target[pd.notnull(text)]
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.naive_bayes import MultinomialNB
from sklearn.pipeline import Pipeline
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
p = Pipeline(steps=[('counts', CountVectorizer()),
('feature_selection', SelectKBest(chi2)),
('multinomialnb', MultinomialNB())])
from sklearn.grid_search import GridSearchCV
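# Grid keys follow scikit-learn's '<step name>__<parameter>' convention, so
# 'counts__max_df' tunes the max_df argument of the CountVectorizer step.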
parameters = {
'counts__max_df': (0.5, 0.75, 1.0),
'counts__min_df': (1, 2, 3),
'counts__ngram_range': ((1,1), (1,2)),
# 'feature_selection__k': (1000, 10000, 100000)
}
grid_search = GridSearchCV(p, parameters, n_jobs=1, verbose=1, cv=10)
grid_search.fit(fixed_text, fixed_target)
print("Best score: %0.3f" % grid_search.best_score_)
print("Best parameters set:")
best_parameters = grid_search.best_estimator_.get_params()
for param_name in sorted(parameters.keys()):
print("\t%s: %r" % (param_name, best_parameters[param_name]))
| gpl-2.0 | 1,503,810,818,064,182,000 | 30.871795 | 69 | 0.695897 | false |
fnouama/intellij-community | python/helpers/pydev/pydev_monkey_qt.py | 47 | 5183 | from __future__ import nested_scopes
def set_trace_in_qt():
import pydevd_tracing
from pydevd_comm import GetGlobalDebugger
debugger = GetGlobalDebugger()
if debugger is not None:
pydevd_tracing.SetTrace(debugger.trace_dispatch)
_patched_qt = False
def patch_qt():
'''
This method patches qt (PySide, PyQt4, PyQt5) so that we have hooks to set the tracing for QThread.
'''
# Avoid patching more than once
global _patched_qt
if _patched_qt:
return
_patched_qt = True
# Ok, we have an issue here:
# PyDev-452: Selecting PyQT API version using sip.setapi fails in debug mode
# http://pyqt.sourceforge.net/Docs/PyQt4/incompatible_apis.html
# Mostly, if the user uses a different API version (i.e.: v2 instead of v1),
# that has to be done before importing PyQt4/5 modules (PySide doesn't have this issue
# as it only implements v2).
patch_qt_on_import = None
try:
import PySide
except:
try:
import PyQt4
patch_qt_on_import = 'PyQt4'
except:
try:
import PyQt5
patch_qt_on_import = 'PyQt5'
except:
return
if patch_qt_on_import:
_patch_import_to_patch_pyqt_on_import(patch_qt_on_import)
else:
_internal_patch_qt()
def _patch_import_to_patch_pyqt_on_import(patch_qt_on_import):
# I don't like this approach very much as we have to patch __import__, but I like even less
# asking the user to configure something in the client side...
# So, our approach is to patch PyQt4/5 right before the user tries to import it (at which
# point he should've set the sip api version properly already anyways).
dotted = patch_qt_on_import + '.'
original_import = __import__
from _pydev_imps._pydev_sys_patch import patch_sys_module, patch_reload, cancel_patches_in_sys_module
patch_sys_module()
patch_reload()
def patched_import(name, *args, **kwargs):
if patch_qt_on_import == name or name.startswith(dotted):
builtins.__import__ = original_import
cancel_patches_in_sys_module()
_internal_patch_qt() # Patch it only when the user would import the qt module
return original_import(name, *args, **kwargs)
try:
import builtins
except ImportError:
import __builtin__ as builtins
builtins.__import__ = patched_import
def _internal_patch_qt():
try:
from PySide import QtCore
except:
try:
from PyQt4 import QtCore
except:
try:
from PyQt5 import QtCore
except:
return
_original_thread_init = QtCore.QThread.__init__
_original_runnable_init = QtCore.QRunnable.__init__
_original_QThread = QtCore.QThread
class FuncWrapper:
def __init__(self, original):
self._original = original
def __call__(self, *args, **kwargs):
set_trace_in_qt()
return self._original(*args, **kwargs)
class StartedSignalWrapper: # Wrapper for the QThread.started signal
def __init__(self, thread, original_started):
self.thread = thread
self.original_started = original_started
def connect(self, func, *args, **kwargs):
return self.original_started.connect(FuncWrapper(func), *args, **kwargs)
def disconnect(self, *args, **kwargs):
return self.original_started.disconnect(*args, **kwargs)
def emit(self, *args, **kwargs):
return self.original_started.emit(*args, **kwargs)
class ThreadWrapper(QtCore.QThread): # Wrapper for QThread
def __init__(self, *args, **kwargs):
_original_thread_init(self, *args, **kwargs)
# In PyQt5 the program hangs when we try to call original run method of QThread class.
# So we need to distinguish instances of QThread class and instances of QThread inheritors.
if self.__class__.run == _original_QThread.run:
self.run = self._exec_run
else:
self._original_run = self.run
self.run = self._new_run
self._original_started = self.started
self.started = StartedSignalWrapper(self, self.started)
def _exec_run(self):
set_trace_in_qt()
return self.exec_()
def _new_run(self):
set_trace_in_qt()
return self._original_run()
class RunnableWrapper(QtCore.QRunnable): # Wrapper for QRunnable
def __init__(self, *args, **kwargs):
_original_runnable_init(self, *args, **kwargs)
self._original_run = self.run
self.run = self._new_run
def _new_run(self):
set_trace_in_qt()
return self._original_run()
QtCore.QThread = ThreadWrapper
QtCore.QRunnable = RunnableWrapper
| apache-2.0 | 8,057,919,266,321,573,000 | 31.597484 | 105 | 0.580745 | false |
donnerluetjen/ardupilot | Tools/LogAnalyzer/tests/TestAutotune.py | 265 | 4748 | from LogAnalyzer import Test,TestResult
import DataflashLog
# from ArduCopter/defines.h
AUTOTUNE_INITIALISED = 30
AUTOTUNE_OFF = 31
AUTOTUNE_RESTART = 32
AUTOTUNE_SUCCESS = 33
AUTOTUNE_FAILED = 34
AUTOTUNE_REACHED_LIMIT = 35
AUTOTUNE_PILOT_TESTING = 36
AUTOTUNE_SAVEDGAINS = 37
AUTOTUNE_EVENTS = frozenset([AUTOTUNE_INITIALISED,
AUTOTUNE_OFF,
AUTOTUNE_RESTART,
AUTOTUNE_SUCCESS,
AUTOTUNE_FAILED,
AUTOTUNE_REACHED_LIMIT,
AUTOTUNE_PILOT_TESTING,
AUTOTUNE_SAVEDGAINS])
class TestAutotune(Test):
'''test for autotune success (copter only)'''
class AutotuneSession(object):
def __init__(self, events):
self.events = events
@property
def linestart(self):
return self.events[0][0]
@property
def linestop(self):
return self.events[-1][0]
@property
def success(self):
return AUTOTUNE_SUCCESS in [i for _,i in self.events]
@property
def failure(self):
return AUTOTUNE_FAILED in [i for _,i in self.events]
@property
def limit(self):
return AUTOTUNE_REACHED_LIMIT in [i for _,i in self.events]
def __repr__(self):
return "<AutotuneSession {}-{}>".format(self.linestart,self.linestop)
def __init__(self):
Test.__init__(self)
self.name = "Autotune"
def run(self, logdata, verbose):
self.result = TestResult()
self.result.status = TestResult.StatusType.GOOD
if logdata.vehicleType != "ArduCopter":
self.result.status = TestResult.StatusType.NA
return
        for i in ['EV','ATDE','ATUN']:
            if i not in logdata.channels:
                self.result.status = TestResult.StatusType.UNKNOWN
                self.result.statusMessage = "No {} log data".format(i)
                return
events = filter(lambda x: x[1] in AUTOTUNE_EVENTS, logdata.channels["EV"]["Id"].listData)
attempts = []
j = None
for i in range(0,len(events)):
line,ev = events[i]
if ev == AUTOTUNE_INITIALISED:
if j is not None:
attempts.append(TestAutotune.AutotuneSession(events[j:i]))
j = i
# last attempt
if j is not None:
attempts.append(TestAutotune.AutotuneSession(events[j:]))
for a in attempts:
# this should not be necessary!
def class_from_channel(c):
members = dict({'__init__':lambda x: setattr(x,i,None) for i in logdata.channels[c]})
cls = type(\
'Channel__{:s}'.format(c),
(object,),
members
)
return cls
# last wins
if a.success:
self.result.status = TestResult.StatusType.GOOD
s = "[+]"
elif a.failure:
self.result.status = TestResult.StatusType.FAIL
s = "[-]"
else:
self.result.status = TestResult.StatusType.UNKNOWN
s = "[?]"
s += " Autotune {}-{}\n".format(a.linestart,a.linestop)
self.result.statusMessage += s
if verbose:
linenext = a.linestart + 1
while linenext < a.linestop:
try:
line = logdata.channels['ATUN']['RateMax'].getNearestValueFwd(linenext)[1]
if line > a.linestop:
break
except:
break
atun = class_from_channel('ATUN')()
for key in logdata.channels['ATUN']:
setattr(atun, key, logdata.channels['ATUN'][key].getNearestValueFwd(linenext)[0])
linenext = logdata.channels['ATUN'][key].getNearestValueFwd(linenext)[1] + 1
self.result.statusMessage += 'ATUN Axis:{atun.Axis} TuneStep:{atun.TuneStep} RateMin:{atun.RateMin:5.0f} RateMax:{atun.RateMax:5.0f} RPGain:{atun.RPGain:1.4f} RDGain:{atun.RDGain:1.4f} SPGain:{atun.SPGain:1.1f} (@line:{l})\n'.format(l=linenext,s=s, atun=atun)
self.result.statusMessage += '\n'
| gpl-3.0 | -6,637,155,107,806,410,000 | 36.984 | 283 | 0.491575 | false |
radiasoft/radtrack | experimental/hermite/testHermite02.py | 1 | 6919 | #
# Test executable #2 to exercise the Gauss-Hermite class
# Here, we fit a Gauss-Hermite expansion to an arbitrary profile.
# The SciPy least squares method is used.
#
# Copyright (c) 2013 RadiaBeam Technologies. All rights reserved
#
# python imports
import math
# SciPy imports
import numpy as np
import matplotlib.pyplot as plt
# RadiaBeam imports
from radtrack.fields import RbGaussHermiteMN
# SciPy imports
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import leastsq
# ---------------------------------------------------------
# Make sure the residual() method has access to necessary
# 'global' data:
global mMax, nMax, numFuncCalls, hermiteSeries
# Specify the central laser wavelength
lambda0 = 10.e-06
# Need a place holder for the waist size
w0 = 10.*lambda0
# Define the maximum order(s) of the Hermite expansion
mMax = 0 # horizontal
nMax = 0 # vertical
# Create an instance of the Hermite expansion class
hermiteSeries = RbGaussHermiteMN.RbGaussHermiteMN(lambda0,w0,w0,0.)
# Specify the desired grid size
numX = 50
numY = 50
nCells = numX * numY
# load up the x,y locations of the mesh
xMin = -4.*w0
xMax = 4.*w0
yMin = xMin
yMax = xMax
xArr = np.zeros(numX)
for iLoop in range(numX):
xArr[iLoop] = xMin + iLoop * (xMax-xMin) / (numX-1)
yArr = np.zeros(numY)
for jLoop in range(numY):
yArr[jLoop] = yMin + jLoop * (yMax-yMin) / (numY-1)
xGrid = np.zeros((numX, numY))
yGrid = np.zeros((numX, numY))
for iLoop in range(numX):
for jLoop in range(numY):
xGrid[iLoop,jLoop] = xMin + iLoop * (xMax-xMin) / (numX-1)
yGrid[iLoop,jLoop] = yMin + jLoop * (yMax-yMin) / (numY-1)
# Create transverse field profile (#1 simple Gaussian)
ExGrid = np.zeros((numX, numY))
exMax = 1.0e+09 # this gets scaled out before plotting/fitting
phi1 = math.pi/17.5
xs1 = 1.07 * w0
ys1 = -0.98 * w0
waistx = 0.9 * w0
waisty = 1.8 * w0
maxValue = 0.
for iLoop in range(numX):
for jLoop in range(numY):
xArg = (xArr[iLoop]-xs1)*math.cos(phi1) + (yArr[jLoop]-ys1)*math.sin(phi1)
yArg = -(xArr[iLoop]-xs1)*math.sin(phi1) + (yArr[jLoop]-ys1)*math.cos(phi1)
ExGrid[iLoop, jLoop] = exMax*math.exp(-(xArg/waistx)**2)*math.exp(-(yArg/waisty)**2)
maxValue = max(ExGrid[iLoop, jLoop], maxValue)
# Divide out the maximum value
ExGrid /= maxValue
# Calculate residuals for the least squares analysis
# params - array of fitting parameters
numFuncCalls = 0
def residuals(params, e, x, y):
global mMax, nMax, numFuncCalls, hermiteSeries
hermiteSeries.setWaistX(params[0])
hermiteSeries.setWaistY(params[1])
hermiteSeries.setWRotAngle(params[2])
hermiteSeries.setXShift(params[3])
hermiteSeries.setYShift(params[4])
hermiteSeries.setMCoef(params[5:mMax+6])
hermiteSeries.setNCoef(params[mMax+6:mMax+nMax+7])
# let the user know what's going on if many function calls are required
if numFuncCalls == 0:
print ' '
        print 'Number of calls to the residuals() function:'
numFuncCalls += 1
if 10*int(numFuncCalls/10.) == numFuncCalls:
print ' ', numFuncCalls
return e-hermiteSeries.evaluateEx(x,y,0.,0.)
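# Note: scipy.optimize.leastsq varies the entries of params so as to minimize
# the sum of squares of the residual vector returned by residuals().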
# plot the transverse field profile
ncLevels = 12
vLevels = [0.001, 0.05, 0.15, 0.25, 0.35, 0.45, 0.55, 0.65, 0.75, 0.85, 0.95, 1.05]
plt.figure(1)
cs1 = plt.contourf(xGrid, yGrid, ExGrid, vLevels)
plt.colorbar(cs1)
plt.axis([xMin, xMax, yMin, yMax])
plt.xlabel('x [m]')
plt.ylabel('y [m]')
plt.title('x-section #1: Gaussian w/ slight asymmetry & rotation')
# choose initial guesses for all fitting parameters
# also, specify the scale of variations for each
paramGuess = np.zeros(mMax+nMax+7)
paramGuess[0] = 1.2*w0 # horizontal waist
paramGuess[1] = 0.9*w0 # vertical waist
paramGuess[2] = 0.0 # rotation angle
paramGuess[3] = 0.0 # horizontal shift
paramGuess[4] = 0.0 # vertical shift
paramGuess[5] = 1.0 # 0th horiz. coeff
for iLoop in range(6,mMax+6):
paramGuess[iLoop] = 0.0 # other horiz. coeff's
paramGuess[mMax+6] = 1.0 # 0th vertical coeff
for iLoop in range(mMax+7,mMax+nMax+7):
paramGuess[iLoop] = 0.0 # other vertical coeff's
# invoke the least squares algorithm
result = leastsq(residuals, paramGuess, \
args=(np.reshape(ExGrid,nCells), \
np.reshape(xGrid,nCells), \
np.reshape(yGrid,nCells)), \
full_output=True, ftol=1e-6, \
maxfev=200)
parFit = result[0]
nEvals = result[2]['nfev']
resVals = result[2]['fvec']
message = result[3]
iError = result[4]
print ' '
print ' iError = ', iError
print ' message = ', message
print ' nEvals = ', nEvals
print ' resVals = ', resVals
# load the results into named variables (for clarity)
wxFit = parFit[0]
wyFit = parFit[1]
tmpPhi = parFit[2]
phiFit = tmpPhi - 2.*math.pi*int(0.5*tmpPhi/math.pi)
if phiFit > 2.*math.pi: phiFit -= 2.*math.pi
if phiFit < 0.: phiFit += 2.*math.pi
xsFit = parFit[3]
ysFit = parFit[4]
mCFit = np.zeros(mMax+1)
mCFit[0:mMax+1] = parFit[5:mMax+6]
nCFit = np.zeros(nMax+1)
nCFit[0:nMax+1] = parFit[mMax+6:mMax+nMax+7]
# check the results
print ' '
print 'The least squares minimimization has completed:'
print ' wx = ', waistx, '; ', wxFit
print ' wy = ', waisty, '; ', wyFit
print ' phi = ', phi1, '; ', phiFit
print ' xS = ', xs1, '; ', xsFit
print ' yS = ', ys1, '; ', ysFit
print ' C0x * C0y = 1.0; ', mCFit[0]*nCFit[0]
# print ' C1x = 0.0 ; ', mCFit[1]
# print ' C2x = 0.0 ; ', mCFit[2]
# print ' C3x = 0.0 ; ', mCFit[3]
# print ' C4x = 0.0 ; ', mCFit[4]
# print ' C1y = 0.0 ; ', nCFit[1]
# print ' C2y = 0.0 ; ', nCFit[2]
# print ' C3y = 0.0 ; ', nCFit[3]
# print ' C4y = 0.0 ; ', nCFit[4]
# load up the fitted electric field at all grid points
hermiteSeries.setWaistX(wxFit)
hermiteSeries.setWaistY(wyFit)
hermiteSeries.setWRotAngle(phiFit)
hermiteSeries.setXShift(xsFit)
hermiteSeries.setYShift(ysFit)
hermiteSeries.setMCoef(mCFit)
hermiteSeries.setNCoef(nCFit)
ExFit = np.reshape(hermiteSeries.evaluateEx(
np.reshape(xGrid,nCells), \
np.reshape(yGrid,nCells), 0., 0.), \
(numX, numY))
# plot the fitted transverse field profile
plt.figure(2)
cs2 = plt.contourf(xGrid, yGrid, ExFit, vLevels)
plt.colorbar(cs2)
plt.axis([xMin, xMax, yMin, yMax])
plt.xlabel('x [m]')
plt.ylabel('y [m]')
plt.title('x-section #1: Result of the least squares fit')
# plot the transverse profile of the difference
plt.figure(3)
cs3 = plt.contourf(xGrid, yGrid, ExFit-ExGrid, ncLevels)
plt.colorbar(cs3)
plt.axis([xMin, xMax, yMin, yMax])
plt.xlabel('x [m]')
plt.ylabel('y [m]')
plt.title('x-section #1: Absolute differences in Ex')
plt.show()
| apache-2.0 | -6,552,313,150,934,767,000 | 30.45 | 92 | 0.636653 | false |
OpenFacetracker/facetracker-core | lib/youtube-dl/youtube_dl/extractor/shared.py | 31 | 1936 | from __future__ import unicode_literals
import re
import base64
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urllib_request,
)
from ..utils import (
ExtractorError,
int_or_none,
)
class SharedIE(InfoExtractor):
_VALID_URL = r'http://shared\.sx/(?P<id>[\da-z]{10})'
_TEST = {
'url': 'http://shared.sx/0060718775',
'md5': '106fefed92a8a2adb8c98e6a0652f49b',
'info_dict': {
'id': '0060718775',
'ext': 'mp4',
'title': 'Bmp4',
},
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
if '>File does not exist<' in webpage:
raise ExtractorError(
'Video %s does not exist' % video_id, expected=True)
download_form = dict(re.findall(
r'<input type="hidden" name="([^"]+)" value="([^"]*)"', webpage))
request = compat_urllib_request.Request(
url, compat_urllib_parse.urlencode(download_form))
request.add_header('Content-Type', 'application/x-www-form-urlencoded')
video_page = self._download_webpage(
request, video_id, 'Downloading video page')
video_url = self._html_search_regex(
r'data-url="([^"]+)"', video_page, 'video URL')
title = base64.b64decode(self._html_search_meta(
'full:title', webpage, 'title')).decode('utf-8')
filesize = int_or_none(self._html_search_meta(
'full:size', webpage, 'file size', fatal=False))
thumbnail = self._html_search_regex(
r'data-poster="([^"]+)"', video_page, 'thumbnail', default=None)
return {
'id': video_id,
'url': video_url,
'ext': 'mp4',
'filesize': filesize,
'title': title,
'thumbnail': thumbnail,
}
| gpl-2.0 | 649,525,378,515,909,100 | 29.730159 | 79 | 0.551136 | false |
ekwoodrich/nirha | nirhaweb/venv/lib/python2.7/site-packages/pip/req.py | 328 | 83557 | from email.parser import FeedParser
import os
import imp
import locale
import re
import sys
import shutil
import tempfile
import textwrap
import zipfile
from distutils.util import change_root
from pip.locations import (bin_py, running_under_virtualenv,PIP_DELETE_MARKER_FILENAME,
write_delete_marker_file, bin_user)
from pip.exceptions import (InstallationError, UninstallationError, UnsupportedWheel,
BestVersionAlreadyInstalled, InvalidWheelFilename,
DistributionNotFound, PreviousBuildDirError)
from pip.vcs import vcs
from pip.log import logger
from pip.util import (display_path, rmtree, ask, ask_path_exists, backup_dir,
is_installable_dir, is_local, dist_is_local,
dist_in_usersite, dist_in_site_packages, renames,
normalize_path, egg_link_path, make_path_relative,
call_subprocess, is_prerelease, normalize_name)
from pip.backwardcompat import (urlparse, urllib, uses_pycache,
ConfigParser, string_types, HTTPError,
get_python_version, b)
from pip.index import Link
from pip.locations import build_prefix
from pip.download import (PipSession, get_file_content, is_url, url_to_path,
path_to_url, is_archive_file,
unpack_vcs_link, is_vcs_url, is_file_url,
unpack_file_url, unpack_http_url)
import pip.wheel
from pip.wheel import move_wheel_files, Wheel, wheel_ext
from pip._vendor import pkg_resources, six
def read_text_file(filename):
"""Return the contents of *filename*.
    Try to decode the file contents with utf-8, the preferred system encoding
(e.g., cp1252 on some Windows machines) and latin1, in that order. Decoding
a byte string with latin1 will never raise an error. In the worst case, the
returned string will contain some garbage characters.
"""
with open(filename, 'rb') as fp:
data = fp.read()
encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1']
for enc in encodings:
try:
data = data.decode(enc)
except UnicodeDecodeError:
continue
break
assert type(data) != bytes # Latin1 should have worked.
return data
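# Illustrative usage sketch (not part of the original pip source); the path is
# hypothetical:
#     text = read_text_file('/tmp/PKG-INFO')
#     # The decode falls back from utf-8 to the locale's preferred encoding and
#     # finally to latin1, so a str is always returned instead of raising
#     # UnicodeDecodeError.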
class InstallRequirement(object):
def __init__(self, req, comes_from, source_dir=None, editable=False,
url=None, as_egg=False, update=True, prereleases=None,
editable_options=None, from_bundle=False, pycompile=True):
self.extras = ()
if isinstance(req, string_types):
req = pkg_resources.Requirement.parse(req)
self.extras = req.extras
self.req = req
self.comes_from = comes_from
self.source_dir = source_dir
self.editable = editable
if editable_options is None:
editable_options = {}
self.editable_options = editable_options
self.url = url
self.as_egg = as_egg
self._egg_info_path = None
# This holds the pkg_resources.Distribution object if this requirement
# is already available:
self.satisfied_by = None
# This hold the pkg_resources.Distribution object if this requirement
# conflicts with another installed distribution:
self.conflicts_with = None
self._temp_build_dir = None
self._is_bundle = None
# True if the editable should be updated:
self.update = update
# Set to True after successful installation
self.install_succeeded = None
# UninstallPathSet of uninstalled distribution (for possible rollback)
self.uninstalled = None
self.use_user_site = False
self.target_dir = None
self.from_bundle = from_bundle
self.pycompile = pycompile
# True if pre-releases are acceptable
if prereleases:
self.prereleases = True
elif self.req is not None:
self.prereleases = any([is_prerelease(x[1]) and x[0] != "!=" for x in self.req.specs])
else:
self.prereleases = False
@classmethod
def from_editable(cls, editable_req, comes_from=None, default_vcs=None):
name, url, extras_override = parse_editable(editable_req, default_vcs)
if url.startswith('file:'):
source_dir = url_to_path(url)
else:
source_dir = None
res = cls(name, comes_from, source_dir=source_dir,
editable=True,
url=url,
editable_options=extras_override,
prereleases=True)
if extras_override is not None:
res.extras = extras_override
return res
@classmethod
def from_line(cls, name, comes_from=None, prereleases=None):
"""Creates an InstallRequirement from a name, which might be a
requirement, directory containing 'setup.py', filename, or URL.
"""
url = None
name = name.strip()
req = None
path = os.path.normpath(os.path.abspath(name))
link = None
if is_url(name):
link = Link(name)
elif os.path.isdir(path) and (os.path.sep in name or name.startswith('.')):
if not is_installable_dir(path):
raise InstallationError("Directory %r is not installable. File 'setup.py' not found." % name)
link = Link(path_to_url(name))
elif is_archive_file(path):
if not os.path.isfile(path):
logger.warn('Requirement %r looks like a filename, but the file does not exist', name)
link = Link(path_to_url(name))
# If the line has an egg= definition, but isn't editable, pull the requirement out.
# Otherwise, assume the name is the req for the non URL/path/archive case.
if link and req is None:
url = link.url_without_fragment
req = link.egg_fragment #when fragment is None, this will become an 'unnamed' requirement
# Handle relative file URLs
if link.scheme == 'file' and re.search(r'\.\./', url):
url = path_to_url(os.path.normpath(os.path.abspath(link.path)))
# fail early for invalid or unsupported wheels
if link.ext == wheel_ext:
wheel = Wheel(link.filename) # can raise InvalidWheelFilename
if not wheel.supported():
raise UnsupportedWheel("%s is not a supported wheel on this platform." % wheel.filename)
else:
req = name
return cls(req, comes_from, url=url, prereleases=prereleases)
def __str__(self):
if self.req:
s = str(self.req)
if self.url:
s += ' from %s' % self.url
else:
s = self.url
if self.satisfied_by is not None:
s += ' in %s' % display_path(self.satisfied_by.location)
if self.comes_from:
if isinstance(self.comes_from, string_types):
comes_from = self.comes_from
else:
comes_from = self.comes_from.from_path()
if comes_from:
s += ' (from %s)' % comes_from
return s
def from_path(self):
if self.req is None:
return None
s = str(self.req)
if self.comes_from:
if isinstance(self.comes_from, string_types):
comes_from = self.comes_from
else:
comes_from = self.comes_from.from_path()
if comes_from:
s += '->' + comes_from
return s
def build_location(self, build_dir, unpack=True):
if self._temp_build_dir is not None:
return self._temp_build_dir
if self.req is None:
self._temp_build_dir = tempfile.mkdtemp('-build', 'pip-')
self._ideal_build_dir = build_dir
return self._temp_build_dir
if self.editable:
name = self.name.lower()
else:
name = self.name
# FIXME: Is there a better place to create the build_dir? (hg and bzr need this)
if not os.path.exists(build_dir):
_make_build_dir(build_dir)
return os.path.join(build_dir, name)
def correct_build_location(self):
"""If the build location was a temporary directory, this will move it
to a new more permanent location"""
if self.source_dir is not None:
return
assert self.req is not None
assert self._temp_build_dir
old_location = self._temp_build_dir
new_build_dir = self._ideal_build_dir
del self._ideal_build_dir
if self.editable:
name = self.name.lower()
else:
name = self.name
new_location = os.path.join(new_build_dir, name)
if not os.path.exists(new_build_dir):
logger.debug('Creating directory %s' % new_build_dir)
_make_build_dir(new_build_dir)
if os.path.exists(new_location):
raise InstallationError(
'A package already exists in %s; please remove it to continue'
% display_path(new_location))
logger.debug('Moving package %s from %s to new location %s'
% (self, display_path(old_location), display_path(new_location)))
shutil.move(old_location, new_location)
self._temp_build_dir = new_location
self.source_dir = new_location
self._egg_info_path = None
@property
def name(self):
if self.req is None:
return None
return self.req.project_name
@property
def url_name(self):
if self.req is None:
return None
return urllib.quote(self.req.unsafe_name)
@property
def setup_py(self):
try:
import setuptools
except ImportError:
# Setuptools is not available
raise InstallationError(
"setuptools must be installed to install from a source "
"distribution"
)
setup_file = 'setup.py'
if self.editable_options and 'subdirectory' in self.editable_options:
setup_py = os.path.join(self.source_dir,
self.editable_options['subdirectory'],
setup_file)
else:
setup_py = os.path.join(self.source_dir, setup_file)
# Python2 __file__ should not be unicode
if six.PY2 and isinstance(setup_py, six.text_type):
setup_py = setup_py.encode(sys.getfilesystemencoding())
return setup_py
def run_egg_info(self, force_root_egg_info=False):
assert self.source_dir
if self.name:
logger.notify('Running setup.py (path:%s) egg_info for package %s' % (self.setup_py, self.name))
else:
logger.notify('Running setup.py (path:%s) egg_info for package from %s' % (self.setup_py, self.url))
logger.indent += 2
try:
# if it's distribute>=0.7, it won't contain an importable
# setuptools, and having an egg-info dir blocks the ability of
# setup.py to find setuptools plugins, so delete the egg-info dir if
# no setuptools. it will get recreated by the run of egg_info
# NOTE: this self.name check only works when installing from a specifier
# (not archive path/urls)
# TODO: take this out later
if self.name == 'distribute' and not os.path.isdir(os.path.join(self.source_dir, 'setuptools')):
rmtree(os.path.join(self.source_dir, 'distribute.egg-info'))
script = self._run_setup_py
script = script.replace('__SETUP_PY__', repr(self.setup_py))
script = script.replace('__PKG_NAME__', repr(self.name))
egg_info_cmd = [sys.executable, '-c', script, 'egg_info']
# We can't put the .egg-info files at the root, because then the source code will be mistaken
# for an installed egg, causing problems
if self.editable or force_root_egg_info:
egg_base_option = []
else:
egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info')
if not os.path.exists(egg_info_dir):
os.makedirs(egg_info_dir)
egg_base_option = ['--egg-base', 'pip-egg-info']
call_subprocess(
egg_info_cmd + egg_base_option,
cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False,
command_level=logger.VERBOSE_DEBUG,
command_desc='python setup.py egg_info')
finally:
logger.indent -= 2
if not self.req:
self.req = pkg_resources.Requirement.parse(
"%(Name)s==%(Version)s" % self.pkg_info())
self.correct_build_location()
## FIXME: This is a lame hack, entirely for PasteScript which has
## a self-provided entry point that causes this awkwardness
_run_setup_py = """
__file__ = __SETUP_PY__
from setuptools.command import egg_info
import pkg_resources
import os
import tokenize
def replacement_run(self):
self.mkpath(self.egg_info)
installer = self.distribution.fetch_build_egg
for ep in pkg_resources.iter_entry_points('egg_info.writers'):
# require=False is the change we're making:
writer = ep.load(require=False)
if writer:
writer(self, ep.name, os.path.join(self.egg_info,ep.name))
self.find_sources()
egg_info.egg_info.run = replacement_run
exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))
"""
def egg_info_data(self, filename):
if self.satisfied_by is not None:
if not self.satisfied_by.has_metadata(filename):
return None
return self.satisfied_by.get_metadata(filename)
assert self.source_dir
filename = self.egg_info_path(filename)
if not os.path.exists(filename):
return None
data = read_text_file(filename)
return data
def egg_info_path(self, filename):
if self._egg_info_path is None:
if self.editable:
base = self.source_dir
else:
base = os.path.join(self.source_dir, 'pip-egg-info')
filenames = os.listdir(base)
if self.editable:
filenames = []
for root, dirs, files in os.walk(base):
for dir in vcs.dirnames:
if dir in dirs:
dirs.remove(dir)
# Iterate over a copy of ``dirs``, since mutating
# a list while iterating over it can cause trouble.
# (See https://github.com/pypa/pip/pull/462.)
for dir in list(dirs):
# Don't search in anything that looks like a virtualenv environment
if (os.path.exists(os.path.join(root, dir, 'bin', 'python'))
or os.path.exists(os.path.join(root, dir, 'Scripts', 'Python.exe'))):
dirs.remove(dir)
# Also don't search through tests
if dir == 'test' or dir == 'tests':
dirs.remove(dir)
filenames.extend([os.path.join(root, dir)
for dir in dirs])
filenames = [f for f in filenames if f.endswith('.egg-info')]
if not filenames:
raise InstallationError('No files/directories in %s (from %s)' % (base, filename))
assert filenames, "No files/directories in %s (from %s)" % (base, filename)
# if we have more than one match, we pick the toplevel one. This can
# easily be the case if there is a dist folder which contains an
# extracted tarball for testing purposes.
if len(filenames) > 1:
filenames.sort(key=lambda x: x.count(os.path.sep) +
(os.path.altsep and
x.count(os.path.altsep) or 0))
self._egg_info_path = os.path.join(base, filenames[0])
return os.path.join(self._egg_info_path, filename)
def egg_info_lines(self, filename):
data = self.egg_info_data(filename)
if not data:
return []
result = []
for line in data.splitlines():
line = line.strip()
if not line or line.startswith('#'):
continue
result.append(line)
return result
def pkg_info(self):
p = FeedParser()
data = self.egg_info_data('PKG-INFO')
if not data:
logger.warn('No PKG-INFO file found in %s' % display_path(self.egg_info_path('PKG-INFO')))
p.feed(data or '')
return p.close()
@property
def dependency_links(self):
return self.egg_info_lines('dependency_links.txt')
_requirements_section_re = re.compile(r'\[(.*?)\]')
def requirements(self, extras=()):
in_extra = None
for line in self.egg_info_lines('requires.txt'):
match = self._requirements_section_re.match(line.lower())
if match:
in_extra = match.group(1)
continue
if in_extra and in_extra not in extras:
logger.debug('skipping extra %s' % in_extra)
# Skip requirement for an extra we aren't requiring
continue
yield line
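# Illustrative sketch (not part of the original pip source): given a
# hypothetical requires.txt written by egg_info such as
#
#     requests>=1.0
#     [security]
#     pyOpenSSL
#
# requirements() yields only 'requests>=1.0', while
# requirements(extras=('security',)) also yields 'pyOpenSSL'. Section names are
# matched after lower-casing the line, so extras should be passed lower case.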
@property
def absolute_versions(self):
for qualifier, version in self.req.specs:
if qualifier == '==':
yield version
@property
def installed_version(self):
return self.pkg_info()['version']
def assert_source_matches_version(self):
assert self.source_dir
version = self.installed_version
if version not in self.req:
logger.warn('Requested %s, but installing version %s' % (self, self.installed_version))
else:
logger.debug('Source in %s has version %s, which satisfies requirement %s'
% (display_path(self.source_dir), version, self))
def update_editable(self, obtain=True):
if not self.url:
logger.info("Cannot update repository at %s; repository location is unknown" % self.source_dir)
return
assert self.editable
assert self.source_dir
if self.url.startswith('file:'):
# Static paths don't get updated
return
assert '+' in self.url, "bad url: %r" % self.url
if not self.update:
return
vc_type, url = self.url.split('+', 1)
backend = vcs.get_backend(vc_type)
if backend:
vcs_backend = backend(self.url)
if obtain:
vcs_backend.obtain(self.source_dir)
else:
vcs_backend.export(self.source_dir)
else:
assert 0, (
'Unexpected version control type (in %s): %s'
% (self.url, vc_type))
def uninstall(self, auto_confirm=False):
"""
Uninstall the distribution currently satisfying this requirement.
Prompts before removing or modifying files unless
``auto_confirm`` is True.
Refuses to delete or modify files outside of ``sys.prefix`` -
thus uninstallation within a virtual environment can only
modify that virtual environment, even if the virtualenv is
linked to global site-packages.
"""
if not self.check_if_exists():
raise UninstallationError("Cannot uninstall requirement %s, not installed" % (self.name,))
dist = self.satisfied_by or self.conflicts_with
paths_to_remove = UninstallPathSet(dist)
pip_egg_info_path = os.path.join(dist.location,
dist.egg_name()) + '.egg-info'
dist_info_path = os.path.join(dist.location,
'-'.join(dist.egg_name().split('-')[:2])
) + '.dist-info'
# workaround for http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=618367
debian_egg_info_path = pip_egg_info_path.replace(
'-py%s' % pkg_resources.PY_MAJOR, '')
easy_install_egg = dist.egg_name() + '.egg'
develop_egg_link = egg_link_path(dist)
pip_egg_info_exists = os.path.exists(pip_egg_info_path)
debian_egg_info_exists = os.path.exists(debian_egg_info_path)
dist_info_exists = os.path.exists(dist_info_path)
if pip_egg_info_exists or debian_egg_info_exists:
# package installed by pip
if pip_egg_info_exists:
egg_info_path = pip_egg_info_path
else:
egg_info_path = debian_egg_info_path
paths_to_remove.add(egg_info_path)
if dist.has_metadata('installed-files.txt'):
for installed_file in dist.get_metadata('installed-files.txt').splitlines():
path = os.path.normpath(os.path.join(egg_info_path, installed_file))
paths_to_remove.add(path)
#FIXME: need a test for this elif block
#occurs with --single-version-externally-managed/--record outside of pip
elif dist.has_metadata('top_level.txt'):
if dist.has_metadata('namespace_packages.txt'):
namespaces = dist.get_metadata('namespace_packages.txt')
else:
namespaces = []
for top_level_pkg in [p for p
in dist.get_metadata('top_level.txt').splitlines()
if p and p not in namespaces]:
path = os.path.join(dist.location, top_level_pkg)
paths_to_remove.add(path)
paths_to_remove.add(path + '.py')
paths_to_remove.add(path + '.pyc')
elif dist.location.endswith(easy_install_egg):
# package installed by easy_install
paths_to_remove.add(dist.location)
easy_install_pth = os.path.join(os.path.dirname(dist.location),
'easy-install.pth')
paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
elif develop_egg_link:
# develop egg
fh = open(develop_egg_link, 'r')
link_pointer = os.path.normcase(fh.readline().strip())
fh.close()
assert (link_pointer == dist.location), 'Egg-link %s does not match installed location of %s (at %s)' % (link_pointer, self.name, dist.location)
paths_to_remove.add(develop_egg_link)
easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
'easy-install.pth')
paths_to_remove.add_pth(easy_install_pth, dist.location)
elif dist_info_exists:
for path in pip.wheel.uninstallation_paths(dist):
paths_to_remove.add(path)
# find distutils scripts= scripts
if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
for script in dist.metadata_listdir('scripts'):
if dist_in_usersite(dist):
bin_dir = bin_user
else:
bin_dir = bin_py
paths_to_remove.add(os.path.join(bin_dir, script))
if sys.platform == 'win32':
paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')
# find console_scripts
if dist.has_metadata('entry_points.txt'):
config = ConfigParser.SafeConfigParser()
config.readfp(FakeFile(dist.get_metadata_lines('entry_points.txt')))
if config.has_section('console_scripts'):
for name, value in config.items('console_scripts'):
if dist_in_usersite(dist):
bin_dir = bin_user
else:
bin_dir = bin_py
paths_to_remove.add(os.path.join(bin_dir, name))
if sys.platform == 'win32':
paths_to_remove.add(os.path.join(bin_dir, name) + '.exe')
paths_to_remove.add(os.path.join(bin_dir, name) + '.exe.manifest')
paths_to_remove.add(os.path.join(bin_dir, name) + '-script.py')
paths_to_remove.remove(auto_confirm)
self.uninstalled = paths_to_remove
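# Illustrative usage sketch (not part of the original pip source); the
# requirement name is hypothetical. Callers pair uninstall() with an explicit
# commit or rollback, as RequirementSet.install() does further down:
#     req = InstallRequirement.from_line('MyProject')
#     req.uninstall(auto_confirm=True)   # stashes files into req.uninstalled
#     req.commit_uninstall()             # or req.rollback_uninstall() on failure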
def rollback_uninstall(self):
if self.uninstalled:
self.uninstalled.rollback()
else:
logger.error("Can't rollback %s, nothing uninstalled."
% (self.project_name,))
def commit_uninstall(self):
if self.uninstalled:
self.uninstalled.commit()
else:
logger.error("Can't commit %s, nothing uninstalled."
% (self.project_name,))
def archive(self, build_dir):
assert self.source_dir
create_archive = True
archive_name = '%s-%s.zip' % (self.name, self.installed_version)
archive_path = os.path.join(build_dir, archive_name)
if os.path.exists(archive_path):
response = ask_path_exists(
'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' %
display_path(archive_path), ('i', 'w', 'b'))
if response == 'i':
create_archive = False
elif response == 'w':
logger.warn('Deleting %s' % display_path(archive_path))
os.remove(archive_path)
elif response == 'b':
dest_file = backup_dir(archive_path)
logger.warn('Backing up %s to %s'
% (display_path(archive_path), display_path(dest_file)))
shutil.move(archive_path, dest_file)
if create_archive:
zip = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED)
dir = os.path.normcase(os.path.abspath(self.source_dir))
for dirpath, dirnames, filenames in os.walk(dir):
if 'pip-egg-info' in dirnames:
dirnames.remove('pip-egg-info')
for dirname in dirnames:
dirname = os.path.join(dirpath, dirname)
name = self._clean_zip_name(dirname, dir)
zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
zipdir.external_attr = 0x1ED << 16 # 0o755
zip.writestr(zipdir, '')
for filename in filenames:
if filename == PIP_DELETE_MARKER_FILENAME:
continue
filename = os.path.join(dirpath, filename)
name = self._clean_zip_name(filename, dir)
zip.write(filename, self.name + '/' + name)
zip.close()
logger.indent -= 2
logger.notify('Saved %s' % display_path(archive_path))
def _clean_zip_name(self, name, prefix):
assert name.startswith(prefix+os.path.sep), (
"name %r doesn't start with prefix %r" % (name, prefix))
name = name[len(prefix)+1:]
name = name.replace(os.path.sep, '/')
return name
def install(self, install_options, global_options=(), root=None):
if self.editable:
self.install_editable(install_options, global_options)
return
if self.is_wheel:
version = pip.wheel.wheel_version(self.source_dir)
pip.wheel.check_compatibility(version, self.name)
self.move_wheel_files(self.source_dir, root=root)
self.install_succeeded = True
return
temp_location = tempfile.mkdtemp('-record', 'pip-')
record_filename = os.path.join(temp_location, 'install-record.txt')
try:
install_args = [sys.executable]
install_args.append('-c')
install_args.append(
"import setuptools, tokenize;__file__=%r;"\
"exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py)
install_args += list(global_options) + ['install','--record', record_filename]
if not self.as_egg:
install_args += ['--single-version-externally-managed']
if root is not None:
install_args += ['--root', root]
if self.pycompile:
install_args += ["--compile"]
else:
install_args += ["--no-compile"]
if running_under_virtualenv():
## FIXME: I'm not sure if this is a reasonable location; probably not
## but we can't put it in the default location, as that is a virtualenv symlink that isn't writable
install_args += ['--install-headers',
os.path.join(sys.prefix, 'include', 'site',
'python' + get_python_version())]
logger.notify('Running setup.py install for %s' % self.name)
logger.indent += 2
try:
call_subprocess(install_args + install_options,
cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False)
finally:
logger.indent -= 2
if not os.path.exists(record_filename):
logger.notify('Record file %s not found' % record_filename)
return
self.install_succeeded = True
if self.as_egg:
# there's no --always-unzip option we can pass to the install command,
# so we are unable to save the installed-files.txt
return
def prepend_root(path):
if root is None or not os.path.isabs(path):
return path
else:
return change_root(root, path)
f = open(record_filename)
for line in f:
line = line.strip()
if line.endswith('.egg-info'):
egg_info_dir = prepend_root(line)
break
else:
logger.warn('Could not find .egg-info directory in install record for %s' % self)
## FIXME: put the record somewhere
## FIXME: should this be an error?
return
f.close()
new_lines = []
f = open(record_filename)
for line in f:
filename = line.strip()
if os.path.isdir(filename):
filename += os.path.sep
new_lines.append(make_path_relative(prepend_root(filename), egg_info_dir))
f.close()
f = open(os.path.join(egg_info_dir, 'installed-files.txt'), 'w')
f.write('\n'.join(new_lines)+'\n')
f.close()
finally:
if os.path.exists(record_filename):
os.remove(record_filename)
os.rmdir(temp_location)
def remove_temporary_source(self):
"""Remove the source files from this requirement, if they are marked
for deletion"""
if self.is_bundle or os.path.exists(self.delete_marker_filename):
logger.info('Removing source in %s' % self.source_dir)
if self.source_dir:
rmtree(self.source_dir)
self.source_dir = None
if self._temp_build_dir and os.path.exists(self._temp_build_dir):
rmtree(self._temp_build_dir)
self._temp_build_dir = None
def install_editable(self, install_options, global_options=()):
logger.notify('Running setup.py develop for %s' % self.name)
logger.indent += 2
try:
## FIXME: should we do --install-headers here too?
call_subprocess(
[sys.executable, '-c',
"import setuptools, tokenize; __file__=%r; exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py]
+ list(global_options) + ['develop', '--no-deps'] + list(install_options),
cwd=self.source_dir, filter_stdout=self._filter_install,
show_stdout=False)
finally:
logger.indent -= 2
self.install_succeeded = True
def _filter_install(self, line):
level = logger.NOTIFY
for regex in [r'^running .*', r'^writing .*', '^creating .*', '^[Cc]opying .*',
r'^reading .*', r"^removing .*\.egg-info' \(and everything under it\)$",
r'^byte-compiling ',
# Not sure what this warning is, but it seems harmless:
r"^warning: manifest_maker: standard file '-c' not found$"]:
if re.search(regex, line.strip()):
level = logger.INFO
break
return (level, line)
def check_if_exists(self):
"""Find an installed distribution that satisfies or conflicts
with this requirement, and set self.satisfied_by or
self.conflicts_with appropriately."""
if self.req is None:
return False
try:
# DISTRIBUTE TO SETUPTOOLS UPGRADE HACK (1 of 3 parts)
# if we've already set distribute as a conflict to setuptools
# then this check has already run before. we don't want it to
# run again, and return False, since it would block the uninstall
# TODO: remove this later
if (self.req.project_name == 'setuptools'
and self.conflicts_with
and self.conflicts_with.project_name == 'distribute'):
return True
else:
self.satisfied_by = pkg_resources.get_distribution(self.req)
except pkg_resources.DistributionNotFound:
return False
except pkg_resources.VersionConflict:
existing_dist = pkg_resources.get_distribution(self.req.project_name)
if self.use_user_site:
if dist_in_usersite(existing_dist):
self.conflicts_with = existing_dist
elif running_under_virtualenv() and dist_in_site_packages(existing_dist):
raise InstallationError("Will not install to the user site because it will lack sys.path precedence to %s in %s"
%(existing_dist.project_name, existing_dist.location))
else:
self.conflicts_with = existing_dist
return True
@property
def is_wheel(self):
return self.url and '.whl' in self.url
@property
def is_bundle(self):
if self._is_bundle is not None:
return self._is_bundle
base = self._temp_build_dir
if not base:
## FIXME: this doesn't seem right:
return False
self._is_bundle = (os.path.exists(os.path.join(base, 'pip-manifest.txt'))
or os.path.exists(os.path.join(base, 'pyinstall-manifest.txt')))
return self._is_bundle
def bundle_requirements(self):
for dest_dir in self._bundle_editable_dirs:
package = os.path.basename(dest_dir)
## FIXME: svnism:
for vcs_backend in vcs.backends:
url = rev = None
vcs_bundle_file = os.path.join(
dest_dir, vcs_backend.bundle_file)
if os.path.exists(vcs_bundle_file):
vc_type = vcs_backend.name
fp = open(vcs_bundle_file)
content = fp.read()
fp.close()
url, rev = vcs_backend().parse_vcs_bundle_file(content)
break
if url:
url = '%s+%s@%s' % (vc_type, url, rev)
else:
url = None
yield InstallRequirement(
package, self, editable=True, url=url,
update=False, source_dir=dest_dir, from_bundle=True)
for dest_dir in self._bundle_build_dirs:
package = os.path.basename(dest_dir)
yield InstallRequirement(package, self,source_dir=dest_dir, from_bundle=True)
def move_bundle_files(self, dest_build_dir, dest_src_dir):
base = self._temp_build_dir
assert base
src_dir = os.path.join(base, 'src')
build_dir = os.path.join(base, 'build')
bundle_build_dirs = []
bundle_editable_dirs = []
for source_dir, dest_dir, dir_collection in [
(src_dir, dest_src_dir, bundle_editable_dirs),
(build_dir, dest_build_dir, bundle_build_dirs)]:
if os.path.exists(source_dir):
for dirname in os.listdir(source_dir):
dest = os.path.join(dest_dir, dirname)
dir_collection.append(dest)
if os.path.exists(dest):
logger.warn('The directory %s (containing package %s) already exists; cannot move source from bundle %s'
% (dest, dirname, self))
continue
if not os.path.exists(dest_dir):
logger.info('Creating directory %s' % dest_dir)
os.makedirs(dest_dir)
shutil.move(os.path.join(source_dir, dirname), dest)
if not os.listdir(source_dir):
os.rmdir(source_dir)
self._temp_build_dir = None
self._bundle_build_dirs = bundle_build_dirs
self._bundle_editable_dirs = bundle_editable_dirs
def move_wheel_files(self, wheeldir, root=None):
move_wheel_files(
self.name, self.req, wheeldir,
user=self.use_user_site,
home=self.target_dir,
root=root,
pycompile=self.pycompile,
)
@property
def delete_marker_filename(self):
assert self.source_dir
return os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)
class Requirements(object):
def __init__(self):
self._keys = []
self._dict = {}
def keys(self):
return self._keys
def values(self):
return [self._dict[key] for key in self._keys]
def __contains__(self, item):
return item in self._keys
def __setitem__(self, key, value):
if key not in self._keys:
self._keys.append(key)
self._dict[key] = value
def __getitem__(self, key):
return self._dict[key]
def __repr__(self):
values = ['%s: %s' % (repr(k), repr(self[k])) for k in self.keys()]
return 'Requirements({%s})' % ', '.join(values)
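# Illustrative sketch (not part of the original pip source): Requirements is a
# minimal insertion-ordered mapping. Keys and values below are hypothetical.
#     reqs = Requirements()
#     reqs['MyProject'] = 'first requirement object'
#     reqs['OtherProject'] = 'second requirement object'
#     reqs.keys()          # -> ['MyProject', 'OtherProject'] (insertion order)
#     'MyProject' in reqs  # -> True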
class RequirementSet(object):
def __init__(self, build_dir, src_dir, download_dir, download_cache=None,
upgrade=False, ignore_installed=False, as_egg=False,
target_dir=None, ignore_dependencies=False,
force_reinstall=False, use_user_site=False, session=None,
pycompile=True, wheel_download_dir=None):
self.build_dir = build_dir
self.src_dir = src_dir
self.download_dir = download_dir
if download_cache:
download_cache = os.path.expanduser(download_cache)
self.download_cache = download_cache
self.upgrade = upgrade
self.ignore_installed = ignore_installed
self.force_reinstall = force_reinstall
self.requirements = Requirements()
# Mapping of alias: real_name
self.requirement_aliases = {}
self.unnamed_requirements = []
self.ignore_dependencies = ignore_dependencies
self.successfully_downloaded = []
self.successfully_installed = []
self.reqs_to_cleanup = []
self.as_egg = as_egg
self.use_user_site = use_user_site
self.target_dir = target_dir #set from --target option
self.session = session or PipSession()
self.pycompile = pycompile
self.wheel_download_dir = wheel_download_dir
def __str__(self):
reqs = [req for req in self.requirements.values()
if not req.comes_from]
reqs.sort(key=lambda req: req.name.lower())
return ' '.join([str(req.req) for req in reqs])
def add_requirement(self, install_req):
name = install_req.name
install_req.as_egg = self.as_egg
install_req.use_user_site = self.use_user_site
install_req.target_dir = self.target_dir
install_req.pycompile = self.pycompile
if not name:
#url or path requirement w/o an egg fragment
self.unnamed_requirements.append(install_req)
else:
if self.has_requirement(name):
raise InstallationError(
'Double requirement given: %s (already in %s, name=%r)'
% (install_req, self.get_requirement(name), name))
self.requirements[name] = install_req
## FIXME: what about other normalizations? E.g., _ vs. -?
if name.lower() != name:
self.requirement_aliases[name.lower()] = name
def has_requirement(self, project_name):
for name in project_name, project_name.lower():
if name in self.requirements or name in self.requirement_aliases:
return True
return False
@property
def has_requirements(self):
return list(self.requirements.values()) or self.unnamed_requirements
@property
def has_editables(self):
if any(req.editable for req in self.requirements.values()):
return True
if any(req.editable for req in self.unnamed_requirements):
return True
return False
@property
def is_download(self):
if self.download_dir:
self.download_dir = os.path.expanduser(self.download_dir)
if os.path.exists(self.download_dir):
return True
else:
logger.fatal('Could not find download directory')
raise InstallationError(
"Could not find or access download directory '%s'"
% display_path(self.download_dir))
return False
def get_requirement(self, project_name):
for name in project_name, project_name.lower():
if name in self.requirements:
return self.requirements[name]
if name in self.requirement_aliases:
return self.requirements[self.requirement_aliases[name]]
raise KeyError("No project with the name %r" % project_name)
def uninstall(self, auto_confirm=False):
for req in self.requirements.values():
req.uninstall(auto_confirm=auto_confirm)
req.commit_uninstall()
def locate_files(self):
## FIXME: duplicates code from prepare_files; relevant code should
## probably be factored out into a separate method
unnamed = list(self.unnamed_requirements)
reqs = list(self.requirements.values())
while reqs or unnamed:
if unnamed:
req_to_install = unnamed.pop(0)
else:
req_to_install = reqs.pop(0)
install_needed = True
if not self.ignore_installed and not req_to_install.editable:
req_to_install.check_if_exists()
if req_to_install.satisfied_by:
if self.upgrade:
#don't uninstall conflict if user install and conflict is not user install
if not (self.use_user_site and not dist_in_usersite(req_to_install.satisfied_by)):
req_to_install.conflicts_with = req_to_install.satisfied_by
req_to_install.satisfied_by = None
else:
install_needed = False
if req_to_install.satisfied_by:
logger.notify('Requirement already satisfied '
'(use --upgrade to upgrade): %s'
% req_to_install)
if req_to_install.editable:
if req_to_install.source_dir is None:
req_to_install.source_dir = req_to_install.build_location(self.src_dir)
elif install_needed:
req_to_install.source_dir = req_to_install.build_location(self.build_dir, not self.is_download)
if req_to_install.source_dir is not None and not os.path.isdir(req_to_install.source_dir):
raise InstallationError('Could not install requirement %s '
'because source folder %s does not exist '
'(perhaps --no-download was used without first running '
'an equivalent install with --no-install?)'
% (req_to_install, req_to_install.source_dir))
def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
"""Prepare process. Create temp directories, download and/or unpack files."""
unnamed = list(self.unnamed_requirements)
reqs = list(self.requirements.values())
while reqs or unnamed:
if unnamed:
req_to_install = unnamed.pop(0)
else:
req_to_install = reqs.pop(0)
install = True
best_installed = False
not_found = None
if not self.ignore_installed and not req_to_install.editable:
req_to_install.check_if_exists()
if req_to_install.satisfied_by:
if self.upgrade:
if not self.force_reinstall and not req_to_install.url:
try:
url = finder.find_requirement(
req_to_install, self.upgrade)
except BestVersionAlreadyInstalled:
best_installed = True
install = False
except DistributionNotFound:
not_found = sys.exc_info()[1]
else:
# Avoid the need to call find_requirement again
req_to_install.url = url.url
if not best_installed:
#don't uninstall conflict if user install and conflict is not user install
if not (self.use_user_site and not dist_in_usersite(req_to_install.satisfied_by)):
req_to_install.conflicts_with = req_to_install.satisfied_by
req_to_install.satisfied_by = None
else:
install = False
if req_to_install.satisfied_by:
if best_installed:
logger.notify('Requirement already up-to-date: %s'
% req_to_install)
else:
logger.notify('Requirement already satisfied '
'(use --upgrade to upgrade): %s'
% req_to_install)
if req_to_install.editable:
logger.notify('Obtaining %s' % req_to_install)
elif install:
if req_to_install.url and req_to_install.url.lower().startswith('file:'):
logger.notify('Unpacking %s' % display_path(url_to_path(req_to_install.url)))
else:
logger.notify('Downloading/unpacking %s' % req_to_install)
logger.indent += 2
try:
is_bundle = False
is_wheel = False
if req_to_install.editable:
if req_to_install.source_dir is None:
location = req_to_install.build_location(self.src_dir)
req_to_install.source_dir = location
else:
location = req_to_install.source_dir
if not os.path.exists(self.build_dir):
_make_build_dir(self.build_dir)
req_to_install.update_editable(not self.is_download)
if self.is_download:
req_to_install.run_egg_info()
req_to_install.archive(self.download_dir)
else:
req_to_install.run_egg_info()
elif install:
##@@ if filesystem packages are not marked
##editable in a req, a non deterministic error
##occurs when the script attempts to unpack the
##build directory
# NB: This call can result in the creation of a temporary build directory
location = req_to_install.build_location(self.build_dir, not self.is_download)
unpack = True
url = None
# In the case where the req comes from a bundle, we should
# assume a build dir exists and move on
if req_to_install.from_bundle:
pass
# If a checkout exists, it's unwise to keep going. version
# inconsistencies are logged later, but do not fail the
# installation.
elif os.path.exists(os.path.join(location, 'setup.py')):
raise PreviousBuildDirError(textwrap.dedent("""
pip can't proceed with requirement '%s' due to a pre-existing build directory.
location: %s
This is likely due to a previous installation that failed.
pip is being responsible and not assuming it can delete this.
Please delete it and try again.
""" % (req_to_install, location)))
else:
## FIXME: this won't upgrade when there's an existing package unpacked in `location`
if req_to_install.url is None:
if not_found:
raise not_found
url = finder.find_requirement(req_to_install, upgrade=self.upgrade)
else:
## FIXME: should req_to_install.url already be a link?
url = Link(req_to_install.url)
assert url
if url:
try:
if (
url.filename.endswith(wheel_ext)
and self.wheel_download_dir
):
# when doing 'pip wheel`
download_dir = self.wheel_download_dir
do_download = True
else:
download_dir = self.download_dir
do_download = self.is_download
self.unpack_url(
url, location, download_dir,
do_download,
)
except HTTPError as exc:
logger.fatal(
'Could not install requirement %s because '
'of error %s' % (req_to_install, exc)
)
raise InstallationError(
'Could not install requirement %s because of HTTP error %s for URL %s'
% (req_to_install, exc, url))
else:
unpack = False
if unpack:
is_bundle = req_to_install.is_bundle
is_wheel = url and url.filename.endswith(wheel_ext)
if is_bundle:
req_to_install.move_bundle_files(self.build_dir, self.src_dir)
for subreq in req_to_install.bundle_requirements():
reqs.append(subreq)
self.add_requirement(subreq)
elif self.is_download:
req_to_install.source_dir = location
if not is_wheel:
# FIXME: see https://github.com/pypa/pip/issues/1112
req_to_install.run_egg_info()
if url and url.scheme in vcs.all_schemes:
req_to_install.archive(self.download_dir)
elif is_wheel:
req_to_install.source_dir = location
req_to_install.url = url.url
else:
req_to_install.source_dir = location
req_to_install.run_egg_info()
if force_root_egg_info:
# We need to run this to make sure that the .egg-info/
# directory is created for packing in the bundle
req_to_install.run_egg_info(force_root_egg_info=True)
req_to_install.assert_source_matches_version()
#@@ sketchy way of identifying packages not grabbed from an index
if bundle and req_to_install.url:
self.copy_to_build_dir(req_to_install)
install = False
# req_to_install.req is only avail after unpack for URL pkgs
# repeat check_if_exists to uninstall-on-upgrade (#14)
if not self.ignore_installed:
req_to_install.check_if_exists()
if req_to_install.satisfied_by:
if self.upgrade or self.ignore_installed:
#don't uninstall conflict if user install and conflict is not user install
if not (self.use_user_site and not dist_in_usersite(req_to_install.satisfied_by)):
req_to_install.conflicts_with = req_to_install.satisfied_by
req_to_install.satisfied_by = None
else:
logger.notify(
'Requirement already satisfied (use '
'--upgrade to upgrade): %s' %
req_to_install
)
install = False
if is_wheel:
dist = list(
pkg_resources.find_distributions(location)
)[0]
if not req_to_install.req:
req_to_install.req = dist.as_requirement()
self.add_requirement(req_to_install)
if not self.ignore_dependencies:
for subreq in dist.requires(
req_to_install.extras):
if self.has_requirement(
subreq.project_name):
continue
subreq = InstallRequirement(str(subreq),
req_to_install)
reqs.append(subreq)
self.add_requirement(subreq)
# sdists
elif not is_bundle:
## FIXME: shouldn't be globally added:
finder.add_dependency_links(req_to_install.dependency_links)
if (req_to_install.extras):
logger.notify("Installing extra requirements: %r" % ','.join(req_to_install.extras))
if not self.ignore_dependencies:
for req in req_to_install.requirements(req_to_install.extras):
try:
name = pkg_resources.Requirement.parse(req).project_name
except ValueError:
e = sys.exc_info()[1]
## FIXME: proper warning
logger.error('Invalid requirement: %r (%s) in requirement %s' % (req, e, req_to_install))
continue
if self.has_requirement(name):
## FIXME: check for conflict
continue
subreq = InstallRequirement(req, req_to_install)
reqs.append(subreq)
self.add_requirement(subreq)
if not self.has_requirement(req_to_install.name):
#'unnamed' requirements will get added here
self.add_requirement(req_to_install)
# cleanup tmp src
if not is_bundle:
if (
self.is_download or
req_to_install._temp_build_dir is not None
):
self.reqs_to_cleanup.append(req_to_install)
if install:
self.successfully_downloaded.append(req_to_install)
if bundle and (req_to_install.url and req_to_install.url.startswith('file:///')):
self.copy_to_build_dir(req_to_install)
finally:
logger.indent -= 2
def cleanup_files(self, bundle=False):
"""Clean up files, remove builds."""
logger.notify('Cleaning up...')
logger.indent += 2
for req in self.reqs_to_cleanup:
req.remove_temporary_source()
remove_dir = []
if self._pip_has_created_build_dir():
remove_dir.append(self.build_dir)
# The source dir of a bundle can always be removed.
# FIXME: not if it pre-existed the bundle!
if bundle:
remove_dir.append(self.src_dir)
for dir in remove_dir:
if os.path.exists(dir):
logger.info('Removing temporary dir %s...' % dir)
rmtree(dir)
logger.indent -= 2
def _pip_has_created_build_dir(self):
return (self.build_dir == build_prefix and
os.path.exists(os.path.join(self.build_dir, PIP_DELETE_MARKER_FILENAME)))
def copy_to_build_dir(self, req_to_install):
target_dir = req_to_install.editable and self.src_dir or self.build_dir
logger.info("Copying %s to %s" % (req_to_install.name, target_dir))
dest = os.path.join(target_dir, req_to_install.name)
shutil.copytree(req_to_install.source_dir, dest)
call_subprocess(["python", "%s/setup.py" % dest, "clean"], cwd=dest,
command_desc='python setup.py clean')
def unpack_url(self, link, location, download_dir=None,
only_download=False):
if download_dir is None:
download_dir = self.download_dir
# non-editable vcs urls
if is_vcs_url(link):
if only_download:
loc = download_dir
else:
loc = location
unpack_vcs_link(link, loc, only_download)
# file urls
elif is_file_url(link):
unpack_file_url(link, location, download_dir)
if only_download:
write_delete_marker_file(location)
# http urls
else:
unpack_http_url(
link,
location,
self.download_cache,
download_dir,
self.session,
)
if only_download:
write_delete_marker_file(location)
def install(self, install_options, global_options=(), *args, **kwargs):
"""Install everything in this set (after having downloaded and unpacked the packages)"""
to_install = [r for r in self.requirements.values()
if not r.satisfied_by]
# DISTRIBUTE TO SETUPTOOLS UPGRADE HACK (2 of 3 parts)
# move the distribute-0.7.X wrapper to the end because it does not
# install a setuptools package. By moving it to the end, we ensure its
# setuptools dependency is handled first, which will provide the
# setuptools package
# TODO: take this out later
distribute_req = pkg_resources.Requirement.parse("distribute>=0.7")
for req in to_install:
if req.name == 'distribute' and req.installed_version in distribute_req:
to_install.remove(req)
to_install.append(req)
if to_install:
logger.notify('Installing collected packages: %s' % ', '.join([req.name for req in to_install]))
logger.indent += 2
try:
for requirement in to_install:
# DISTRIBUTE TO SETUPTOOLS UPGRADE HACK (3 of 3 parts)
# when upgrading from distribute-0.6.X to the new merged
# setuptools in py2, we need to force setuptools to uninstall
# distribute. In py3, which is always using distribute, this
# conversion is already happening in distribute's pkg_resources.
# It's ok *not* to check if setuptools>=0.7 because if someone
# were actually trying to upgrade from distribute to setuptools
# 0.6.X, then all this could do is actually help, although that
# upgrade path was certainly never "supported"
# TODO: remove this later
if requirement.name == 'setuptools':
try:
# only uninstall distribute<0.7. For >=0.7, setuptools
# will also be present, and that's what we need to
# uninstall
distribute_requirement = pkg_resources.Requirement.parse("distribute<0.7")
existing_distribute = pkg_resources.get_distribution("distribute")
if existing_distribute in distribute_requirement:
requirement.conflicts_with = existing_distribute
except pkg_resources.DistributionNotFound:
# distribute wasn't installed, so nothing to do
pass
if requirement.conflicts_with:
logger.notify('Found existing installation: %s'
% requirement.conflicts_with)
logger.indent += 2
try:
requirement.uninstall(auto_confirm=True)
finally:
logger.indent -= 2
try:
requirement.install(install_options, global_options, *args, **kwargs)
except:
# if install did not succeed, rollback previous uninstall
if requirement.conflicts_with and not requirement.install_succeeded:
requirement.rollback_uninstall()
raise
else:
if requirement.conflicts_with and requirement.install_succeeded:
requirement.commit_uninstall()
requirement.remove_temporary_source()
finally:
logger.indent -= 2
self.successfully_installed = to_install
def create_bundle(self, bundle_filename):
## FIXME: can't decide which is better; zip is easier to read
## random files from, but tar.bz2 is smaller and not as lame a
## format.
## FIXME: this file should really include a manifest of the
## packages, maybe some other metadata files. It would make
## it easier to detect as well.
zip = zipfile.ZipFile(bundle_filename, 'w', zipfile.ZIP_DEFLATED)
vcs_dirs = []
for dir, basename in (self.build_dir, 'build'), (self.src_dir, 'src'):
dir = os.path.normcase(os.path.abspath(dir))
for dirpath, dirnames, filenames in os.walk(dir):
for backend in vcs.backends:
vcs_backend = backend()
vcs_url = vcs_rev = None
if vcs_backend.dirname in dirnames:
for vcs_dir in vcs_dirs:
if dirpath.startswith(vcs_dir):
# vcs bundle file already in parent directory
break
else:
vcs_url, vcs_rev = vcs_backend.get_info(
os.path.join(dir, dirpath))
vcs_dirs.append(dirpath)
vcs_bundle_file = vcs_backend.bundle_file
vcs_guide = vcs_backend.guide % {'url': vcs_url,
'rev': vcs_rev}
dirnames.remove(vcs_backend.dirname)
break
if 'pip-egg-info' in dirnames:
dirnames.remove('pip-egg-info')
for dirname in dirnames:
dirname = os.path.join(dirpath, dirname)
name = self._clean_zip_name(dirname, dir)
zip.writestr(basename + '/' + name + '/', '')
for filename in filenames:
if filename == PIP_DELETE_MARKER_FILENAME:
continue
filename = os.path.join(dirpath, filename)
name = self._clean_zip_name(filename, dir)
zip.write(filename, basename + '/' + name)
if vcs_url:
name = os.path.join(dirpath, vcs_bundle_file)
name = self._clean_zip_name(name, dir)
zip.writestr(basename + '/' + name, vcs_guide)
zip.writestr('pip-manifest.txt', self.bundle_requirements())
zip.close()
BUNDLE_HEADER = '''\
# This is a pip bundle file, that contains many source packages
# that can be installed as a group. You can install this like:
# pip this_file.zip
# The rest of the file contains a list of all the packages included:
'''
def bundle_requirements(self):
parts = [self.BUNDLE_HEADER]
for req in [req for req in self.requirements.values()
if not req.comes_from]:
parts.append('%s==%s\n' % (req.name, req.installed_version))
parts.append('# These packages were installed to satisfy the above requirements:\n')
for req in [req for req in self.requirements.values()
if req.comes_from]:
parts.append('%s==%s\n' % (req.name, req.installed_version))
## FIXME: should we do something with self.unnamed_requirements?
return ''.join(parts)
def _clean_zip_name(self, name, prefix):
assert name.startswith(prefix+os.path.sep), (
"name %r doesn't start with prefix %r" % (name, prefix))
name = name[len(prefix)+1:]
name = name.replace(os.path.sep, '/')
return name
def _make_build_dir(build_dir):
os.makedirs(build_dir)
write_delete_marker_file(build_dir)
_scheme_re = re.compile(r'^(http|https|file):', re.I)
def parse_requirements(filename, finder=None, comes_from=None, options=None,
session=None):
if session is None:
session = PipSession()
skip_match = None
skip_regex = options.skip_requirements_regex if options else None
if skip_regex:
skip_match = re.compile(skip_regex)
reqs_file_dir = os.path.dirname(os.path.abspath(filename))
filename, content = get_file_content(filename,
comes_from=comes_from,
session=session,
)
for line_number, line in enumerate(content.splitlines()):
line_number += 1
line = line.strip()
# Remove comments from file
line = re.sub(r"(^|\s)#.*$", "", line)
if not line or line.startswith('#'):
continue
if skip_match and skip_match.search(line):
continue
if line.startswith('-r') or line.startswith('--requirement'):
if line.startswith('-r'):
req_url = line[2:].strip()
else:
req_url = line[len('--requirement'):].strip().strip('=')
if _scheme_re.search(filename):
# Relative to a URL
req_url = urlparse.urljoin(filename, req_url)
elif not _scheme_re.search(req_url):
req_url = os.path.join(os.path.dirname(filename), req_url)
for item in parse_requirements(req_url, finder, comes_from=filename, options=options, session=session):
yield item
elif line.startswith('-Z') or line.startswith('--always-unzip'):
# No longer used, but previously these were used in
# requirement files, so we'll ignore.
pass
elif line.startswith('-f') or line.startswith('--find-links'):
if line.startswith('-f'):
line = line[2:].strip()
else:
line = line[len('--find-links'):].strip().lstrip('=')
## FIXME: it would be nice to keep track of the source of
## the find_links:
# support a find-links local path relative to a requirements file
relative_to_reqs_file = os.path.join(reqs_file_dir, line)
if os.path.exists(relative_to_reqs_file):
line = relative_to_reqs_file
if finder:
finder.find_links.append(line)
elif line.startswith('-i') or line.startswith('--index-url'):
if line.startswith('-i'):
line = line[2:].strip()
else:
line = line[len('--index-url'):].strip().lstrip('=')
if finder:
finder.index_urls = [line]
elif line.startswith('--extra-index-url'):
line = line[len('--extra-index-url'):].strip().lstrip('=')
if finder:
finder.index_urls.append(line)
elif line.startswith('--use-wheel'):
finder.use_wheel = True
elif line.startswith('--no-index'):
finder.index_urls = []
elif line.startswith("--allow-external"):
line = line[len("--allow-external"):].strip().lstrip("=")
finder.allow_external |= set([normalize_name(line).lower()])
elif line.startswith("--allow-all-external"):
finder.allow_all_external = True
# Remove in 1.7
elif line.startswith("--no-allow-external"):
pass
# Remove in 1.7
elif line.startswith("--no-allow-insecure"):
pass
# Remove after 1.7
elif line.startswith("--allow-insecure"):
line = line[len("--allow-insecure"):].strip().lstrip("=")
finder.allow_unverified |= set([normalize_name(line).lower()])
elif line.startswith("--allow-unverified"):
line = line[len("--allow-unverified"):].strip().lstrip("=")
finder.allow_unverified |= set([normalize_name(line).lower()])
else:
comes_from = '-r %s (line %s)' % (filename, line_number)
if line.startswith('-e') or line.startswith('--editable'):
if line.startswith('-e'):
line = line[2:].strip()
else:
line = line[len('--editable'):].strip().lstrip('=')
req = InstallRequirement.from_editable(
line, comes_from=comes_from, default_vcs=options.default_vcs if options else None)
else:
req = InstallRequirement.from_line(line, comes_from, prereleases=getattr(options, "pre", None))
yield req
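# Illustrative sketch (not part of the original pip source) of the directives
# handled above; the file name, index URL, and project names are hypothetical:
#
#     # example-requirements.txt
#     --index-url https://pypi.example.com/simple/
#     --find-links ./local-wheels
#     -r more-requirements.txt
#     -e git+https://example.com/MyProject.git#egg=MyProject
#     requests>=2.0
#
#     for req in parse_requirements('example-requirements.txt', finder=finder):
#         requirement_set.add_requirement(req)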
def _strip_postfix(req):
"""
Strip req postfix ( -dev, 0.2, etc )
"""
## FIXME: use package_to_requirement?
match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req)
if match:
# Strip off -dev, -0.2, etc.
req = match.group(1)
return req
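# Illustrative examples (not part of the original pip source); names are
# hypothetical:
#     _strip_postfix('MyProject-dev')  # -> 'MyProject'
#     _strip_postfix('MyProject-0.2')  # -> 'MyProject'
#     _strip_postfix('MyProject')      # -> 'MyProject' (unchanged)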
def _build_req_from_url(url):
parts = [p for p in url.split('#', 1)[0].split('/') if p]
req = None
if parts[-2] in ('tags', 'branches', 'tag', 'branch'):
req = parts[-3]
elif parts[-1] == 'trunk':
req = parts[-2]
return req
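# Illustrative examples (not part of the original pip source); URLs are
# hypothetical:
#     _build_req_from_url('https://svn.example.com/MyProject/tags/1.0')  # -> 'MyProject'
#     _build_req_from_url('https://svn.example.com/MyProject/trunk')     # -> 'MyProject'
# URLs that match neither the tags/branches nor the trunk layout return None.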
def _build_editable_options(req):
"""
This method generates a dictionary of the query string
parameters contained in a given editable URL.
"""
regexp = re.compile(r"[\?#&](?P<name>[^&=]+)=(?P<value>[^&=]+)")
matched = regexp.findall(req)
if matched:
ret = dict()
for option in matched:
(name, value) = option
if name in ret:
raise Exception("%s option already defined" % name)
ret[name] = value
return ret
return None
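# Illustrative example (not part of the original pip source); the URL is
# hypothetical:
#     _build_editable_options(
#         'git+https://example.com/repo.git#egg=MyProject&subdirectory=src')
#     # -> {'egg': 'MyProject', 'subdirectory': 'src'}
# A URL without any query-string style options returns None; a repeated option
# name raises Exception.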
def parse_editable(editable_req, default_vcs=None):
"""Parses svn+http://blahblah@rev#egg=Foobar into a requirement
(Foobar) and a URL"""
url = editable_req
extras = None
# If a file path is specified with extras, strip off the extras.
m = re.match(r'^(.+)(\[[^\]]+\])$', url)
if m:
url_no_extras = m.group(1)
extras = m.group(2)
else:
url_no_extras = url
if os.path.isdir(url_no_extras):
if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
raise InstallationError("Directory %r is not installable. File 'setup.py' not found." % url_no_extras)
# Treating it as code that has already been checked out
url_no_extras = path_to_url(url_no_extras)
if url_no_extras.lower().startswith('file:'):
if extras:
return None, url_no_extras, pkg_resources.Requirement.parse('__placeholder__' + extras).extras
else:
return None, url_no_extras, None
for version_control in vcs:
if url.lower().startswith('%s:' % version_control):
url = '%s+%s' % (version_control, url)
break
if '+' not in url:
if default_vcs:
url = default_vcs + '+' + url
else:
raise InstallationError(
'%s should either be a path to a local project or a VCS url beginning with svn+, git+, hg+, or bzr+' % editable_req)
vc_type = url.split('+', 1)[0].lower()
if not vcs.get_backend(vc_type):
error_message = 'For --editable=%s only ' % editable_req + \
', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
' is currently supported'
raise InstallationError(error_message)
try:
options = _build_editable_options(editable_req)
except Exception:
message = sys.exc_info()[1]
raise InstallationError(
'--editable=%s error in editable options:%s' % (editable_req, message))
if not options or 'egg' not in options:
req = _build_req_from_url(editable_req)
if not req:
raise InstallationError('--editable=%s is not the right format; it must have #egg=Package' % editable_req)
else:
req = options['egg']
package = _strip_postfix(req)
return package, url, options
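# Illustrative example (not part of the original pip source); the URL is
# hypothetical:
#     parse_editable('git+https://example.com/MyProject.git#egg=MyProject')
#     # -> ('MyProject',
#     #     'git+https://example.com/MyProject.git#egg=MyProject',
#     #     {'egg': 'MyProject'})
# Without '#egg=...', the name is derived from a trunk/tags/branches URL via
# _build_req_from_url(), and InstallationError is raised if that fails too.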
class UninstallPathSet(object):
"""A set of file paths to be removed in the uninstallation of a
requirement."""
def __init__(self, dist):
self.paths = set()
self._refuse = set()
self.pth = {}
self.dist = dist
self.save_dir = None
self._moved_paths = []
def _permitted(self, path):
"""
Return True if the given path is one we are permitted to
remove/modify, False otherwise.
"""
return is_local(path)
def _can_uninstall(self):
if not dist_is_local(self.dist):
logger.notify("Not uninstalling %s at %s, outside environment %s"
% (self.dist.project_name, normalize_path(self.dist.location), sys.prefix))
return False
return True
def add(self, path):
path = normalize_path(path)
if not os.path.exists(path):
return
if self._permitted(path):
self.paths.add(path)
else:
self._refuse.add(path)
# __pycache__ files can show up after 'installed-files.txt' is created, due to imports
if os.path.splitext(path)[1] == '.py' and uses_pycache:
self.add(imp.cache_from_source(path))
def add_pth(self, pth_file, entry):
pth_file = normalize_path(pth_file)
if self._permitted(pth_file):
if pth_file not in self.pth:
self.pth[pth_file] = UninstallPthEntries(pth_file)
self.pth[pth_file].add(entry)
else:
self._refuse.add(pth_file)
def compact(self, paths):
"""Compact a path set to contain the minimal number of paths
necessary to contain all paths in the set. If /a/path/ and
/a/path/to/a/file.txt are both in the set, leave only the
shorter path."""
short_paths = set()
for path in sorted(paths, key=len):
if not any([(path.startswith(shortpath) and
path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
for shortpath in short_paths]):
short_paths.add(path)
return short_paths
def _stash(self, path):
return os.path.join(
self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))
def remove(self, auto_confirm=False):
"""Remove paths in ``self.paths`` with confirmation (unless
``auto_confirm`` is True)."""
if not self._can_uninstall():
return
if not self.paths:
logger.notify("Can't uninstall '%s'. No files were found to uninstall." % self.dist.project_name)
return
logger.notify('Uninstalling %s:' % self.dist.project_name)
logger.indent += 2
paths = sorted(self.compact(self.paths))
try:
if auto_confirm:
response = 'y'
else:
for path in paths:
logger.notify(path)
response = ask('Proceed (y/n)? ', ('y', 'n'))
if self._refuse:
logger.notify('Not removing or modifying (outside of prefix):')
for path in self.compact(self._refuse):
logger.notify(path)
if response == 'y':
self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
prefix='pip-')
for path in paths:
new_path = self._stash(path)
logger.info('Removing file or directory %s' % path)
self._moved_paths.append(path)
renames(path, new_path)
for pth in self.pth.values():
pth.remove()
logger.notify('Successfully uninstalled %s' % self.dist.project_name)
finally:
logger.indent -= 2
def rollback(self):
"""Rollback the changes previously made by remove()."""
if self.save_dir is None:
logger.error("Can't roll back %s; was not uninstalled" % self.dist.project_name)
return False
logger.notify('Rolling back uninstall of %s' % self.dist.project_name)
for path in self._moved_paths:
tmp_path = self._stash(path)
logger.info('Replacing %s' % path)
renames(tmp_path, path)
# iterate the saved UninstallPthEntries objects, not the file-name keys
for pth in self.pth.values():
pth.rollback()
def commit(self):
"""Remove temporary save dir: rollback will no longer be possible."""
if self.save_dir is not None:
rmtree(self.save_dir)
self.save_dir = None
self._moved_paths = []
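# A condensed sketch of how this class is typically driven during an
# uninstall; `dist` and the path below are placeholders, not real values:
def _example_uninstall_flow(dist, succeeded=True):
    ups = UninstallPathSet(dist)
    ups.add('/site-packages/foo.py')  # collect files owned by the dist
    ups.remove(auto_confirm=True)     # stash them under a temporary save_dir
    if succeeded:
        ups.commit()                  # keep the uninstall, drop the stash
    else:
        ups.rollback()                # undo it, restoring the stashed files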
class UninstallPthEntries(object):
def __init__(self, pth_file):
if not os.path.isfile(pth_file):
raise UninstallationError("Cannot remove entries from nonexistent file %s" % pth_file)
self.file = pth_file
self.entries = set()
self._saved_lines = None
def add(self, entry):
entry = os.path.normcase(entry)
# On Windows, os.path.normcase converts the entry to use
# backslashes. This is correct for entries that describe absolute
# paths outside of site-packages, but all the others use forward
# slashes.
if sys.platform == 'win32' and not os.path.splitdrive(entry)[0]:
entry = entry.replace('\\', '/')
self.entries.add(entry)
def remove(self):
logger.info('Removing pth entries from %s:' % self.file)
fh = open(self.file, 'rb')
# windows uses '\r\n' with py3k, but uses '\n' with py2.x
lines = fh.readlines()
self._saved_lines = lines
fh.close()
if any(b('\r\n') in line for line in lines):
endline = '\r\n'
else:
endline = '\n'
for entry in self.entries:
try:
logger.info('Removing entry: %s' % entry)
lines.remove(b(entry + endline))
except ValueError:
pass
fh = open(self.file, 'wb')
fh.writelines(lines)
fh.close()
def rollback(self):
if self._saved_lines is None:
logger.error('Cannot roll back changes to %s, none were made' % self.file)
return False
logger.info('Rolling %s back to previous state' % self.file)
fh = open(self.file, 'wb')
fh.writelines(self._saved_lines)
fh.close()
return True
class FakeFile(object):
"""Wrap a list of lines in an object with readline() to make
ConfigParser happy."""
def __init__(self, lines):
self._gen = (l for l in lines)
def readline(self):
try:
try:
return next(self._gen)
except NameError:
return self._gen.next()
except StopIteration:
return ''
def __iter__(self):
return self._gen
| apache-2.0 | 6,842,031,458,859,234,000 | 42.271362 | 184 | 0.532068 | false |
pgonda/servo | tests/wpt/css-tests/tools/pywebsocket/src/test/test_handshake_hybi.py | 413 | 22552 | #!/usr/bin/env python
#
# Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for handshake module."""
import unittest
import set_sys_path # Update sys.path to locate mod_pywebsocket module.
from mod_pywebsocket import common
from mod_pywebsocket.handshake._base import AbortedByUserException
from mod_pywebsocket.handshake._base import HandshakeException
from mod_pywebsocket.handshake._base import VersionException
from mod_pywebsocket.handshake.hybi import Handshaker
import mock
class RequestDefinition(object):
"""A class for holding data for constructing opening handshake strings for
testing the opening handshake processor.
"""
def __init__(self, method, uri, headers):
self.method = method
self.uri = uri
self.headers = headers
def _create_good_request_def():
return RequestDefinition(
'GET', '/demo',
{'Host': 'server.example.com',
'Upgrade': 'websocket',
'Connection': 'Upgrade',
'Sec-WebSocket-Key': 'dGhlIHNhbXBsZSBub25jZQ==',
'Sec-WebSocket-Version': '13',
'Origin': 'http://example.com'})
def _create_request(request_def):
conn = mock.MockConn('')
return mock.MockRequest(
method=request_def.method,
uri=request_def.uri,
headers_in=request_def.headers,
connection=conn)
def _create_handshaker(request):
handshaker = Handshaker(request, mock.MockDispatcher())
return handshaker
class SubprotocolChoosingDispatcher(object):
"""A dispatcher for testing. This dispatcher sets the i-th subprotocol
of requested ones to ws_protocol where i is given on construction as index
argument. If index is negative, default_value will be set to ws_protocol.
"""
def __init__(self, index, default_value=None):
self.index = index
self.default_value = default_value
def do_extra_handshake(self, conn_context):
if self.index >= 0:
conn_context.ws_protocol = conn_context.ws_requested_protocols[
self.index]
else:
conn_context.ws_protocol = self.default_value
def transfer_data(self, conn_context):
pass
class HandshakeAbortedException(Exception):
pass
class AbortingDispatcher(object):
"""A dispatcher for testing. This dispatcher raises an exception in
do_extra_handshake to reject the request.
"""
def do_extra_handshake(self, conn_context):
raise HandshakeAbortedException('An exception to reject the request')
def transfer_data(self, conn_context):
pass
class AbortedByUserDispatcher(object):
"""A dispatcher for testing. This dispatcher raises an
AbortedByUserException in do_extra_handshake to reject the request.
"""
def do_extra_handshake(self, conn_context):
raise AbortedByUserException('An AbortedByUserException to reject the '
'request')
def transfer_data(self, conn_context):
pass
_EXPECTED_RESPONSE = (
'HTTP/1.1 101 Switching Protocols\r\n'
'Upgrade: websocket\r\n'
'Connection: Upgrade\r\n'
'Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\r\n\r\n')
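# How the Sec-WebSocket-Accept value above is derived (RFC 6455): the client's
# Sec-WebSocket-Key is concatenated with a fixed GUID, SHA-1 hashed, then
# base64-encoded. This sketch only restates what the handshake module computes
# internally for the sample key used in these tests:
def _example_compute_accept(key='dGhlIHNhbXBsZSBub25jZQ=='):
    import base64
    import hashlib
    guid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
    return base64.b64encode(hashlib.sha1(key + guid).digest())
    # -> 's3pPLMBiTxaQ9kYGzzhZRbK+xOo=' for the default key above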
class HandshakerTest(unittest.TestCase):
"""A unittest for draft-ietf-hybi-thewebsocketprotocol-06 and later
handshake processor.
"""
def test_do_handshake(self):
request = _create_request(_create_good_request_def())
dispatcher = mock.MockDispatcher()
handshaker = Handshaker(request, dispatcher)
handshaker.do_handshake()
self.assertTrue(dispatcher.do_extra_handshake_called)
self.assertEqual(
_EXPECTED_RESPONSE, request.connection.written_data())
self.assertEqual('/demo', request.ws_resource)
self.assertEqual('http://example.com', request.ws_origin)
self.assertEqual(None, request.ws_protocol)
self.assertEqual(None, request.ws_extensions)
self.assertEqual(common.VERSION_HYBI_LATEST, request.ws_version)
def test_do_handshake_with_extra_headers(self):
request_def = _create_good_request_def()
# Add headers not related to WebSocket opening handshake.
request_def.headers['FooKey'] = 'BarValue'
request_def.headers['EmptyKey'] = ''
request = _create_request(request_def)
handshaker = _create_handshaker(request)
handshaker.do_handshake()
self.assertEqual(
_EXPECTED_RESPONSE, request.connection.written_data())
def test_do_handshake_with_capitalized_value(self):
request_def = _create_good_request_def()
request_def.headers['upgrade'] = 'WEBSOCKET'
request = _create_request(request_def)
handshaker = _create_handshaker(request)
handshaker.do_handshake()
self.assertEqual(
_EXPECTED_RESPONSE, request.connection.written_data())
request_def = _create_good_request_def()
request_def.headers['Connection'] = 'UPGRADE'
request = _create_request(request_def)
handshaker = _create_handshaker(request)
handshaker.do_handshake()
self.assertEqual(
_EXPECTED_RESPONSE, request.connection.written_data())
def test_do_handshake_with_multiple_connection_values(self):
request_def = _create_good_request_def()
request_def.headers['Connection'] = 'Upgrade, keep-alive, , '
request = _create_request(request_def)
handshaker = _create_handshaker(request)
handshaker.do_handshake()
self.assertEqual(
_EXPECTED_RESPONSE, request.connection.written_data())
def test_aborting_handshake(self):
handshaker = Handshaker(
_create_request(_create_good_request_def()),
AbortingDispatcher())
# do_extra_handshake raises an exception. Check that it's not caught by
# do_handshake.
self.assertRaises(HandshakeAbortedException, handshaker.do_handshake)
def test_do_handshake_with_protocol(self):
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Protocol'] = 'chat, superchat'
request = _create_request(request_def)
handshaker = Handshaker(request, SubprotocolChoosingDispatcher(0))
handshaker.do_handshake()
EXPECTED_RESPONSE = (
'HTTP/1.1 101 Switching Protocols\r\n'
'Upgrade: websocket\r\n'
'Connection: Upgrade\r\n'
'Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\r\n'
'Sec-WebSocket-Protocol: chat\r\n\r\n')
self.assertEqual(EXPECTED_RESPONSE, request.connection.written_data())
self.assertEqual('chat', request.ws_protocol)
def test_do_handshake_protocol_not_in_request_but_in_response(self):
request_def = _create_good_request_def()
request = _create_request(request_def)
handshaker = Handshaker(
request, SubprotocolChoosingDispatcher(-1, 'foobar'))
# No request has been made but ws_protocol is set. HandshakeException
# must be raised.
self.assertRaises(HandshakeException, handshaker.do_handshake)
def test_do_handshake_with_protocol_no_protocol_selection(self):
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Protocol'] = 'chat, superchat'
request = _create_request(request_def)
handshaker = _create_handshaker(request)
# ws_protocol is not set. HandshakeException must be raised.
self.assertRaises(HandshakeException, handshaker.do_handshake)
def test_do_handshake_with_extensions(self):
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Extensions'] = (
'permessage-compress; method=deflate, unknown')
EXPECTED_RESPONSE = (
'HTTP/1.1 101 Switching Protocols\r\n'
'Upgrade: websocket\r\n'
'Connection: Upgrade\r\n'
'Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\r\n'
'Sec-WebSocket-Extensions: permessage-compress; method=deflate\r\n'
'\r\n')
request = _create_request(request_def)
handshaker = _create_handshaker(request)
handshaker.do_handshake()
self.assertEqual(EXPECTED_RESPONSE, request.connection.written_data())
self.assertEqual(1, len(request.ws_extensions))
extension = request.ws_extensions[0]
self.assertEqual(common.PERMESSAGE_COMPRESSION_EXTENSION,
extension.name())
self.assertEqual(['method'], extension.get_parameter_names())
self.assertEqual('deflate', extension.get_parameter_value('method'))
self.assertEqual(1, len(request.ws_extension_processors))
self.assertEqual(common.PERMESSAGE_COMPRESSION_EXTENSION,
request.ws_extension_processors[0].name())
def test_do_handshake_with_permessage_compress(self):
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Extensions'] = (
'permessage-compress; method=deflate')
request = _create_request(request_def)
handshaker = _create_handshaker(request)
handshaker.do_handshake()
self.assertEqual(1, len(request.ws_extensions))
self.assertEqual(common.PERMESSAGE_COMPRESSION_EXTENSION,
request.ws_extensions[0].name())
self.assertEqual(1, len(request.ws_extension_processors))
self.assertEqual(common.PERMESSAGE_COMPRESSION_EXTENSION,
request.ws_extension_processors[0].name())
def test_do_handshake_with_quoted_extensions(self):
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Extensions'] = (
'permessage-compress; method=deflate, , '
'unknown; e = "mc^2"; ma="\r\n \\\rf "; pv=nrt')
request = _create_request(request_def)
handshaker = _create_handshaker(request)
handshaker.do_handshake()
self.assertEqual(2, len(request.ws_requested_extensions))
first_extension = request.ws_requested_extensions[0]
self.assertEqual('permessage-compress', first_extension.name())
self.assertEqual(['method'], first_extension.get_parameter_names())
self.assertEqual('deflate',
first_extension.get_parameter_value('method'))
second_extension = request.ws_requested_extensions[1]
self.assertEqual('unknown', second_extension.name())
self.assertEqual(
['e', 'ma', 'pv'], second_extension.get_parameter_names())
self.assertEqual('mc^2', second_extension.get_parameter_value('e'))
self.assertEqual(' \rf ', second_extension.get_parameter_value('ma'))
self.assertEqual('nrt', second_extension.get_parameter_value('pv'))
def test_do_handshake_with_optional_headers(self):
request_def = _create_good_request_def()
request_def.headers['EmptyValue'] = ''
request_def.headers['AKey'] = 'AValue'
request = _create_request(request_def)
handshaker = _create_handshaker(request)
handshaker.do_handshake()
self.assertEqual(
'AValue', request.headers_in['AKey'])
self.assertEqual(
'', request.headers_in['EmptyValue'])
def test_abort_extra_handshake(self):
handshaker = Handshaker(
_create_request(_create_good_request_def()),
AbortedByUserDispatcher())
# do_extra_handshake raises an AbortedByUserException. Check that it's
# not caught by do_handshake.
self.assertRaises(AbortedByUserException, handshaker.do_handshake)
def test_do_handshake_with_mux_and_deflate_frame(self):
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Extensions'] = ('%s, %s' % (
common.MUX_EXTENSION,
common.DEFLATE_FRAME_EXTENSION))
request = _create_request(request_def)
handshaker = _create_handshaker(request)
handshaker.do_handshake()
# mux should be rejected.
self.assertEqual(1, len(request.ws_extensions))
self.assertEqual(common.DEFLATE_FRAME_EXTENSION,
request.ws_extensions[0].name())
self.assertEqual(2, len(request.ws_extension_processors))
self.assertEqual(common.MUX_EXTENSION,
request.ws_extension_processors[0].name())
self.assertEqual(common.DEFLATE_FRAME_EXTENSION,
request.ws_extension_processors[1].name())
self.assertFalse(hasattr(request, 'mux_processor'))
def test_do_handshake_with_deflate_frame_and_mux(self):
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Extensions'] = ('%s, %s' % (
common.DEFLATE_FRAME_EXTENSION,
common.MUX_EXTENSION))
request = _create_request(request_def)
handshaker = _create_handshaker(request)
handshaker.do_handshake()
# mux should be rejected.
self.assertEqual(1, len(request.ws_extensions))
first_extension = request.ws_extensions[0]
self.assertEqual(common.DEFLATE_FRAME_EXTENSION,
first_extension.name())
self.assertEqual(2, len(request.ws_extension_processors))
self.assertEqual(common.DEFLATE_FRAME_EXTENSION,
request.ws_extension_processors[0].name())
self.assertEqual(common.MUX_EXTENSION,
request.ws_extension_processors[1].name())
self.assertFalse(hasattr(request, 'mux'))
def test_do_handshake_with_permessage_compress_and_mux(self):
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Extensions'] = (
'%s; method=deflate, %s' % (
common.PERMESSAGE_COMPRESSION_EXTENSION,
common.MUX_EXTENSION))
request = _create_request(request_def)
handshaker = _create_handshaker(request)
handshaker.do_handshake()
self.assertEqual(1, len(request.ws_extensions))
self.assertEqual(common.MUX_EXTENSION,
request.ws_extensions[0].name())
self.assertEqual(2, len(request.ws_extension_processors))
self.assertEqual(common.PERMESSAGE_COMPRESSION_EXTENSION,
request.ws_extension_processors[0].name())
self.assertEqual(common.MUX_EXTENSION,
request.ws_extension_processors[1].name())
self.assertTrue(hasattr(request, 'mux_processor'))
self.assertTrue(request.mux_processor.is_active())
mux_extensions = request.mux_processor.extensions()
self.assertEqual(1, len(mux_extensions))
self.assertEqual(common.PERMESSAGE_COMPRESSION_EXTENSION,
mux_extensions[0].name())
def test_do_handshake_with_mux_and_permessage_compress(self):
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Extensions'] = (
'%s, %s; method=deflate' % (
common.MUX_EXTENSION,
common.PERMESSAGE_COMPRESSION_EXTENSION))
request = _create_request(request_def)
handshaker = _create_handshaker(request)
handshaker.do_handshake()
# mux should be rejected.
self.assertEqual(1, len(request.ws_extensions))
first_extension = request.ws_extensions[0]
self.assertEqual(common.PERMESSAGE_COMPRESSION_EXTENSION,
first_extension.name())
self.assertEqual(2, len(request.ws_extension_processors))
self.assertEqual(common.MUX_EXTENSION,
request.ws_extension_processors[0].name())
self.assertEqual(common.PERMESSAGE_COMPRESSION_EXTENSION,
request.ws_extension_processors[1].name())
self.assertFalse(hasattr(request, 'mux_processor'))
def test_bad_requests(self):
bad_cases = [
('HTTP request',
RequestDefinition(
'GET', '/demo',
{'Host': 'www.google.com',
'User-Agent':
'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5;'
' en-US; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3'
' GTB6 GTBA',
'Accept':
'text/html,application/xhtml+xml,application/xml;q=0.9,'
'*/*;q=0.8',
'Accept-Language': 'en-us,en;q=0.5',
'Accept-Encoding': 'gzip,deflate',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
'Keep-Alive': '300',
'Connection': 'keep-alive'}), None, True)]
request_def = _create_good_request_def()
request_def.method = 'POST'
bad_cases.append(('Wrong method', request_def, None, True))
request_def = _create_good_request_def()
del request_def.headers['Host']
bad_cases.append(('Missing Host', request_def, None, True))
request_def = _create_good_request_def()
del request_def.headers['Upgrade']
bad_cases.append(('Missing Upgrade', request_def, None, True))
request_def = _create_good_request_def()
request_def.headers['Upgrade'] = 'nonwebsocket'
bad_cases.append(('Wrong Upgrade', request_def, None, True))
request_def = _create_good_request_def()
del request_def.headers['Connection']
bad_cases.append(('Missing Connection', request_def, None, True))
request_def = _create_good_request_def()
request_def.headers['Connection'] = 'Downgrade'
bad_cases.append(('Wrong Connection', request_def, None, True))
request_def = _create_good_request_def()
del request_def.headers['Sec-WebSocket-Key']
bad_cases.append(('Missing Sec-WebSocket-Key', request_def, 400, True))
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Key'] = (
'dGhlIHNhbXBsZSBub25jZQ==garbage')
bad_cases.append(('Wrong Sec-WebSocket-Key (with garbage on the tail)',
request_def, 400, True))
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Key'] = 'YQ==' # BASE64 of 'a'
bad_cases.append(
('Wrong Sec-WebSocket-Key (decoded value is not 16 octets long)',
request_def, 400, True))
request_def = _create_good_request_def()
# The last character right before == must be any of A, Q, w and g.
request_def.headers['Sec-WebSocket-Key'] = (
'AQIDBAUGBwgJCgsMDQ4PEC==')
bad_cases.append(
('Wrong Sec-WebSocket-Key (padding bits are not zero)',
request_def, 400, True))
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Key'] = (
'dGhlIHNhbXBsZSBub25jZQ==,dGhlIHNhbXBsZSBub25jZQ==')
bad_cases.append(
('Wrong Sec-WebSocket-Key (multiple values)',
request_def, 400, True))
request_def = _create_good_request_def()
del request_def.headers['Sec-WebSocket-Version']
bad_cases.append(('Missing Sec-WebSocket-Version', request_def, None,
True))
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Version'] = '3'
bad_cases.append(('Wrong Sec-WebSocket-Version', request_def, None,
False))
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Version'] = '13, 13'
bad_cases.append(('Wrong Sec-WebSocket-Version (multiple values)',
request_def, 400, True))
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Protocol'] = 'illegal\x09protocol'
bad_cases.append(('Illegal Sec-WebSocket-Protocol',
request_def, 400, True))
request_def = _create_good_request_def()
request_def.headers['Sec-WebSocket-Protocol'] = ''
bad_cases.append(('Empty Sec-WebSocket-Protocol',
request_def, 400, True))
for (case_name, request_def, expected_status,
expect_handshake_exception) in bad_cases:
request = _create_request(request_def)
handshaker = Handshaker(request, mock.MockDispatcher())
try:
handshaker.do_handshake()
self.fail('No exception thrown for \'%s\' case' % case_name)
except HandshakeException, e:
self.assertTrue(expect_handshake_exception)
self.assertEqual(expected_status, e.status)
except VersionException, e:
self.assertFalse(expect_handshake_exception)
if __name__ == '__main__':
unittest.main()
# vi:sts=4 sw=4 et
| mpl-2.0 | -4,909,348,532,559,709,000 | 41.23221 | 79 | 0.636618 | false |
spanner888/madparts | coffee/library.py | 1 | 1940 | # (c) 2013 Joost Yervante Damad <[email protected]>
# License: GPL
import os, os.path, glob
import coffee.pycoffee as pycoffee
class Meta:
def __init__(self, meta):
if not 'desc' in meta:
meta['desc'] = ''
if not 'parent' in meta:
meta['parent'] = None
self.meta = meta
for k in meta:
self.__dict__[k] = meta[k]
self.child_ids = []
class Library:
def __init__(self, name, directory):
self.name = name
self.directory = directory
self.exists = os.path.exists(self.directory)
self.is_dir = True
self.readonly = False
if self.exists:
self.is_dir = os.path.isdir(self.directory)
self.readonly = not os.access(self.directory, os.W_OK)
self.meta_list = []
self.fail_list = []
self.meta_by_id = {}
self.scan()
def scan(self, select_id = None):
self.meta_list = []
self.fail_list = []
if not self.exists: return
for path in glob.glob(self.directory + '/*.coffee'):
with open(path) as f:
code = f.read()
meta = pycoffee.eval_coffee_meta(code)
if not 'name' in meta or not 'id' in meta:
self.fail_list.append(meta)
continue
meta['readonly'] = not os.access(path, os.W_OK)
meta['filename'] = path
self.meta_list.append(meta)
self.meta_list = [Meta(meta) for meta in self.meta_list]
self.meta_list.sort(key=lambda x: x.name)
self.meta_by_id = {}
for meta in self.meta_list:
self.meta_by_id[meta.id] = meta
self.meta_by_name = {}
for meta in self.meta_list:
self.meta_by_name[meta.name] = meta
# scan child relationships
found_as_child = []
for meta in self.meta_list:
if meta.parent != None and meta.parent in self.meta_by_id:
self.meta_by_id[meta.parent].child_ids.append(meta.id)
found_as_child.append(meta.id)
self.root_meta_list = filter(lambda meta: meta.id not in found_as_child, self.meta_list)
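# A small usage sketch; the directory path is a placeholder. scan() runs from
# __init__, after which parts can be looked up via meta_by_id / meta_by_name:
def _example_library_usage():
    lib = Library('main', '/path/to/coffee/library')
    if lib.exists and not lib.fail_list:
        return [meta.name for meta in lib.root_meta_list]
    return []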
| gpl-3.0 | -8,598,978,038,622,864,000 | 29.3125 | 92 | 0.612887 | false |
martynovp/edx-platform | cms/djangoapps/contentstore/features/course-updates.py | 95 | 4707 | # pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name
from lettuce import world, step
from selenium.webdriver.common.keys import Keys
from common import type_in_codemirror, get_codemirror_value
from nose.tools import assert_in # pylint: disable=no-name-in-module
@step(u'I go to the course updates page')
def go_to_updates(_step):
menu_css = 'li.nav-course-courseware'
updates_css = 'li.nav-course-courseware-updates a'
world.css_click(menu_css)
world.css_click(updates_css)
@step(u'I add a new update with the text "([^"]*)"$')
def add_update(_step, text):
update_css = 'a.new-update-button'
world.css_click(update_css)
change_text(text)
@step(u'I should see the update "([^"]*)"$')
def check_update(_step, text):
update_css = 'div.update-contents'
update_html = world.css_find(update_css).html
assert_in(text, update_html)
@step(u'I should see the asset update to "([^"]*)"$')
def check_asset_update(_step, asset_file):
update_css = 'div.update-contents'
update_html = world.css_find(update_css).html
asset_key = world.scenario_dict['COURSE'].id.make_asset_key(asset_type='asset', path=asset_file)
assert_in(unicode(asset_key), update_html)
@step(u'I should not see the update "([^"]*)"$')
def check_no_update(_step, text):
update_css = 'div.update-contents'
assert world.is_css_not_present(update_css)
@step(u'I modify the text to "([^"]*)"$')
def modify_update(_step, text):
button_css = 'div.post-preview .edit-button'
world.css_click(button_css)
change_text(text)
@step(u'I change the update from "([^"]*)" to "([^"]*)"$')
def change_existing_update(_step, before, after):
verify_text_in_editor_and_update('div.post-preview .edit-button', before, after)
@step(u'I change the handout from "([^"]*)" to "([^"]*)"$')
def change_existing_handout(_step, before, after):
verify_text_in_editor_and_update('div.course-handouts .edit-button', before, after)
@step(u'I delete the update$')
def click_button(_step):
button_css = 'div.post-preview .delete-button'
world.css_click(button_css)
@step(u'I edit the date to "([^"]*)"$')
def change_date(_step, new_date):
button_css = 'div.post-preview .edit-button'
world.css_click(button_css)
date_css = 'input.date'
date = world.css_find(date_css)
for i in range(len(date.value)):
date._element.send_keys(Keys.END, Keys.BACK_SPACE)
date._element.send_keys(new_date)
save_css = '.save-button'
world.css_click(save_css)
@step(u'I should see the date "([^"]*)"$')
def check_date(_step, date):
date_css = 'span.date-display'
assert_in(date, world.css_html(date_css))
@step(u'I modify the handout to "([^"]*)"$')
def edit_handouts(_step, text):
edit_css = 'div.course-handouts > .edit-button'
world.css_click(edit_css)
change_text(text)
@step(u'I see the handout "([^"]*)"$')
def check_handout(_step, handout):
handout_css = 'div.handouts-content'
assert_in(handout, world.css_html(handout_css))
@step(u'I see the handout image link "([^"]*)"$')
def check_handout_image_link(_step, image_file):
handout_css = 'div.handouts-content'
handout_html = world.css_html(handout_css)
asset_key = world.scenario_dict['COURSE'].id.make_asset_key(asset_type='asset', path=image_file)
assert_in(unicode(asset_key), handout_html)
@step(u'I see the handout error text')
def check_handout_error(_step):
handout_error_css = 'div#handout_error'
assert world.css_has_class(handout_error_css, 'is-shown')
@step(u'I see handout save button disabled')
def check_handout_error(_step):
handout_save_button = 'form.edit-handouts-form .save-button'
assert world.css_has_class(handout_save_button, 'is-disabled')
@step(u'I edit the handout to "([^"]*)"$')
def edit_handouts(_step, text):
type_in_codemirror(0, text)
@step(u'I see handout save button re-enabled')
def check_handout_error(_step):
handout_save_button = 'form.edit-handouts-form .save-button'
assert not world.css_has_class(handout_save_button, 'is-disabled')
@step(u'I save handout edit')
def check_handout_error(_step):
save_css = '.save-button'
world.css_click(save_css)
def change_text(text):
type_in_codemirror(0, text)
save_css = '.save-button'
world.css_click(save_css)
def verify_text_in_editor_and_update(button_css, before, after):
world.css_click(button_css)
text = get_codemirror_value()
assert_in(before, text)
change_text(after)
@step('I see a "(saving|deleting)" notification')
def i_see_a_mini_notification(_step, _type):
saving_css = '.wrapper-notification-mini'
assert world.is_css_present(saving_css)
| agpl-3.0 | -2,502,510,750,937,288,000 | 29.564935 | 100 | 0.673678 | false |
taedla01/MissionPlanner | Lib/unittest/util.py | 60 | 4762 | """Various utility functions."""
from collections import namedtuple, OrderedDict
__unittest = True
_MAX_LENGTH = 80
def safe_repr(obj, short=False):
try:
result = repr(obj)
except Exception:
result = object.__repr__(obj)
if not short or len(result) < _MAX_LENGTH:
return result
return result[:_MAX_LENGTH] + ' [truncated]...'
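# A quick sketch of the truncation behaviour: with short=True, any repr longer
# than _MAX_LENGTH (80) characters is cut and suffixed with ' [truncated]...':
def _example_safe_repr():
    return safe_repr('x' * 200, short=True)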
def strclass(cls):
return "%s.%s" % (cls.__module__, cls.__name__)
def sorted_list_difference(expected, actual):
"""Finds elements in only one or the other of two, sorted input lists.
Returns a two-element tuple of lists. The first list contains those
elements in the "expected" list but not in the "actual" list, and the
second contains those elements in the "actual" list but not in the
"expected" list. Duplicate elements in either input list are ignored.
"""
i = j = 0
missing = []
unexpected = []
while True:
try:
e = expected[i]
a = actual[j]
if e < a:
missing.append(e)
i += 1
while expected[i] == e:
i += 1
elif e > a:
unexpected.append(a)
j += 1
while actual[j] == a:
j += 1
else:
i += 1
try:
while expected[i] == e:
i += 1
finally:
j += 1
while actual[j] == a:
j += 1
except IndexError:
missing.extend(expected[i:])
unexpected.extend(actual[j:])
break
return missing, unexpected
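# Example of the two result lists (inputs must already be sorted; duplicates
# are ignored):
def _example_sorted_list_difference():
    missing, unexpected = sorted_list_difference([1, 2, 4], [2, 3, 4])
    # missing    -> [1]  (present in expected only)
    # unexpected -> [3]  (present in actual only)
    return missing, unexpected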
def unorderable_list_difference(expected, actual, ignore_duplicate=False):
"""Same behavior as sorted_list_difference but
for lists of unorderable items (like dicts).
As it does a linear search per item (remove) it
has O(n*n) performance.
"""
missing = []
unexpected = []
while expected:
item = expected.pop()
try:
actual.remove(item)
except ValueError:
missing.append(item)
if ignore_duplicate:
for lst in expected, actual:
try:
while True:
lst.remove(item)
except ValueError:
pass
if ignore_duplicate:
while actual:
item = actual.pop()
unexpected.append(item)
try:
while True:
actual.remove(item)
except ValueError:
pass
return missing, unexpected
# anything left in actual is unexpected
return missing, actual
_Mismatch = namedtuple('Mismatch', 'actual expected value')
def _count_diff_all_purpose(actual, expected):
'Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ'
# elements need not be hashable
s, t = list(actual), list(expected)
m, n = len(s), len(t)
NULL = object()
result = []
for i, elem in enumerate(s):
if elem is NULL:
continue
cnt_s = cnt_t = 0
for j in range(i, m):
if s[j] == elem:
cnt_s += 1
s[j] = NULL
for j, other_elem in enumerate(t):
if other_elem == elem:
cnt_t += 1
t[j] = NULL
if cnt_s != cnt_t:
diff = _Mismatch(cnt_s, cnt_t, elem)
result.append(diff)
for i, elem in enumerate(t):
if elem is NULL:
continue
cnt_t = 0
for j in range(i, n):
if t[j] == elem:
cnt_t += 1
t[j] = NULL
diff = _Mismatch(0, cnt_t, elem)
result.append(diff)
return result
def _ordered_count(iterable):
'Return dict of element counts, in the order they were first seen'
c = OrderedDict()
for elem in iterable:
c[elem] = c.get(elem, 0) + 1
return c
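# Example: counts are kept in first-seen order:
def _example_ordered_count():
    return _ordered_count('abracadabra')
    # -> OrderedDict([('a', 5), ('b', 2), ('r', 2), ('c', 1), ('d', 1)])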
def _count_diff_hashable(actual, expected):
'Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ'
# elements must be hashable
s, t = _ordered_count(actual), _ordered_count(expected)
result = []
for elem, cnt_s in s.items():
cnt_t = t.get(elem, 0)
if cnt_s != cnt_t:
diff = _Mismatch(cnt_s, cnt_t, elem)
result.append(diff)
for elem, cnt_t in t.items():
if elem not in s:
diff = _Mismatch(0, cnt_t, elem)
result.append(diff)
return result
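# Example of the Mismatch triples produced when element counts differ:
def _example_count_diff_hashable():
    return _count_diff_hashable(actual=[1, 1, 2], expected=[1, 3])
    # -> [Mismatch(actual=2, expected=1, value=1),
    #     Mismatch(actual=1, expected=0, value=2),
    #     Mismatch(actual=0, expected=1, value=3)]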
| gpl-3.0 | 1,211,435,173,391,829,500 | 28.525641 | 78 | 0.4958 | false |
aristotle-tek/cuny-bdif | AWS/ec2/lib/boto-2.34.0/boto/cloudfront/origin.py | 153 | 6060 | # Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.cloudfront.identity import OriginAccessIdentity
def get_oai_value(origin_access_identity):
if isinstance(origin_access_identity, OriginAccessIdentity):
return origin_access_identity.uri()
else:
return origin_access_identity
class S3Origin(object):
"""
Origin information to associate with the distribution.
If your distribution will use an Amazon S3 origin,
then you use the S3Origin element.
"""
def __init__(self, dns_name=None, origin_access_identity=None):
"""
:param dns_name: The DNS name of your Amazon S3 bucket to
associate with the distribution.
For example: mybucket.s3.amazonaws.com.
:type dns_name: str
:param origin_access_identity: The CloudFront origin access
identity to associate with the
distribution. If you want the
distribution to serve private content,
include this element; if you want the
distribution to serve public content,
remove this element.
:type origin_access_identity: str
"""
self.dns_name = dns_name
self.origin_access_identity = origin_access_identity
def __repr__(self):
return '<S3Origin: %s>' % self.dns_name
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'DNSName':
self.dns_name = value
elif name == 'OriginAccessIdentity':
self.origin_access_identity = value
else:
setattr(self, name, value)
def to_xml(self):
s = ' <S3Origin>\n'
s += ' <DNSName>%s</DNSName>\n' % self.dns_name
if self.origin_access_identity:
val = get_oai_value(self.origin_access_identity)
s += ' <OriginAccessIdentity>%s</OriginAccessIdentity>\n' % val
s += ' </S3Origin>\n'
return s
class CustomOrigin(object):
"""
Origin information to associate with the distribution.
If your distribution will use a non-Amazon S3 origin,
then you use the CustomOrigin element.
"""
def __init__(self, dns_name=None, http_port=80, https_port=443,
origin_protocol_policy=None):
"""
:param dns_name: The DNS name of the custom origin server to
associate with the distribution.
For example: www.example.com.
:type dns_name: str
:param http_port: The HTTP port the custom origin listens on.
:type http_port: int
:param https_port: The HTTPS port the custom origin listens on.
:type https_port: int
:param origin_protocol_policy: The origin protocol policy to
apply to your origin. If you
specify http-only, CloudFront
will use HTTP only to access the origin.
If you specify match-viewer, CloudFront
will fetch from your origin using HTTP
or HTTPS, based on the protocol of the
viewer request.
:type origin_protocol_policy: str
"""
self.dns_name = dns_name
self.http_port = http_port
self.https_port = https_port
self.origin_protocol_policy = origin_protocol_policy
def __repr__(self):
return '<CustomOrigin: %s>' % self.dns_name
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'DNSName':
self.dns_name = value
elif name == 'HTTPPort':
try:
self.http_port = int(value)
except ValueError:
self.http_port = value
elif name == 'HTTPSPort':
try:
self.https_port = int(value)
except ValueError:
self.https_port = value
elif name == 'OriginProtocolPolicy':
self.origin_protocol_policy = value
else:
setattr(self, name, value)
def to_xml(self):
s = ' <CustomOrigin>\n'
s += ' <DNSName>%s</DNSName>\n' % self.dns_name
s += ' <HTTPPort>%d</HTTPPort>\n' % self.http_port
s += ' <HTTPSPort>%d</HTTPSPort>\n' % self.https_port
s += ' <OriginProtocolPolicy>%s</OriginProtocolPolicy>\n' % self.origin_protocol_policy
s += ' </CustomOrigin>\n'
return s
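# A minimal sketch of how the two origin classes are used when assembling a
# DistributionConfig; the bucket and domain names are made-up examples:
def _example_origins():
    s3 = S3Origin(dns_name='mybucket.s3.amazonaws.com')
    custom = CustomOrigin(dns_name='origin.example.com',
                          origin_protocol_policy='match-viewer')
    # Each origin contributes its own XML fragment to the distribution config:
    return s3.to_xml() + custom.to_xml()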
| mit | -6,619,549,686,793,587,000 | 39.4 | 98 | 0.578548 | false |
xuxiao19910803/edx-platform | cms/djangoapps/contentstore/tests/test_clone_course.py | 147 | 6812 | """
Unit tests for cloning a course between the same and different module stores.
"""
import json
from django.conf import settings
from opaque_keys.edx.locator import CourseLocator
from xmodule.modulestore import ModuleStoreEnum, EdxJSONEncoder
from contentstore.tests.utils import CourseTestCase
from contentstore.tasks import rerun_course
from student.auth import has_course_author_access
from course_action_state.models import CourseRerunState
from course_action_state.managers import CourseRerunUIStateManager
from mock import patch, Mock
from xmodule.contentstore.content import StaticContent
from xmodule.contentstore.django import contentstore
from xmodule.modulestore.tests.factories import CourseFactory
TEST_DATA_DIR = settings.COMMON_TEST_DATA_ROOT
class CloneCourseTest(CourseTestCase):
"""
Unit tests for cloning a course
"""
def test_clone_course(self):
"""Tests cloning of a course as follows: XML -> Mongo (+ data) -> Mongo -> Split -> Split"""
# 1. import and populate test toy course
mongo_course1_id = self.import_and_populate_course()
# 2. clone course (mongo -> mongo)
# TODO - This is currently failing since clone_course doesn't handle Private content - fails on Publish
# mongo_course2_id = SlashSeparatedCourseKey('edX2', 'toy2', '2013_Fall')
# self.store.clone_course(mongo_course1_id, mongo_course2_id, self.user.id)
# self.assertCoursesEqual(mongo_course1_id, mongo_course2_id)
# self.check_populated_course(mongo_course2_id)
# NOTE: When the code above is uncommented this can be removed.
mongo_course2_id = mongo_course1_id
# 3. clone course (mongo -> split)
with self.store.default_store(ModuleStoreEnum.Type.split):
split_course3_id = CourseLocator(
org="edx3", course="split3", run="2013_Fall"
)
self.store.clone_course(mongo_course2_id, split_course3_id, self.user.id)
self.assertCoursesEqual(mongo_course2_id, split_course3_id)
# 4. clone course (split -> split)
split_course4_id = CourseLocator(
org="edx4", course="split4", run="2013_Fall"
)
self.store.clone_course(split_course3_id, split_course4_id, self.user.id)
self.assertCoursesEqual(split_course3_id, split_course4_id)
def test_space_in_asset_name_for_rerun_course(self):
"""
Tests the scenario where a course that has an asset with a percent sign (%)
in its name re-runs successfully.
"""
org = 'edX'
course_number = 'CS101'
course_run = '2015_Q1'
display_name = 'rerun'
fields = {'display_name': display_name}
course_assets = set([u'subs_Introduction%20To%20New.srt.sjson'])
# Create a course using split modulestore
course = CourseFactory.create(
org=org,
number=course_number,
run=course_run,
display_name=display_name,
default_store=ModuleStoreEnum.Type.split
)
# add an asset
asset_key = course.id.make_asset_key('asset', 'subs_Introduction%20To%20New.srt.sjson')
content = StaticContent(
asset_key, 'Dummy asset', 'application/json', 'dummy data',
)
contentstore().save(content)
# Get & verify all assets of the course
assets, count = contentstore().get_all_content_for_course(course.id)
self.assertEqual(count, 1)
self.assertEqual(set([asset['asset_key'].block_id for asset in assets]), course_assets)
# rerun from split into split
split_rerun_id = CourseLocator(org=org, course=course_number, run="2012_Q2")
CourseRerunState.objects.initiated(course.id, split_rerun_id, self.user, fields['display_name'])
result = rerun_course.delay(
unicode(course.id),
unicode(split_rerun_id),
self.user.id,
json.dumps(fields, cls=EdxJSONEncoder)
)
# Check if re-run was successful
self.assertEqual(result.get(), "succeeded")
rerun_state = CourseRerunState.objects.find_first(course_key=split_rerun_id)
self.assertEqual(rerun_state.state, CourseRerunUIStateManager.State.SUCCEEDED)
def test_rerun_course(self):
"""
Unit tests for :meth: `contentstore.tasks.rerun_course`
"""
mongo_course1_id = self.import_and_populate_course()
# rerun from mongo into split
split_course3_id = CourseLocator(
org="edx3", course="split3", run="rerun_test"
)
# Mark the action as initiated
fields = {'display_name': 'rerun'}
CourseRerunState.objects.initiated(mongo_course1_id, split_course3_id, self.user, fields['display_name'])
result = rerun_course.delay(unicode(mongo_course1_id), unicode(split_course3_id), self.user.id,
json.dumps(fields, cls=EdxJSONEncoder))
self.assertEqual(result.get(), "succeeded")
self.assertTrue(has_course_author_access(self.user, split_course3_id), "Didn't grant access")
rerun_state = CourseRerunState.objects.find_first(course_key=split_course3_id)
self.assertEqual(rerun_state.state, CourseRerunUIStateManager.State.SUCCEEDED)
# try creating rerunning again to same name and ensure it generates error
result = rerun_course.delay(unicode(mongo_course1_id), unicode(split_course3_id), self.user.id)
self.assertEqual(result.get(), "duplicate course")
# the below will raise an exception if the record doesn't exist
CourseRerunState.objects.find_first(
course_key=split_course3_id,
state=CourseRerunUIStateManager.State.FAILED
)
# try to hit the generic exception catch
with patch('xmodule.modulestore.split_mongo.mongo_connection.MongoConnection.insert_course_index', Mock(side_effect=Exception)):
split_course4_id = CourseLocator(org="edx3", course="split3", run="rerun_fail")
fields = {'display_name': 'total failure'}
CourseRerunState.objects.initiated(split_course3_id, split_course4_id, self.user, fields['display_name'])
result = rerun_course.delay(unicode(split_course3_id), unicode(split_course4_id), self.user.id,
json.dumps(fields, cls=EdxJSONEncoder))
self.assertIn("exception: ", result.get())
self.assertIsNone(self.store.get_course(split_course4_id), "Didn't delete course after error")
CourseRerunState.objects.find_first(
course_key=split_course4_id,
state=CourseRerunUIStateManager.State.FAILED
)
| agpl-3.0 | -6,159,081,650,392,122,000 | 45.657534 | 136 | 0.655314 | false |
mephizzle/wagtail | wagtail/wagtailadmin/tests/test_account_management.py | 25 | 19261 | from __future__ import unicode_literals
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group, Permission
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.core import mail
from wagtail.tests.utils import WagtailTestUtils
from wagtail.wagtailusers.models import UserProfile
class TestAuthentication(TestCase, WagtailTestUtils):
"""
This tests that users can login and logout of the admin interface
"""
def test_login_view(self):
"""
This tests that the login view responds with a login page
"""
# Get login page
response = self.client.get(reverse('wagtailadmin_login'))
# Check that the user received a login page
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/login.html')
def test_login_view_post(self):
"""
This posts user credentials to the login view and checks that
the user was logged in successfully
"""
# Create user to log in with
get_user_model().objects.create_superuser(username='test', email='[email protected]', password='password')
# Post credentials to the login page
response = self.client.post(reverse('wagtailadmin_login'), {
'username': 'test',
'password': 'password',
# NOTE: This is set using a hidden field in reality
'next': reverse('wagtailadmin_home'),
})
# Check that the user was redirected to the dashboard
self.assertRedirects(response, reverse('wagtailadmin_home'))
# Check that the user was logged in
self.assertTrue('_auth_user_id' in self.client.session)
self.assertEqual(str(self.client.session['_auth_user_id']), str(get_user_model().objects.get(username='test').id))
def test_already_logged_in_redirect(self):
"""
This tests that a user who is already logged in is automatically
redirected to the admin dashboard if they try to access the login
page
"""
# Login
self.login()
# Get login page
response = self.client.get(reverse('wagtailadmin_login'))
# Check that the user was redirected to the dashboard
self.assertRedirects(response, reverse('wagtailadmin_home'))
def test_logged_in_as_non_privileged_user_doesnt_redirect(self):
"""
This tests that if the user is logged in but hasn't got permission
to access the admin, they are not redirected to the admin
This tests issue #431
"""
# Login as unprivileged user
get_user_model().objects.create(username='unprivileged', password='123')
self.client.login(username='unprivileged', password='123')
# Get login page
response = self.client.get(reverse('wagtailadmin_login'))
# Check that the user received a login page and was not redirected
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/login.html')
def test_logout(self):
"""
This tests that the user can logout
"""
# Login
self.login()
# Get logout page
response = self.client.get(reverse('wagtailadmin_logout'))
# Check that the user was redirected to the login page
self.assertRedirects(response, reverse('wagtailadmin_login'))
# Check that the user was logged out
self.assertFalse('_auth_user_id' in self.client.session)
def test_not_logged_in_redirect(self):
"""
This tests that a not logged in user is redirected to the
login page
"""
# Get dashboard
response = self.client.get(reverse('wagtailadmin_home'))
# Check that the user was redirected to the login page and that next was set correctly
self.assertRedirects(response, reverse('wagtailadmin_login') + '?next=' + reverse('wagtailadmin_home'))
def test_not_logged_in_redirect_default_settings(self):
"""
This does the same as the above test but checks that it
redirects to the correct place when the user has not set
the LOGIN_URL setting correctly
"""
# Get dashboard with default LOGIN_URL setting
with self.settings(LOGIN_URL='django.contrib.auth.views.login'):
response = self.client.get(reverse('wagtailadmin_home'))
# Check that the user was redirected to the login page and that next was set correctly
# Note: The user will be redirected to 'django.contrib.auth.views.login' but
# this must be the same URL as 'wagtailadmin_login'
self.assertEqual(response.status_code, 302)
self.assertRedirects(response, reverse('wagtailadmin_login') + '?next=' + reverse('wagtailadmin_home'))
class TestAccountSection(TestCase, WagtailTestUtils):
"""
This tests that the accounts section is working
"""
def setUp(self):
self.login()
def test_account_view(self):
"""
This tests that the account view responds with an account page
"""
# Get account page
response = self.client.get(reverse('wagtailadmin_account'))
# Check that the user received an account page
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/account/account.html')
def test_change_password_view(self):
"""
This tests that the change password view responds with a change password page
"""
# Get change password page
response = self.client.get(reverse('wagtailadmin_account_change_password'))
# Check that the user received a change password page
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/account/change_password.html')
def test_change_password_view_post(self):
"""
This posts a new password to the change password view and checks
that the users password was changed
"""
# Post new password to change password page
post_data = {
'new_password1': 'newpassword',
'new_password2': 'newpassword',
}
response = self.client.post(reverse('wagtailadmin_account_change_password'), post_data)
# Check that the user was redirected to the account page
self.assertRedirects(response, reverse('wagtailadmin_account'))
# Check that the password was changed
self.assertTrue(get_user_model().objects.get(username='test').check_password('newpassword'))
def test_change_password_view_post_password_mismatch(self):
"""
This posts a two passwords that don't match to the password change
view and checks that a validation error was raised
"""
# Post new password to change password page
post_data = {
'new_password1': 'newpassword',
'new_password2': 'badpassword',
}
response = self.client.post(reverse('wagtailadmin_account_change_password'), post_data)
# Check that the user wasn't redirected
self.assertEqual(response.status_code, 200)
# Check that a validation error was raised
self.assertTrue('new_password2' in response.context['form'].errors.keys())
self.assertTrue("The two password fields didn't match." in response.context['form'].errors['new_password2'])
# Check that the password was not changed
self.assertTrue(get_user_model().objects.get(username='test').check_password('password'))
def test_notification_preferences_view(self):
"""
This tests that the notification preferences view responds with the
notification preferences page
"""
# Get notification preferences page
response = self.client.get(reverse('wagtailadmin_account_notification_preferences'))
# Check that the user received a notification preferences page
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/account/notification_preferences.html')
def test_notification_preferences_view_post(self):
"""
This posts to the notification preferences view and checks that the
user's profile is updated
"""
# Post new values to the notification preferences page
post_data = {
'submitted_notifications': 'false',
'approved_notifications': 'false',
'rejected_notifications': 'true',
}
response = self.client.post(reverse('wagtailadmin_account_notification_preferences'), post_data)
# Check that the user was redirected to the account page
self.assertRedirects(response, reverse('wagtailadmin_account'))
profile = UserProfile.get_for_user(get_user_model().objects.get(username='test'))
# Check that the notification preferences are as submitted
self.assertFalse(profile.submitted_notifications)
self.assertFalse(profile.approved_notifications)
self.assertTrue(profile.rejected_notifications)
class TestAccountManagementForNonModerator(TestCase, WagtailTestUtils):
"""
Tests of reduced-functionality for editors
"""
def setUp(self):
# Create a non-moderator user
self.submitter = get_user_model().objects.create_user('submitter', '[email protected]', 'password')
self.submitter.groups.add(Group.objects.get(name='Editors'))
self.client.login(username=self.submitter.username, password='password')
def test_notification_preferences_form_is_reduced_for_non_moderators(self):
"""
This tests that a user without publish permissions is not shown the
notification preference for 'submitted' items
"""
response = self.client.get(reverse('wagtailadmin_account_notification_preferences'))
self.assertIn('approved_notifications', response.context['form'].fields.keys())
self.assertIn('rejected_notifications', response.context['form'].fields.keys())
self.assertNotIn('submitted_notifications', response.context['form'].fields.keys())
class TestAccountManagementForAdminOnlyUser(TestCase, WagtailTestUtils):
"""
Tests for users with no edit/publish permissions at all
"""
def setUp(self):
# Create a non-moderator user
admin_only_group = Group.objects.create(name='Admin Only')
admin_only_group.permissions.add(Permission.objects.get(codename='access_admin'))
self.admin_only_user = get_user_model().objects.create_user('admin_only_user', '[email protected]', 'password')
self.admin_only_user.groups.add(admin_only_group)
self.client.login(username=self.admin_only_user.username, password='password')
def test_notification_preferences_view_redirects_for_admin_only_users(self):
"""
Test that the user is not shown the notification preferences view but instead
redirected to the account page
"""
response = self.client.get(reverse('wagtailadmin_account_notification_preferences'))
self.assertRedirects(response, reverse('wagtailadmin_account'))
def test_notification_preferences_link_not_shown_for_admin_only_users(self):
"""
Test that the user is not even shown the link to the notification
preferences view
"""
response = self.client.get(reverse('wagtailadmin_account'))
self.assertEqual(response.context['show_notification_preferences'], False)
self.assertNotContains(response, reverse('wagtailadmin_account_notification_preferences'))
# safety check that checking for absence/presence of urls works
self.assertContains(response, reverse('wagtailadmin_home'))
class TestPasswordReset(TestCase, WagtailTestUtils):
"""
This tests that the password reset is working
"""
def setUp(self):
# Create a user
get_user_model().objects.create_superuser(username='test', email='[email protected]', password='password')
def test_password_reset_view(self):
"""
This tests that the password reset view returns a password reset page
"""
# Get password reset page
response = self.client.get(reverse('wagtailadmin_password_reset'))
# Check that the user received a password reset page
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/account/password_reset/form.html')
def test_password_reset_view_post(self):
"""
This posts an email address to the password reset view and
checks that a password reset email was sent
"""
# Post email address to password reset view
post_data = {
'email': '[email protected]',
}
response = self.client.post(reverse('wagtailadmin_password_reset'), post_data)
# Check that the user was redirected to the done page
self.assertRedirects(response, reverse('wagtailadmin_password_reset_done'))
# Check that a password reset email was sent to the user
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['[email protected]'])
self.assertEqual(mail.outbox[0].subject, "Password reset")
def test_password_reset_view_post_unknown_email(self):
"""
This posts an unknown email address to the password reset view and
checks that the password reset form raises a validation error
"""
post_data = {
'email': '[email protected]',
}
response = self.client.post(reverse('wagtailadmin_password_reset'), post_data)
# Check that the user wasn't redirected
self.assertEqual(response.status_code, 200)
# Check that a validation error was raised
self.assertTrue('__all__' in response.context['form'].errors.keys())
self.assertTrue("This email address is not recognised." in response.context['form'].errors['__all__'])
# Check that an email was not sent
self.assertEqual(len(mail.outbox), 0)
def test_password_reset_view_post_invalid_email(self):
"""
This posts an invalid email address to the password reset view and
checks that the password reset form raises a validation error
"""
post_data = {
'email': 'Hello world!',
}
response = self.client.post(reverse('wagtailadmin_password_reset'), post_data)
# Check that the user wasn't redirected
self.assertEqual(response.status_code, 200)
# Check that a validation error was raised
self.assertTrue('email' in response.context['form'].errors.keys())
self.assertTrue("Enter a valid email address." in response.context['form'].errors['email'])
# Check that an email was not sent
self.assertEqual(len(mail.outbox), 0)
def setup_password_reset_confirm_tests(self):
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode
# Get user
self.user = get_user_model().objects.get(username='test')
# Generate a password reset token
self.password_reset_token = PasswordResetTokenGenerator().make_token(self.user)
# Generate a password reset uid
self.password_reset_uid = urlsafe_base64_encode(force_bytes(self.user.pk))
# Create url_args
self.url_kwargs = dict(uidb64=self.password_reset_uid, token=self.password_reset_token)
def test_password_reset_confirm_view(self):
"""
This tests that the password reset confirm view returns a password reset confirm page
"""
self.setup_password_reset_confirm_tests()
# Get password reset confirm page
response = self.client.get(reverse('wagtailadmin_password_reset_confirm', kwargs=self.url_kwargs))
        # Check that the user received a password reset confirm page
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/account/password_reset/confirm.html')
def test_password_reset_confirm_view_post(self):
"""
This posts a new password to the password reset confirm view and checks
that the users password was changed
"""
self.setup_password_reset_confirm_tests()
# Post new password to change password page
post_data = {
'new_password1': 'newpassword',
'new_password2': 'newpassword',
}
response = self.client.post(reverse('wagtailadmin_password_reset_confirm', kwargs=self.url_kwargs), post_data)
# Check that the user was redirected to the complete page
self.assertRedirects(response, reverse('wagtailadmin_password_reset_complete'))
# Check that the password was changed
self.assertTrue(get_user_model().objects.get(username='test').check_password('newpassword'))
def test_password_reset_confirm_view_post_password_mismatch(self):
"""
        This posts two passwords that don't match to the password reset
confirm view and checks that a validation error was raised
"""
self.setup_password_reset_confirm_tests()
# Post new password to change password page
post_data = {
'new_password1': 'newpassword',
'new_password2': 'badpassword',
}
response = self.client.post(reverse('wagtailadmin_password_reset_confirm', kwargs=self.url_kwargs), post_data)
# Check that the user wasn't redirected
self.assertEqual(response.status_code, 200)
# Check that a validation error was raised
self.assertTrue('new_password2' in response.context['form'].errors.keys())
self.assertTrue("The two password fields didn't match." in response.context['form'].errors['new_password2'])
# Check that the password was not changed
self.assertTrue(get_user_model().objects.get(username='test').check_password('password'))
def test_password_reset_done_view(self):
"""
This tests that the password reset done view returns a password reset done page
"""
# Get password reset done page
response = self.client.get(reverse('wagtailadmin_password_reset_done'))
        # Check that the user received a password reset done page
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/account/password_reset/done.html')
def test_password_reset_complete_view(self):
"""
This tests that the password reset complete view returns a password reset complete page
"""
# Get password reset complete page
response = self.client.get(reverse('wagtailadmin_password_reset_complete'))
        # Check that the user received a password reset complete page
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/account/password_reset/complete.html')
| bsd-3-clause | 5,356,536,825,420,037,000 | 40.690476 | 129 | 0.667047 | false |
amisrs/angular-flask | angular_flask/lib/python2.7/site-packages/requests/packages/urllib3/_collections.py | 68 | 2903 | # urllib3/_collections.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from collections import MutableMapping
from threading import Lock
try: # Python 2.7+
from collections import OrderedDict
except ImportError:
from .packages.ordered_dict import OrderedDict
__all__ = ['RecentlyUsedContainer']
_Null = object()
class RecentlyUsedContainer(MutableMapping):
"""
Provides a thread-safe dict-like container which maintains up to
``maxsize`` keys while throwing away the least-recently-used keys beyond
``maxsize``.
:param maxsize:
Maximum number of recent elements to retain.
:param dispose_func:
Every time an item is evicted from the container,
        ``dispose_func(value)`` is called.
"""
ContainerCls = OrderedDict
def __init__(self, maxsize=10, dispose_func=None):
self._maxsize = maxsize
self.dispose_func = dispose_func
self._container = self.ContainerCls()
self._lock = Lock()
def __getitem__(self, key):
# Re-insert the item, moving it to the end of the eviction line.
with self._lock:
item = self._container.pop(key)
self._container[key] = item
return item
def __setitem__(self, key, value):
evicted_value = _Null
with self._lock:
# Possibly evict the existing value of 'key'
evicted_value = self._container.get(key, _Null)
self._container[key] = value
# If we didn't evict an existing value, we might have to evict the
# least recently used item from the beginning of the container.
if len(self._container) > self._maxsize:
_key, evicted_value = self._container.popitem(last=False)
if self.dispose_func and evicted_value is not _Null:
self.dispose_func(evicted_value)
def __delitem__(self, key):
with self._lock:
value = self._container.pop(key)
if self.dispose_func:
self.dispose_func(value)
def __len__(self):
with self._lock:
return len(self._container)
def __iter__(self):
raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.')
def clear(self):
with self._lock:
# Copy pointers to all values, then wipe the mapping
# under Python 2, this copies the list of values twice :-|
values = list(self._container.values())
self._container.clear()
if self.dispose_func:
for value in values:
self.dispose_func(value)
def keys(self):
with self._lock:
return self._container.keys()
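

# Illustrative usage sketch -- not part of the original urllib3 module. It only
# demonstrates the eviction/dispose behaviour documented in the class docstring
# above; the cleanup callable and stored values are made-up placeholders.
def _recently_used_container_example():  # pragma: no cover
    disposed = []

    def dispose(value):
        # Called once for every value pushed out of the container.
        disposed.append(value)

    cache = RecentlyUsedContainer(maxsize=2, dispose_func=dispose)
    cache['a'] = 1
    cache['b'] = 2
    cache['c'] = 3  # 'a' is the least recently used key, so it is evicted
    assert 'a' not in cache
    assert disposed == [1]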
| mit | -4,805,558,773,128,745,000 | 29.882979 | 92 | 0.622804 | false |
vrenaville/project-service | project_stage_state/project.py | 3 | 1221 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Daniel Reis
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
_TASK_STATE = [
('draft', 'New'),
('open', 'In Progress'),
('pending', 'Pending'),
('done', 'Done'),
('cancelled', 'Cancelled')]
class ProjectTaskType(models.Model):
_inherit = 'project.task.type'
state = fields.Selection(_TASK_STATE, 'State')
| agpl-3.0 | -4,785,380,885,133,112,000 | 34.911765 | 78 | 0.593776 | false |
rmoorman/feedhq | feedhq/storage.py | 1 | 2371 | import tempfile
import os
import errno
from django.conf import settings
from django.core.files import locks
from django.core.files.move import file_move_safe
from django.utils.text import get_valid_filename
from django.core.files.storage import FileSystemStorage
class OverwritingStorage(FileSystemStorage):
"""
File storage that allows overwriting of stored files.
"""
def get_available_name(self, name, max_length=None):
return name
def _save(self, name, content):
"""
Lifted partially from django/core/files/storage.py
"""
full_path = self.path(name)
directory = os.path.dirname(full_path)
if not os.path.exists(directory):
try:
os.makedirs(directory)
except OSError as e:
if e.errno != errno.EEXIST:
raise
if not os.path.isdir(directory):
raise IOError("%s exists and is not a directory." % directory)
# This file has a file path that we can move.
if hasattr(content, 'temporary_file_path'):
temp_data_location = content.temporary_file_path()
else:
tmp_prefix = "tmp_%s" % (get_valid_filename(name), )
temp_data_location = tempfile.mktemp(prefix=tmp_prefix,
dir=self.location)
try:
# This is a normal uploadedfile that we can stream.
# This fun binary flag incantation makes os.open throw an
# OSError if the file already exists before we open it.
fd = os.open(temp_data_location,
os.O_WRONLY | os.O_CREAT |
os.O_EXCL | getattr(os, 'O_BINARY', 0))
locks.lock(fd, locks.LOCK_EX)
for chunk in content.chunks():
os.write(fd, chunk)
locks.unlock(fd)
os.close(fd)
except Exception:
if os.path.exists(temp_data_location):
os.remove(temp_data_location)
raise
file_move_safe(temp_data_location, full_path, allow_overwrite=True)
content.close()
if settings.FILE_UPLOAD_PERMISSIONS is not None:
os.chmod(full_path, settings.FILE_UPLOAD_PERMISSIONS)
return name
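

# Illustrative usage sketch -- not part of the original module. It assumes a
# configured Django settings module (MEDIA_ROOT etc.) and a made-up file name;
# typically the class is wired in globally with
# DEFAULT_FILE_STORAGE = 'feedhq.storage.OverwritingStorage'.
def _overwriting_storage_example():  # pragma: no cover
    from django.core.files.base import ContentFile

    storage = OverwritingStorage()
    first = storage.save('exports/feeds.opml', ContentFile(b'first version'))
    second = storage.save('exports/feeds.opml', ContentFile(b'second version'))
    # Unlike the default storage, no "_1" suffix is generated: the second save
    # silently replaces the first file under the very same name.
    assert first == second == 'exports/feeds.opml'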
| bsd-3-clause | -6,892,540,781,654,032,000 | 35.476923 | 75 | 0.567693 | false |
PhenomX1998/FRACTALX-OP3 | scripts/build-all.py | 162 | 14627 | #! /usr/bin/env python2
# Copyright (c) 2009-2015, The Linux Foundation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of The Linux Foundation nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Build the kernel for all targets using the Android build environment.
from collections import namedtuple
import errno
import glob
from optparse import OptionParser
import os
import re
import shutil
import subprocess
import sys
import threading
import Queue
version = 'build-all.py, version 1.99'
build_dir = '../all-kernels'
make_command = ["vmlinux", "modules", "dtbs"]
all_options = {}
compile64 = os.environ.get('CROSS_COMPILE64')
def error(msg):
sys.stderr.write("error: %s\n" % msg)
def fail(msg):
"""Fail with a user-printed message"""
error(msg)
sys.exit(1)
if not os.environ.get('CROSS_COMPILE'):
fail("CROSS_COMPILE must be set in the environment")
def check_kernel():
"""Ensure that PWD is a kernel directory"""
if (not os.path.isfile('MAINTAINERS') or
not os.path.isfile('arch/arm/mach-msm/Kconfig')):
fail("This doesn't seem to be an MSM kernel dir")
def check_build():
"""Ensure that the build directory is present."""
if not os.path.isdir(build_dir):
try:
os.makedirs(build_dir)
except OSError as exc:
if exc.errno == errno.EEXIST:
pass
else:
raise
failed_targets = []
BuildResult = namedtuple('BuildResult', ['status', 'messages'])
class BuildSequence(namedtuple('BuildSequence', ['log_name', 'short_name', 'steps'])):
def set_width(self, width):
self.width = width
def __enter__(self):
self.log = open(self.log_name, 'w')
def __exit__(self, type, value, traceback):
self.log.close()
def run(self):
self.status = None
messages = ["Building: " + self.short_name]
def printer(line):
text = "[%-*s] %s" % (self.width, self.short_name, line)
messages.append(text)
self.log.write(text)
self.log.write('\n')
for step in self.steps:
st = step.run(printer)
if st:
self.status = BuildResult(self.short_name, messages)
break
if not self.status:
self.status = BuildResult(None, messages)
class BuildTracker:
"""Manages all of the steps necessary to perform a build. The
build consists of one or more sequences of steps. The different
sequences can be processed independently, while the steps within a
sequence must be done in order."""
def __init__(self, parallel_builds):
self.sequence = []
self.lock = threading.Lock()
self.parallel_builds = parallel_builds
def add_sequence(self, log_name, short_name, steps):
self.sequence.append(BuildSequence(log_name, short_name, steps))
def longest_name(self):
longest = 0
for seq in self.sequence:
longest = max(longest, len(seq.short_name))
return longest
def __repr__(self):
return "BuildTracker(%s)" % self.sequence
def run_child(self, seq):
seq.set_width(self.longest)
tok = self.build_tokens.get()
with self.lock:
print "Building:", seq.short_name
with seq:
seq.run()
self.results.put(seq.status)
self.build_tokens.put(tok)
def run(self):
self.longest = self.longest_name()
self.results = Queue.Queue()
children = []
errors = []
self.build_tokens = Queue.Queue()
nthreads = self.parallel_builds
print "Building with", nthreads, "threads"
for i in range(nthreads):
self.build_tokens.put(True)
for seq in self.sequence:
child = threading.Thread(target=self.run_child, args=[seq])
children.append(child)
child.start()
for child in children:
stats = self.results.get()
if all_options.verbose:
with self.lock:
for line in stats.messages:
print line
sys.stdout.flush()
if stats.status:
errors.append(stats.status)
for child in children:
child.join()
if errors:
fail("\n ".join(["Failed targets:"] + errors))
class PrintStep:
"""A step that just prints a message"""
def __init__(self, message):
self.message = message
def run(self, outp):
outp(self.message)
class MkdirStep:
"""A step that makes a directory"""
def __init__(self, direc):
self.direc = direc
def run(self, outp):
outp("mkdir %s" % self.direc)
os.mkdir(self.direc)
class RmtreeStep:
def __init__(self, direc):
self.direc = direc
def run(self, outp):
outp("rmtree %s" % self.direc)
shutil.rmtree(self.direc, ignore_errors=True)
class CopyfileStep:
def __init__(self, src, dest):
self.src = src
self.dest = dest
def run(self, outp):
outp("cp %s %s" % (self.src, self.dest))
shutil.copyfile(self.src, self.dest)
class ExecStep:
def __init__(self, cmd, **kwargs):
self.cmd = cmd
self.kwargs = kwargs
def run(self, outp):
outp("exec: %s" % (" ".join(self.cmd),))
with open('/dev/null', 'r') as devnull:
proc = subprocess.Popen(self.cmd, stdin=devnull,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
**self.kwargs)
stdout = proc.stdout
while True:
line = stdout.readline()
if not line:
break
line = line.rstrip('\n')
outp(line)
result = proc.wait()
if result != 0:
return ('error', result)
else:
return None
class Builder():
def __init__(self, name, defconfig):
self.name = name
self.defconfig = defconfig
self.confname = self.defconfig.split('/')[-1]
# Determine if this is a 64-bit target based on the location
# of the defconfig.
self.make_env = os.environ.copy()
if "/arm64/" in defconfig:
if compile64:
self.make_env['CROSS_COMPILE'] = compile64
else:
fail("Attempting to build 64-bit, without setting CROSS_COMPILE64")
self.make_env['ARCH'] = 'arm64'
else:
self.make_env['ARCH'] = 'arm'
self.make_env['KCONFIG_NOTIMESTAMP'] = 'true'
self.log_name = "%s/log-%s.log" % (build_dir, self.name)
def build(self):
steps = []
dest_dir = os.path.join(build_dir, self.name)
log_name = "%s/log-%s.log" % (build_dir, self.name)
steps.append(PrintStep('Building %s in %s log %s' %
(self.name, dest_dir, log_name)))
if not os.path.isdir(dest_dir):
steps.append(MkdirStep(dest_dir))
defconfig = self.defconfig
dotconfig = '%s/.config' % dest_dir
savedefconfig = '%s/defconfig' % dest_dir
staging_dir = 'install_staging'
modi_dir = '%s' % staging_dir
hdri_dir = '%s/usr' % staging_dir
steps.append(RmtreeStep(os.path.join(dest_dir, staging_dir)))
steps.append(ExecStep(['make', 'O=%s' % dest_dir,
self.confname], env=self.make_env))
if not all_options.updateconfigs:
# Build targets can be dependent upon the completion of
# previous build targets, so build them one at a time.
cmd_line = ['make',
'INSTALL_HDR_PATH=%s' % hdri_dir,
'INSTALL_MOD_PATH=%s' % modi_dir,
'O=%s' % dest_dir]
build_targets = []
for c in make_command:
if re.match(r'^-{1,2}\w', c):
cmd_line.append(c)
else:
build_targets.append(c)
for t in build_targets:
steps.append(ExecStep(cmd_line + [t], env=self.make_env))
# Copy the defconfig back.
if all_options.configs or all_options.updateconfigs:
steps.append(ExecStep(['make', 'O=%s' % dest_dir,
'savedefconfig'], env=self.make_env))
steps.append(CopyfileStep(savedefconfig, defconfig))
return steps
def update_config(file, str):
print 'Updating %s with \'%s\'\n' % (file, str)
with open(file, 'a') as defconfig:
defconfig.write(str + '\n')
def scan_configs():
"""Get the full list of defconfigs appropriate for this tree."""
names = []
arch_pats = (
r'[fm]sm[0-9]*_defconfig',
r'apq*_defconfig',
r'qsd*_defconfig',
r'mdm*_defconfig',
r'mpq*_defconfig',
)
arch64_pats = (
r'msm*_defconfig',
)
for p in arch_pats:
for n in glob.glob('arch/arm/configs/' + p):
name = os.path.basename(n)[:-10]
names.append(Builder(name, n))
if 'CROSS_COMPILE64' in os.environ:
for p in arch64_pats:
for n in glob.glob('arch/arm64/configs/' + p):
name = os.path.basename(n)[:-10] + "-64"
names.append(Builder(name, n))
return names
def build_many(targets):
print "Building %d target(s)" % len(targets)
# To try and make up for the link phase being serial, try to do
# two full builds in parallel. Don't do too many because lots of
# parallel builds tends to use up available memory rather quickly.
parallel = 2
if all_options.jobs and all_options.jobs > 1:
j = max(all_options.jobs / parallel, 2)
make_command.append("-j" + str(j))
tracker = BuildTracker(parallel)
for target in targets:
if all_options.updateconfigs:
update_config(target.defconfig, all_options.updateconfigs)
steps = target.build()
tracker.add_sequence(target.log_name, target.name, steps)
tracker.run()
def main():
global make_command
check_kernel()
check_build()
configs = scan_configs()
usage = ("""
%prog [options] all -- Build all targets
%prog [options] target target ... -- List specific targets
%prog [options] perf -- Build all perf targets
%prog [options] noperf -- Build all non-perf targets""")
parser = OptionParser(usage=usage, version=version)
parser.add_option('--configs', action='store_true',
dest='configs',
help="Copy configs back into tree")
parser.add_option('--list', action='store_true',
dest='list',
help='List available targets')
parser.add_option('-v', '--verbose', action='store_true',
dest='verbose',
help='Output to stdout in addition to log file')
parser.add_option('--oldconfig', action='store_true',
dest='oldconfig',
help='Only process "make oldconfig"')
parser.add_option('--updateconfigs',
dest='updateconfigs',
help="Update defconfigs with provided option setting, "
"e.g. --updateconfigs=\'CONFIG_USE_THING=y\'")
parser.add_option('-j', '--jobs', type='int', dest="jobs",
help="Number of simultaneous jobs")
parser.add_option('-l', '--load-average', type='int',
dest='load_average',
help="Don't start multiple jobs unless load is below LOAD_AVERAGE")
parser.add_option('-k', '--keep-going', action='store_true',
dest='keep_going', default=False,
help="Keep building other targets if a target fails")
parser.add_option('-m', '--make-target', action='append',
help='Build the indicated make target (default: %s)' %
' '.join(make_command))
(options, args) = parser.parse_args()
global all_options
all_options = options
if options.list:
print "Available targets:"
for target in configs:
print " %s" % target.name
sys.exit(0)
if options.oldconfig:
make_command = ["oldconfig"]
elif options.make_target:
make_command = options.make_target
if args == ['all']:
build_many(configs)
elif args == ['perf']:
targets = []
for t in configs:
if "perf" in t.name:
targets.append(t)
build_many(targets)
elif args == ['noperf']:
targets = []
for t in configs:
if "perf" not in t.name:
targets.append(t)
build_many(targets)
elif len(args) > 0:
all_configs = {}
for t in configs:
all_configs[t.name] = t
targets = []
for t in args:
if t not in all_configs:
parser.error("Target '%s' not one of %s" % (t, all_configs.keys()))
targets.append(all_configs[t])
build_many(targets)
else:
parser.error("Must specify a target to build, or 'all'")
if __name__ == "__main__":
main()
| gpl-2.0 | -5,473,670,827,612,744,000 | 33.335681 | 86 | 0.58016 | false |
ritviksahajpal/Py6S | Py6S/SixSHelpers/all_angles.py | 1 | 13499 | # This file is part of Py6S.
#
# Copyright 2012 Robin Wilson and contributors listed in the CONTRIBUTORS file.
#
# Py6S is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Py6S is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Py6S. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
from matplotlib.pyplot import *
import itertools
from multiprocessing.dummy import Pool
import copy
class Angles:
@classmethod
def run360(cls, s, solar_or_view, na=36, nz=10, output_name=None, n=None):
"""Runs Py6S for lots of angles to produce a polar contour plot.
The calls to 6S for each angle will be run in parallel, making this function far faster than simply
running a for loop over all of the angles.
Arguments:
* ``s`` -- A :class:`.SixS` instance configured with all of the parameters you want to run the simulation with
* ``solar_or_view`` -- Set to ``'solar'`` if you want to iterate over the solar zenith/azimuth angles or ``'view'`` if you want to iterate over the view zenith/azimuth angles
* ``output_name`` -- (Optional) The name of the output from the 6S simulation to plot. This should be a string containing exactly what you would put after ``s.outputs`` to print the output. For example `pixel_reflectance`.
* ``na`` -- (Optional) The number of azimuth angles to iterate over to generate the data for the plot (defaults to 36, giving data every 10 degrees)
* ``nz`` -- (Optional) The number of zenith angles to iterate over to generate the data for the plot (defaults to 10, giving data every 10 degrees)
* ``n`` -- (Optional) The number of threads to run in parallel. This defaults to the number of CPU cores in your system, and is unlikely to need changing.
For example::
s = SixS()
s.ground_reflectance = GroundReflectance.HomogeneousWalthall(0.48, 0.50, 2.95, 0.6)
s.geometry.solar_z = 30
s.geometry.solar_a = 0
data = SixSHelpers.Angles.run360(s, 'view', output_name='pixel_reflectance')
"""
results = []
azimuths = np.linspace(0, 360, na)
zeniths = np.linspace(0, 89, nz)
def f(args):
azimuth, zenith = args
s.outputs = None
a = copy.deepcopy(s)
if solar_or_view == 'view':
a.geometry.view_a = azimuth
a.geometry.view_z = zenith
elif solar_or_view == 'solar':
a.geometry.solar_a = azimuth
a.geometry.solar_z = zenith
else:
raise ParameterException("all_angles", "You must choose to vary either the solar or view angle.")
a.run()
if output_name is None:
return a.outputs
else:
return getattr(a.outputs, output_name)
# Run the map
if n is None:
pool = Pool()
else:
pool = Pool(n)
print "Running for many angles - this may take a long time"
results = pool.map(f, itertools.product(azimuths, zeniths))
results = np.array(results)
return (results, azimuths, zeniths, s.geometry.solar_a, s.geometry.solar_z)
@classmethod
def plot360(cls, data, output_name=None, show_sun=True, colorbarlabel=None):
"""Plot the data returned from :meth:`run360` as a polar contour plot, selecting an output if required.
Arguments:
* ``data`` -- The return value from :meth:`run360`
* ``output_name`` -- (Optional) The output name to extract (eg. "pixel_reflectance") if the given data is provided as instances of the Outputs class
* ``show_sun`` -- (Optional) Whether to show the location of the sun on the resulting polar plot.
* ``colorbarlabel`` -- (Optional) The label to use on the color bar shown with the plot
"""
results, azimuths, zeniths, sa, sz = data
if not isinstance(results[0], float):
# The results are not floats, so a float must be extracted from the output
if output_name is None:
raise ParameterException("output_name", "You must specify an output name when plotting data which is given as Outputs instances")
results = cls.extract_output(results, output_name)
fig, ax, cax = cls.plot_polar_contour(results, azimuths, zeniths, colorbarlabel=colorbarlabel)
if show_sun:
ax.autoscale(False)
ax.plot(np.radians(sa), sz, '*', markersize=20, markerfacecolor='yellow', markeredgecolor='red')
show()
return fig, ax
@classmethod
def run_and_plot_360(cls, s, solar_or_view, output_name, show_sun=True, na=36, nz=10, colorbarlabel=None):
"""Runs Py6S for lots of angles to produce a polar contour plot.
Arguments:
* ``s`` -- A :class:`.SixS` instance configured with all of the parameters you want to run the simulation with
* ``solar_or_view`` -- Set to ``'solar'`` if you want to iterate over the solar zenith/azimuth angles or ``'view'`` if you want to iterate over the view zenith/azimuth angles
* ``output_name`` -- The name of the output from SixS to plot. This should be a string containing exactly what you would put after ``s.outputs`` to print the output. For example `pixel_reflectance`.
* ``show_sun`` -- (Optional) Whether to place a marker showing the location of the sun on the contour plot (defaults to True, has no effect when ``solar_or_view`` set to ``'solar'``.)
* ``na`` -- (Optional) The number of azimuth angles to iterate over to generate the data for the plot (defaults to 36, giving data every 10 degrees)
* ``nz`` -- (Optional) The number of zenith angles to iterate over to generate the data for the plot (defaults to 10, giving data every 10 degrees)
* ``colorbarlabel`` -- (Optional) The label to use on the color bar shown with the plot
For example::
s = SixS()
s.ground_reflectance = GroundReflectance.HomogeneousWalthall(0.48, 0.50, 2.95, 0.6)
s.geometry.solar_z = 30
s.geometry.solar_a = 0
SixSHelpers.Angles.run_and_plot_360(s, 'view', 'pixel_reflectance')
"""
if solar_or_view == 'solar':
show_sun = False
res = cls.run360(s, solar_or_view, na, nz)
plot_res = cls.plot360(res, output_name, show_sun, colorbarlabel=colorbarlabel)
return plot_res
@classmethod
def extract_output(cls, results, output_name):
"""Extracts data for one particular SixS output from a list of SixS.Outputs instances.
Basically just a wrapper around a list comprehension.
Arguments:
* ``results`` -- A list of :class:`.SixS.Outputs` instances
* ``output_name`` -- The name of the output to extract. This should be a string containing whatever is put after the `s.outputs` when printing the output, for example `'pixel_reflectance'`.
"""
results_output = [getattr(r, output_name) for r in results]
return results_output
@classmethod
def plot_polar_contour(cls, values, azimuths, zeniths, filled=True, colorbarlabel=""):
"""Plot a polar contour plot, with 0 degrees at the North.
Arguments:
* ``values`` -- A list (or other iterable - eg. a NumPy array) of the values to plot on the contour plot (the `z` values)
* ``azimuths`` -- A list of azimuths (in degrees)
* ``zeniths`` -- A list of zeniths (that is, radii)
* ``filled`` -- (Optional) Whether to plot a filled contour plot, or just the contours (defaults to filled)
* ``yaxislabel`` -- (Optional) The label to use for the colorbar
* ``colorbarlabel`` -- (Optional) The label to use on the color bar shown with the plot
The shapes of these lists are important, and are designed for a particular use case (but should be more generally useful).
The values list should be `len(azimuths) * len(zeniths)` long with data for the first azimuth for all the zeniths, then
the second azimuth for all the zeniths etc.
This is designed to work nicely with data that is produced using a loop as follows::
values = []
for azimuth in azimuths:
for zenith in zeniths:
# Do something and get a result
values.append(result)
After that code the azimuths, zeniths and values lists will be ready to be passed into this function.
"""
theta = np.radians(azimuths)
zeniths = np.array(zeniths)
values = np.array(values)
values = values.reshape(len(azimuths), len(zeniths))
r, theta = np.meshgrid(zeniths, np.radians(azimuths))
fig, ax = subplots(subplot_kw=dict(projection='polar'))
ax.set_theta_zero_location("N")
ax.set_theta_direction(-1)
if filled:
cax = ax.contourf(theta, r, values, 30)
else:
cax = ax.contour(theta, r, values, 30)
cb = fig.colorbar(cax)
cb.set_label(colorbarlabel)
return fig, ax, cax
@classmethod
def run_principal_plane(cls, s, output_name=None, n=None):
"""Runs the given 6S simulation to get the outputs for the solar principal plane.
This function runs the simulation for all zenith angles in the azimuthal line of the sun. For example,
if the solar azimuth is 90 degrees, this function will run simulations for::
Azimuth Zenith
90 85
90 80
90 75
90 70
90 65
90 60
90 55
... ..
90 0
270 5
270 10
270 15
... ..
270 80
270 85
The calls to 6S for each angle will be run in parallel, making this function far faster than simply
running a for loop over each angle.
Arguments:
* ``s`` -- A :class:`.SixS` instance configured with all of the parameters you want to run the simulation with
* ``output_name`` -- (Optional) The output name to extract (eg. "pixel_reflectance") if the given data is provided as instances of the Outputs class
* ``n`` -- (Optional) The number of threads to run in parallel. This defaults to the number of CPU cores in your system, and is unlikely to need changing.
Return values:
A tuple containing zenith angles and the corresponding values or Outputs instances (depending on the arguments given).
The zenith angles returned have been modified so that the zenith angles on the 'sun-side' are positive, and those
on the other side (ie. past the vertical) are negative, for ease of plotting.
"""
# Get the solar azimuth and zenith angles from the SixS instance
sa = s.geometry.solar_a
# Compute the angles in the principal plane
# Get the solar azimuth on the opposite side for the other half of the principal plane
opp_sa = (sa + 180) % 360
# Calculate the first side (the solar zenith angle side)
first_side_z = np.arange(85, -5, -5)
first_side_a = np.repeat(sa, len(first_side_z))
# Calculate the other side
temp = first_side_z[:-1]
second_side_z = temp[::-1] # Reverse array
second_side_a = np.repeat(opp_sa, len(second_side_z))
# Join the two sides together
all_zeniths = np.hstack((first_side_z, second_side_z))
all_zeniths_for_return = np.hstack((first_side_z, -1 * second_side_z))
all_azimuths = np.hstack((first_side_a, second_side_a))
def f(arg):
zenith, azimuth = arg
s.outputs = None
a = copy.deepcopy(s)
a.geometry.view_z = zenith
a.geometry.view_a = azimuth
a.run()
if output_name is None:
return a.outputs
else:
return getattr(a.outputs, output_name)
# Run the map
if n is None:
pool = Pool()
else:
pool = Pool(n)
print "Running for many angles - this may take a long time"
results = pool.map(f, zip(all_zeniths, all_azimuths))
        results = np.array(results)
return all_zeniths_for_return, results
    @classmethod
    def plot_principal_plane(cls, zeniths, values, y_axis_label):
"""Plot the results from a principal plane simulation (eg. a run of :meth:`.run_principal_plane`).
Arguments:
* ``zeniths`` -- A list of view zenith angles in degrees
* ``values`` -- A list of simulated values for each of these angles
* ``y_axis_label`` -- A string to use as the label for the y axis
"""
plot(zeniths, values)
xlabel("View zenith angle (degrees)")
ylabel(y_axis_label)
show()
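

# Illustrative sketch -- not part of the original helpers. It assumes the usual
# `from Py6S import SixS` import and simply chains the two principal-plane
# helpers defined above; 'pixel_reflectance' is one example output name.
def _example_principal_plane_run():  # pragma: no cover
    from Py6S import SixS

    s = SixS()
    s.geometry.solar_z = 30
    s.geometry.solar_a = 0
    zeniths, reflectances = Angles.run_principal_plane(s, output_name='pixel_reflectance')
    Angles.plot_principal_plane(zeniths, reflectances, 'Pixel reflectance')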
| lgpl-3.0 | 589,351,517,914,651,600 | 41.05296 | 230 | 0.621009 | false |
Flamacue/pretix | src/tests/plugins/banktransfer/test_actions.py | 2 | 9548 | import json
from datetime import timedelta
import pytest
from django.utils.timezone import now
from pretix.base.models import (
Event, EventPermission, Item, Order, OrderPosition, Organizer, Quota, User,
)
from pretix.plugins.banktransfer.models import BankImportJob, BankTransaction
@pytest.fixture
def env():
o = Organizer.objects.create(name='Dummy', slug='dummy')
event = Event.objects.create(
organizer=o, name='Dummy', slug='dummy',
date_from=now(), plugins='pretix.plugins.banktransfer'
)
user = User.objects.create_user('[email protected]', 'dummy')
EventPermission.objects.create(user=user, event=event)
o1 = Order.objects.create(
code='1Z3AS', event=event,
status=Order.STATUS_PENDING,
datetime=now(), expires=now() + timedelta(days=10),
total=23, payment_provider='banktransfer'
)
o2 = Order.objects.create(
code='6789Z', event=event,
status=Order.STATUS_CANCELED,
datetime=now(), expires=now() + timedelta(days=10),
total=23, payment_provider='banktransfer'
)
quota = Quota.objects.create(name="Test", size=2, event=event)
item1 = Item.objects.create(event=event, name="Ticket", default_price=23)
quota.items.add(item1)
OrderPosition.objects.create(order=o1, item=item1, variation=None, price=23)
return event, user, o1, o2
@pytest.mark.django_db
def test_discard(env, client):
job = BankImportJob.objects.create(event=env[0])
trans = BankTransaction.objects.create(event=env[0], import_job=job, payer='Foo',
state=BankTransaction.STATE_ERROR,
amount=0, date='unknown')
client.login(email='[email protected]', password='dummy')
r = json.loads(client.post('/control/event/{}/{}/banktransfer/action/'.format(env[0].organizer.slug, env[0].slug), {
'action_{}'.format(trans.pk): 'discard',
}).content.decode('utf-8'))
assert r['status'] == 'ok'
trans.refresh_from_db()
assert trans.state == BankTransaction.STATE_DISCARDED
assert trans.payer == ''
@pytest.mark.django_db
def test_accept_wrong_amount(env, client):
job = BankImportJob.objects.create(event=env[0])
trans = BankTransaction.objects.create(event=env[0], import_job=job, payer='Foo',
state=BankTransaction.STATE_INVALID,
amount=12, date='unknown', order=env[2])
client.login(email='[email protected]', password='dummy')
r = json.loads(client.post('/control/event/{}/{}/banktransfer/action/'.format(env[0].organizer.slug, env[0].slug), {
'action_{}'.format(trans.pk): 'accept',
}).content.decode('utf-8'))
assert r['status'] == 'ok'
trans.refresh_from_db()
assert trans.state == BankTransaction.STATE_VALID
env[2].refresh_from_db()
assert env[2].status == Order.STATUS_PAID
@pytest.mark.django_db
def test_assign_order(env, client):
job = BankImportJob.objects.create(event=env[0])
trans = BankTransaction.objects.create(event=env[0], import_job=job, payer='Foo',
state=BankTransaction.STATE_NOMATCH,
amount=23, date='unknown')
client.login(email='[email protected]', password='dummy')
r = json.loads(client.post('/control/event/{}/{}/banktransfer/action/'.format(env[0].organizer.slug, env[0].slug), {
'action_{}'.format(trans.pk): 'assign:{}'.format(env[2].code),
}).content.decode('utf-8'))
assert r['status'] == 'ok'
trans.refresh_from_db()
assert trans.state == BankTransaction.STATE_VALID
env[2].refresh_from_db()
assert env[2].status == Order.STATUS_PAID
@pytest.mark.django_db
def test_assign_order_unknown(env, client):
job = BankImportJob.objects.create(event=env[0])
trans = BankTransaction.objects.create(event=env[0], import_job=job, payer='Foo',
state=BankTransaction.STATE_NOMATCH,
amount=23, date='unknown')
client.login(email='[email protected]', password='dummy')
r = json.loads(client.post('/control/event/{}/{}/banktransfer/action/'.format(env[0].organizer.slug, env[0].slug), {
'action_{}'.format(trans.pk): 'assign:FOO'
}).content.decode('utf-8'))
assert r['status'] == 'error'
trans.refresh_from_db()
assert trans.state == BankTransaction.STATE_NOMATCH
@pytest.mark.django_db
def test_assign_order_amount_incorrect(env, client):
job = BankImportJob.objects.create(event=env[0])
trans = BankTransaction.objects.create(event=env[0], import_job=job, payer='Foo',
state=BankTransaction.STATE_NOMATCH,
amount=12, date='unknown')
client.login(email='[email protected]', password='dummy')
r = json.loads(client.post('/control/event/{}/{}/banktransfer/action/'.format(env[0].organizer.slug, env[0].slug), {
'action_{}'.format(trans.pk): 'assign:{}'.format(env[2].code)
}).content.decode('utf-8'))
assert r['status'] == 'error'
trans.refresh_from_db()
assert trans.state == BankTransaction.STATE_NOMATCH
@pytest.mark.django_db
def test_comment(env, client):
job = BankImportJob.objects.create(event=env[0])
trans = BankTransaction.objects.create(event=env[0], import_job=job, payer='Foo',
state=BankTransaction.STATE_NOMATCH,
amount=12, date='unknown')
client.login(email='[email protected]', password='dummy')
r = json.loads(client.post('/control/event/{}/{}/banktransfer/action/'.format(env[0].organizer.slug, env[0].slug), {
        'action_{}'.format(trans.pk): 'comment:This is my comment'
}).content.decode('utf-8'))
assert r['status'] == 'ok'
trans.refresh_from_db()
assert trans.comment == 'This is my comment'
assert trans.state == BankTransaction.STATE_NOMATCH
@pytest.mark.django_db
def test_retry_success(env, client):
job = BankImportJob.objects.create(event=env[0])
trans = BankTransaction.objects.create(event=env[0], import_job=job, payer='Foo',
state=BankTransaction.STATE_DUPLICATE,
amount=23, date='unknown', order=env[3])
client.login(email='[email protected]', password='dummy')
env[3].status = Order.STATUS_PENDING
env[3].save()
r = json.loads(client.post('/control/event/{}/{}/banktransfer/action/'.format(env[0].organizer.slug, env[0].slug), {
'action_{}'.format(trans.pk): 'retry',
}).content.decode('utf-8'))
assert r['status'] == 'ok'
trans.refresh_from_db()
assert trans.state == BankTransaction.STATE_VALID
env[3].refresh_from_db()
assert env[3].status == Order.STATUS_PAID
@pytest.mark.django_db
def test_retry_canceled(env, client):
job = BankImportJob.objects.create(event=env[0])
trans = BankTransaction.objects.create(event=env[0], import_job=job, payer='Foo',
state=BankTransaction.STATE_ERROR,
amount=23, date='unknown', order=env[3])
client.login(email='[email protected]', password='dummy')
r = json.loads(client.post('/control/event/{}/{}/banktransfer/action/'.format(env[0].organizer.slug, env[0].slug), {
'action_{}'.format(trans.pk): 'retry',
}).content.decode('utf-8'))
assert r['status'] == 'error'
trans.refresh_from_db()
assert trans.state == BankTransaction.STATE_ERROR
env[3].refresh_from_db()
assert env[3].status == Order.STATUS_CANCELED
@pytest.mark.django_db
def test_retry_refunded(env, client):
job = BankImportJob.objects.create(event=env[0])
trans = BankTransaction.objects.create(event=env[0], import_job=job, payer='Foo',
state=BankTransaction.STATE_ERROR,
amount=23, date='unknown', order=env[3])
client.login(email='[email protected]', password='dummy')
env[3].status = Order.STATUS_REFUNDED
env[3].save()
r = json.loads(client.post('/control/event/{}/{}/banktransfer/action/'.format(env[0].organizer.slug, env[0].slug), {
'action_{}'.format(trans.pk): 'retry',
}).content.decode('utf-8'))
assert r['status'] == 'error'
trans.refresh_from_db()
assert trans.state == BankTransaction.STATE_ERROR
env[3].refresh_from_db()
assert env[3].status == Order.STATUS_REFUNDED
@pytest.mark.django_db
def test_retry_paid(env, client):
job = BankImportJob.objects.create(event=env[0])
trans = BankTransaction.objects.create(event=env[0], import_job=job, payer='Foo',
state=BankTransaction.STATE_ERROR,
amount=23, date='unknown', order=env[3])
client.login(email='[email protected]', password='dummy')
env[3].status = Order.STATUS_PAID
env[3].save()
r = json.loads(client.post('/control/event/{}/{}/banktransfer/action/'.format(env[0].organizer.slug, env[0].slug), {
'action_{}'.format(trans.pk): 'retry',
}).content.decode('utf-8'))
assert r['status'] == 'error'
trans.refresh_from_db()
assert trans.state == BankTransaction.STATE_ERROR
env[3].refresh_from_db()
assert env[3].status == Order.STATUS_PAID
| apache-2.0 | 4,349,866,452,003,900,000 | 44.903846 | 120 | 0.619397 | false |
undefinedv/Jingubang | sqlmap/tamper/equaltolike.py | 2 | 1136 | #!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import os
import re
from lib.core.common import singleTimeWarnMessage
from lib.core.enums import DBMS
from lib.core.enums import PRIORITY
__priority__ = PRIORITY.HIGHEST
def dependencies():
singleTimeWarnMessage("tamper script '%s' is unlikely to work against %s" % (os.path.basename(__file__).split(".")[0], DBMS.PGSQL))
def tamper(payload, **kwargs):
"""
    Replaces all occurrences of operator equal ('=') with operator 'LIKE'
Tested against:
* Microsoft SQL Server 2005
* MySQL 4, 5.0 and 5.5
Notes:
* Useful to bypass weak and bespoke web application firewalls that
filter the equal character ('=')
* The LIKE operator is SQL standard. Hence, this tamper script
should work against all (?) databases
>>> tamper('SELECT * FROM users WHERE id=1')
'SELECT * FROM users WHERE id LIKE 1'
"""
retVal = payload
if payload:
retVal = re.sub(r"\s*=\s*", " LIKE ", retVal)
return retVal
| gpl-3.0 | -8,846,319,924,585,905,000 | 25.418605 | 135 | 0.65493 | false |
tahnok/react-native | JSCLegacyProfiler/trace_data.py | 375 | 8013 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import re
import unittest
"""
# _-----=> irqs-off
# / _----=> need-resched
# | / _---=> hardirq/softirq
# || / _--=> preempt-depth
# ||| / delay
# TASK-PID CPU# |||| TIMESTAMP FUNCTION
# | | | |||| | |
<idle>-0 [001] ...2 3269.291072: sched_switch: prev_comm=swapper/1 prev_pid=0 prev_prio=120 prev_state=R ==> next_comm=mmcqd/0 next_pid=120 next_prio=120
"""
TRACE_LINE_PATTERN = re.compile(
r'^\s*(?P<task>.+)-(?P<pid>\d+)\s+(?:\((?P<tgid>.+)\)\s+)?\[(?P<cpu>\d+)\]\s+(?:(?P<flags>\S{4})\s+)?(?P<timestamp>[0-9.]+):\s+(?P<function>.+)$')
"""
Example lines from custom app traces:
0: B|27295|providerRemove
0: E
tracing_mark_write: S|27311|NNFColdStart<D-7744962>|1112249168
"""
APP_TRACE_LINE_PATTERN = re.compile(
r'^(?P<type>.+?): (?P<args>.+)$')
"""
Example section names:
NNFColdStart
NNFColdStart<0><T7744962>
NNFColdStart<X>
NNFColdStart<T7744962>
"""
DECORATED_SECTION_NAME_PATTERN = re.compile(r'^(?P<section_name>.*?)(?:<0>)?(?:<(?P<command>.)(?P<argument>.*?)>)?$')
SYSTRACE_LINE_TYPES = set(['0', 'tracing_mark_write'])
class TraceLine(object):
def __init__(self, task, pid, tgid, cpu, flags, timestamp, function):
self.task = task
self.pid = pid
self.tgid = tgid
self.cpu = cpu
self.flags = flags
self.timestamp = timestamp
self.function = function
self.canceled = False
@property
def is_app_trace_line(self):
return isinstance(self.function, AppTraceFunction)
def cancel(self):
self.canceled = True
def __str__(self):
if self.canceled:
return ""
elif self.tgid:
return "{task:>16s}-{pid:<5d} ({tgid:5s}) [{cpu:03d}] {flags:4s} {timestamp:12f}: {function}\n".format(**vars(self))
elif self.flags:
return "{task:>16s}-{pid:<5d} [{cpu:03d}] {flags:4s} {timestamp:12f}: {function}\n".format(**vars(self))
else:
return "{task:>16s}-{pid:<5d} [{cpu:03d}] {timestamp:12.6f}: {function}\n".format(**vars(self))
class AppTraceFunction(object):
def __init__(self, type, args):
self.type = type
self.args = args
self.operation = args[0]
if len(args) >= 2 and args[1]:
self.pid = int(args[1])
if len(args) >= 3:
self._section_name, self.command, self.argument = _parse_section_name(args[2])
args[2] = self._section_name
else:
self._section_name = None
self.command = None
self.argument = None
self.cookie = None
@property
def section_name(self):
return self._section_name
@section_name.setter
def section_name(self, value):
self._section_name = value
self.args[2] = value
def __str__(self):
return "{type}: {args}".format(type=self.type, args='|'.join(self.args))
class AsyncTraceFunction(AppTraceFunction):
def __init__(self, type, args):
super(AsyncTraceFunction, self).__init__(type, args)
self.cookie = int(args[3])
TRACE_TYPE_MAP = {
'S': AsyncTraceFunction,
'T': AsyncTraceFunction,
'F': AsyncTraceFunction,
}
def parse_line(line):
match = TRACE_LINE_PATTERN.match(line.strip())
if not match:
return None
task = match.group("task")
pid = int(match.group("pid"))
tgid = match.group("tgid")
cpu = int(match.group("cpu"))
flags = match.group("flags")
timestamp = float(match.group("timestamp"))
function = match.group("function")
app_trace = _parse_function(function)
if app_trace:
function = app_trace
return TraceLine(task, pid, tgid, cpu, flags, timestamp, function)
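

# Illustrative sketch -- not part of the original profiler script. It feeds one
# of the sample ftrace lines quoted in the module docstring above through
# parse_line() to show which fields come back.
def _example_parse_line():  # pragma: no cover
    line = ("          <idle>-0     [001] ...2  3269.291072: sched_switch: "
            "prev_comm=swapper/1 prev_pid=0 prev_prio=120 prev_state=R ==> "
            "next_comm=mmcqd/0 next_pid=120 next_prio=120")
    parsed = parse_line(line)
    # parsed.task == '<idle>', parsed.pid == 0, parsed.cpu == 1 and
    # parsed.timestamp == 3269.291072; sched_switch is not an app-trace type,
    # so parsed.function stays a plain string and is_app_trace_line is False.
    return parsed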
def parse_dextr_line(line):
task = line["name"]
pid = line["pid"]
tgid = line["tid"]
cpu = None
flags = None
timestamp = line["ts"]
function = AppTraceFunction("DextrTrace", [line["ph"], line["pid"], line["name"]])
return TraceLine(task, pid, tgid, cpu, flags, timestamp, function)
def _parse_function(function):
line_match = APP_TRACE_LINE_PATTERN.match(function)
if not line_match:
return None
type = line_match.group("type")
if not type in SYSTRACE_LINE_TYPES:
return None
args = line_match.group("args").split('|')
if len(args) == 1 and len(args[0]) == 0:
args = None
constructor = TRACE_TYPE_MAP.get(args[0], AppTraceFunction)
return constructor(type, args)
def _parse_section_name(section_name):
if section_name is None:
return section_name, None, None
section_name_match = DECORATED_SECTION_NAME_PATTERN.match(section_name)
section_name = section_name_match.group("section_name")
command = section_name_match.group("command")
argument = section_name_match.group("argument")
return section_name, command, argument
def _format_section_name(section_name, command, argument):
if not command:
return section_name
return "{section_name}<{command}{argument}>".format(**vars())
class RoundTripFormattingTests(unittest.TestCase):
def testPlainSectionName(self):
section_name = "SectionName12345-5562342fas"
self.assertEqual(section_name, _format_section_name(*_parse_section_name(section_name)))
def testDecoratedSectionName(self):
section_name = "SectionName12345-5562342fas<D-123456>"
self.assertEqual(section_name, _format_section_name(*_parse_section_name(section_name)))
def testSimpleFunction(self):
function = "0: E"
self.assertEqual(function, str(_parse_function(function)))
def testFunctionWithoutCookie(self):
function = "0: B|27295|providerRemove"
self.assertEqual(function, str(_parse_function(function)))
def testFunctionWithCookie(self):
function = "0: S|27311|NNFColdStart|1112249168"
self.assertEqual(function, str(_parse_function(function)))
def testFunctionWithCookieAndArgs(self):
function = "0: T|27311|NNFColdStart|1122|Start"
self.assertEqual(function, str(_parse_function(function)))
def testFunctionWithArgsButNoPid(self):
function = "0: E|||foo=bar"
self.assertEqual(function, str(_parse_function(function)))
def testKitKatFunction(self):
function = "tracing_mark_write: B|14127|Looper.dispatchMessage|arg=>>>>> Dispatching to Handler (android.os.Handler) {422ae980} null: 0|Java"
self.assertEqual(function, str(_parse_function(function)))
def testNonSysTraceFunctionIgnored(self):
function = "sched_switch: prev_comm=swapper/1 prev_pid=0 prev_prio=120 prev_state=R ==> next_comm=mmcqd/0 next_pid=120 next_prio=120"
self.assertEqual(None, _parse_function(function))
def testLineWithFlagsAndTGID(self):
line = " <idle>-0 ( 550) [000] d..2 7953.258473: cpu_idle: state=1 cpu_id=0\n"
self.assertEqual(line, str(parse_line(line)))
def testLineWithFlagsAndNoTGID(self):
line = " <idle>-0 (-----) [000] d..2 7953.258473: cpu_idle: state=1 cpu_id=0\n"
self.assertEqual(line, str(parse_line(line)))
def testLineWithFlags(self):
line = " <idle>-0 [001] ...2 3269.291072: sched_switch: prev_comm=swapper/1 prev_pid=0 prev_prio=120 prev_state=R ==> next_comm=mmcqd/0 next_pid=120 next_prio=120\n"
self.assertEqual(line, str(parse_line(line)))
def testLineWithoutFlags(self):
line = " <idle>-0 [001] 3269.291072: sched_switch: prev_comm=swapper/1 prev_pid=0 prev_prio=120 prev_state=R ==> next_comm=mmcqd/0 next_pid=120 next_prio=120\n"
self.assertEqual(line, str(parse_line(line)))
| bsd-3-clause | -2,721,583,562,912,352,000 | 31.840164 | 187 | 0.610383 | false |
nop33/indico-plugins | chat/indico_chat/controllers/event.py | 1 | 1645 | # This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from flask_pluginengine import current_plugin
from indico.legacy.webinterface.rh.conferenceDisplay import RHConferenceBaseDisplay
from indico_chat.models.chatrooms import ChatroomEventAssociation
from indico_chat.views import WPChatEventPage
class RHChatEventPage(RHConferenceBaseDisplay):
"""Lists the public chatrooms in a conference"""
def _process(self):
chatrooms = ChatroomEventAssociation.find_for_event(self.event_new).all()
cols = set()
if any(c.chatroom.description for c in chatrooms):
cols.add('description')
if any(c.chatroom.password for c in chatrooms):
cols.add('password')
return WPChatEventPage.render_template('event_page.html', self._conf, event_chatrooms=chatrooms, cols=cols,
chat_links=current_plugin.settings.get('chat_links'))
| gpl-3.0 | -3,176,928,040,759,831,600 | 42.289474 | 115 | 0.728267 | false |
sertac/django | tests/gis_tests/geo3d/tests.py | 199 | 17484 | from __future__ import unicode_literals
import os
import re
from unittest import skipUnless
from django.contrib.gis.db.models import Extent3D, Union
from django.contrib.gis.db.models.functions import (
AsGeoJSON, AsKML, Length, Perimeter, Scale, Translate,
)
from django.contrib.gis.gdal import HAS_GDAL
from django.contrib.gis.geos import GEOSGeometry, LineString, Point, Polygon
from django.test import TestCase, ignore_warnings, skipUnlessDBFeature
from django.utils._os import upath
from django.utils.deprecation import (
RemovedInDjango20Warning, RemovedInDjango110Warning,
)
from .models import (
City3D, Interstate2D, Interstate3D, InterstateProj2D, InterstateProj3D,
MultiPoint3D, Point2D, Point3D, Polygon2D, Polygon3D,
)
if HAS_GDAL:
from django.contrib.gis.utils import LayerMapping, LayerMapError
data_path = os.path.realpath(os.path.join(os.path.dirname(upath(__file__)), '..', 'data'))
city_file = os.path.join(data_path, 'cities', 'cities.shp')
vrt_file = os.path.join(data_path, 'test_vrt', 'test_vrt.vrt')
# The coordinates of each city, with Z values corresponding to their
# altitude in meters.
city_data = (
('Houston', (-95.363151, 29.763374, 18)),
('Dallas', (-96.801611, 32.782057, 147)),
('Oklahoma City', (-97.521157, 34.464642, 380)),
('Wellington', (174.783117, -41.315268, 14)),
('Pueblo', (-104.609252, 38.255001, 1433)),
('Lawrence', (-95.235060, 38.971823, 251)),
('Chicago', (-87.650175, 41.850385, 181)),
('Victoria', (-123.305196, 48.462611, 15)),
)
# Reference mapping of city name to its altitude (Z value).
city_dict = {name: coords for name, coords in city_data}
# 3D freeway data derived from the National Elevation Dataset:
# http://seamless.usgs.gov/products/9arc.php
interstate_data = (
('I-45',
'LINESTRING(-95.3708481 29.7765870 11.339,-95.3694580 29.7787980 4.536,'
'-95.3690305 29.7797359 9.762,-95.3691886 29.7812450 12.448,'
'-95.3696447 29.7850144 10.457,-95.3702511 29.7868518 9.418,'
'-95.3706724 29.7881286 14.858,-95.3711632 29.7896157 15.386,'
'-95.3714525 29.7936267 13.168,-95.3717848 29.7955007 15.104,'
'-95.3717719 29.7969804 16.516,-95.3717305 29.7982117 13.923,'
'-95.3717254 29.8000778 14.385,-95.3719875 29.8013539 15.160,'
'-95.3720575 29.8026785 15.544,-95.3721321 29.8040912 14.975,'
'-95.3722074 29.8050998 15.688,-95.3722779 29.8060430 16.099,'
'-95.3733818 29.8076750 15.197,-95.3741563 29.8103686 17.268,'
'-95.3749458 29.8129927 19.857,-95.3763564 29.8144557 15.435)',
(11.339, 4.536, 9.762, 12.448, 10.457, 9.418, 14.858,
15.386, 13.168, 15.104, 16.516, 13.923, 14.385, 15.16,
15.544, 14.975, 15.688, 16.099, 15.197, 17.268, 19.857,
15.435),
),
)
# Bounding box polygon for inner-loop of Houston (in projected coordinate
# system 32140), with elevation values from the National Elevation Dataset
# (see above).
bbox_data = (
'POLYGON((941527.97 4225693.20,962596.48 4226349.75,963152.57 4209023.95,'
'942051.75 4208366.38,941527.97 4225693.20))',
(21.71, 13.21, 9.12, 16.40, 21.71)
)
class Geo3DLoadingHelper(object):
def _load_interstate_data(self):
# Interstate (2D / 3D and Geographic/Projected variants)
for name, line, exp_z in interstate_data:
line_3d = GEOSGeometry(line, srid=4269)
line_2d = LineString([l[:2] for l in line_3d.coords], srid=4269)
# Creating a geographic and projected version of the
# interstate in both 2D and 3D.
Interstate3D.objects.create(name=name, line=line_3d)
InterstateProj3D.objects.create(name=name, line=line_3d)
Interstate2D.objects.create(name=name, line=line_2d)
InterstateProj2D.objects.create(name=name, line=line_2d)
def _load_city_data(self):
for name, pnt_data in city_data:
City3D.objects.create(name=name, point=Point(*pnt_data, srid=4326))
def _load_polygon_data(self):
bbox_wkt, bbox_z = bbox_data
bbox_2d = GEOSGeometry(bbox_wkt, srid=32140)
bbox_3d = Polygon(tuple((x, y, z) for (x, y), z in zip(bbox_2d[0].coords, bbox_z)), srid=32140)
Polygon2D.objects.create(name='2D BBox', poly=bbox_2d)
Polygon3D.objects.create(name='3D BBox', poly=bbox_3d)
@skipUnless(HAS_GDAL, "GDAL is required for Geo3DTest.")
@skipUnlessDBFeature("gis_enabled", "supports_3d_storage")
class Geo3DTest(Geo3DLoadingHelper, TestCase):
"""
Only a subset of the PostGIS routines are 3D-enabled, and this TestCase
tries to test the features that can handle 3D and that are also
available within GeoDjango. For more information, see the PostGIS docs
on the routines that support 3D:
http://postgis.net/docs/PostGIS_Special_Functions_Index.html#PostGIS_3D_Functions
"""
def test_3d_hasz(self):
"""
Make sure data is 3D and has expected Z values -- shouldn't change
because of coordinate system.
"""
self._load_interstate_data()
for name, line, exp_z in interstate_data:
interstate = Interstate3D.objects.get(name=name)
interstate_proj = InterstateProj3D.objects.get(name=name)
for i in [interstate, interstate_proj]:
self.assertTrue(i.line.hasz)
self.assertEqual(exp_z, tuple(i.line.z))
self._load_city_data()
for name, pnt_data in city_data:
city = City3D.objects.get(name=name)
z = pnt_data[2]
self.assertTrue(city.point.hasz)
self.assertEqual(z, city.point.z)
def test_3d_polygons(self):
"""
Test the creation of polygon 3D models.
"""
self._load_polygon_data()
p3d = Polygon3D.objects.get(name='3D BBox')
self.assertTrue(p3d.poly.hasz)
self.assertIsInstance(p3d.poly, Polygon)
self.assertEqual(p3d.poly.srid, 32140)
def test_3d_layermapping(self):
"""
Testing LayerMapping on 3D models.
"""
point_mapping = {'point': 'POINT'}
mpoint_mapping = {'mpoint': 'MULTIPOINT'}
# The VRT is 3D, but should still be able to map sans the Z.
lm = LayerMapping(Point2D, vrt_file, point_mapping, transform=False)
lm.save()
self.assertEqual(3, Point2D.objects.count())
# The city shapefile is 2D, and won't be able to fill the coordinates
# in the 3D model -- thus, a LayerMapError is raised.
self.assertRaises(LayerMapError, LayerMapping,
Point3D, city_file, point_mapping, transform=False)
# 3D model should take 3D data just fine.
lm = LayerMapping(Point3D, vrt_file, point_mapping, transform=False)
lm.save()
self.assertEqual(3, Point3D.objects.count())
# Making sure LayerMapping.make_multi works right, by converting
# a Point25D into a MultiPoint25D.
lm = LayerMapping(MultiPoint3D, vrt_file, mpoint_mapping, transform=False)
lm.save()
self.assertEqual(3, MultiPoint3D.objects.count())
@ignore_warnings(category=RemovedInDjango20Warning)
def test_kml(self):
"""
Test GeoQuerySet.kml() with Z values.
"""
self._load_city_data()
h = City3D.objects.kml(precision=6).get(name='Houston')
# KML should be 3D.
# `SELECT ST_AsKML(point, 6) FROM geo3d_city3d WHERE name = 'Houston';`
ref_kml_regex = re.compile(r'^<Point><coordinates>-95.363\d+,29.763\d+,18</coordinates></Point>$')
self.assertTrue(ref_kml_regex.match(h.kml))
@ignore_warnings(category=RemovedInDjango20Warning)
def test_geojson(self):
"""
Test GeoQuerySet.geojson() with Z values.
"""
self._load_city_data()
h = City3D.objects.geojson(precision=6).get(name='Houston')
# GeoJSON should be 3D
# `SELECT ST_AsGeoJSON(point, 6) FROM geo3d_city3d WHERE name='Houston';`
ref_json_regex = re.compile(r'^{"type":"Point","coordinates":\[-95.363151,29.763374,18(\.0+)?\]}$')
self.assertTrue(ref_json_regex.match(h.geojson))
@skipUnlessDBFeature("supports_3d_functions")
def test_union(self):
"""
Testing the Union aggregate of 3D models.
"""
# PostGIS query that returned the reference EWKT for this test:
# `SELECT ST_AsText(ST_Union(point)) FROM geo3d_city3d;`
self._load_city_data()
ref_ewkt = (
'SRID=4326;MULTIPOINT(-123.305196 48.462611 15,-104.609252 38.255001 1433,'
'-97.521157 34.464642 380,-96.801611 32.782057 147,-95.363151 29.763374 18,'
'-95.23506 38.971823 251,-87.650175 41.850385 181,174.783117 -41.315268 14)'
)
ref_union = GEOSGeometry(ref_ewkt)
union = City3D.objects.aggregate(Union('point'))['point__union']
self.assertTrue(union.hasz)
# Ordering of points in the resulting geometry may vary between implementations
self.assertSetEqual({p.ewkt for p in ref_union}, {p.ewkt for p in union})
@skipUnlessDBFeature("supports_3d_functions")
@ignore_warnings(category=RemovedInDjango110Warning)
def test_extent(self):
"""
Testing the Extent3D aggregate for 3D models.
"""
self._load_city_data()
# `SELECT ST_Extent3D(point) FROM geo3d_city3d;`
ref_extent3d = (-123.305196, -41.315268, 14, 174.783117, 48.462611, 1433)
extent1 = City3D.objects.aggregate(Extent3D('point'))['point__extent3d']
extent2 = City3D.objects.extent3d()
def check_extent3d(extent3d, tol=6):
for ref_val, ext_val in zip(ref_extent3d, extent3d):
self.assertAlmostEqual(ref_val, ext_val, tol)
for e3d in [extent1, extent2]:
check_extent3d(e3d)
self.assertIsNone(City3D.objects.none().extent3d())
self.assertIsNone(City3D.objects.none().aggregate(Extent3D('point'))['point__extent3d'])
@ignore_warnings(category=RemovedInDjango20Warning)
@skipUnlessDBFeature("supports_3d_functions")
def test_perimeter(self):
"""
Testing GeoQuerySet.perimeter() on 3D fields.
"""
self._load_polygon_data()
# Reference query for values below:
# `SELECT ST_Perimeter3D(poly), ST_Perimeter2D(poly) FROM geo3d_polygon3d;`
ref_perim_3d = 76859.2620451
ref_perim_2d = 76859.2577803
tol = 6
self.assertAlmostEqual(ref_perim_2d,
Polygon2D.objects.perimeter().get(name='2D BBox').perimeter.m,
tol)
self.assertAlmostEqual(ref_perim_3d,
Polygon3D.objects.perimeter().get(name='3D BBox').perimeter.m,
tol)
@ignore_warnings(category=RemovedInDjango20Warning)
@skipUnlessDBFeature("supports_3d_functions")
def test_length(self):
"""
Testing GeoQuerySet.length() on 3D fields.
"""
        # ST_Length_Spheroid is Z-aware, and thus does not need to use
# a separate function internally.
# `SELECT ST_Length_Spheroid(line, 'SPHEROID["GRS 1980",6378137,298.257222101]')
# FROM geo3d_interstate[2d|3d];`
self._load_interstate_data()
tol = 3
ref_length_2d = 4368.1721949481
ref_length_3d = 4368.62547052088
self.assertAlmostEqual(ref_length_2d,
Interstate2D.objects.length().get(name='I-45').length.m,
tol)
self.assertAlmostEqual(ref_length_3d,
Interstate3D.objects.length().get(name='I-45').length.m,
tol)
        # Making sure `ST_Length3D` is used for a projected
# and 3D model rather than `ST_Length`.
# `SELECT ST_Length(line) FROM geo3d_interstateproj2d;`
ref_length_2d = 4367.71564892392
# `SELECT ST_Length3D(line) FROM geo3d_interstateproj3d;`
ref_length_3d = 4368.16897234101
self.assertAlmostEqual(ref_length_2d,
InterstateProj2D.objects.length().get(name='I-45').length.m,
tol)
self.assertAlmostEqual(ref_length_3d,
InterstateProj3D.objects.length().get(name='I-45').length.m,
tol)
@ignore_warnings(category=RemovedInDjango20Warning)
@skipUnlessDBFeature("supports_3d_functions")
def test_scale(self):
"""
Testing GeoQuerySet.scale() on Z values.
"""
self._load_city_data()
# Mapping of City name to reference Z values.
zscales = (-3, 4, 23)
for zscale in zscales:
for city in City3D.objects.scale(1.0, 1.0, zscale):
self.assertEqual(city_dict[city.name][2] * zscale, city.scale.z)
@ignore_warnings(category=RemovedInDjango20Warning)
@skipUnlessDBFeature("supports_3d_functions")
def test_translate(self):
"""
Testing GeoQuerySet.translate() on Z values.
"""
self._load_city_data()
ztranslations = (5.23, 23, -17)
for ztrans in ztranslations:
for city in City3D.objects.translate(0, 0, ztrans):
self.assertEqual(city_dict[city.name][2] + ztrans, city.translate.z)
@skipUnless(HAS_GDAL, "GDAL is required for Geo3DTest.")
@skipUnlessDBFeature("gis_enabled", "supports_3d_functions")
class Geo3DFunctionsTests(Geo3DLoadingHelper, TestCase):
def test_kml(self):
"""
Test KML() function with Z values.
"""
self._load_city_data()
h = City3D.objects.annotate(kml=AsKML('point', precision=6)).get(name='Houston')
# KML should be 3D.
# `SELECT ST_AsKML(point, 6) FROM geo3d_city3d WHERE name = 'Houston';`
ref_kml_regex = re.compile(r'^<Point><coordinates>-95.363\d+,29.763\d+,18</coordinates></Point>$')
self.assertTrue(ref_kml_regex.match(h.kml))
def test_geojson(self):
"""
Test GeoJSON() function with Z values.
"""
self._load_city_data()
h = City3D.objects.annotate(geojson=AsGeoJSON('point', precision=6)).get(name='Houston')
# GeoJSON should be 3D
# `SELECT ST_AsGeoJSON(point, 6) FROM geo3d_city3d WHERE name='Houston';`
ref_json_regex = re.compile(r'^{"type":"Point","coordinates":\[-95.363151,29.763374,18(\.0+)?\]}$')
self.assertTrue(ref_json_regex.match(h.geojson))
def test_perimeter(self):
"""
Testing Perimeter() function on 3D fields.
"""
self._load_polygon_data()
# Reference query for values below:
# `SELECT ST_Perimeter3D(poly), ST_Perimeter2D(poly) FROM geo3d_polygon3d;`
ref_perim_3d = 76859.2620451
ref_perim_2d = 76859.2577803
tol = 6
poly2d = Polygon2D.objects.annotate(perimeter=Perimeter('poly')).get(name='2D BBox')
self.assertAlmostEqual(ref_perim_2d, poly2d.perimeter.m, tol)
poly3d = Polygon3D.objects.annotate(perimeter=Perimeter('poly')).get(name='3D BBox')
self.assertAlmostEqual(ref_perim_3d, poly3d.perimeter.m, tol)
def test_length(self):
"""
Testing Length() function on 3D fields.
"""
        # ST_Length_Spheroid is Z-aware, and thus does not need to use
# a separate function internally.
# `SELECT ST_Length_Spheroid(line, 'SPHEROID["GRS 1980",6378137,298.257222101]')
# FROM geo3d_interstate[2d|3d];`
self._load_interstate_data()
tol = 3
ref_length_2d = 4368.1721949481
ref_length_3d = 4368.62547052088
inter2d = Interstate2D.objects.annotate(length=Length('line')).get(name='I-45')
self.assertAlmostEqual(ref_length_2d, inter2d.length.m, tol)
inter3d = Interstate3D.objects.annotate(length=Length('line')).get(name='I-45')
self.assertAlmostEqual(ref_length_3d, inter3d.length.m, tol)
        # Making sure `ST_Length3D` is used for a projected
# and 3D model rather than `ST_Length`.
# `SELECT ST_Length(line) FROM geo3d_interstateproj2d;`
ref_length_2d = 4367.71564892392
# `SELECT ST_Length3D(line) FROM geo3d_interstateproj3d;`
ref_length_3d = 4368.16897234101
inter2d = InterstateProj2D.objects.annotate(length=Length('line')).get(name='I-45')
self.assertAlmostEqual(ref_length_2d, inter2d.length.m, tol)
inter3d = InterstateProj3D.objects.annotate(length=Length('line')).get(name='I-45')
self.assertAlmostEqual(ref_length_3d, inter3d.length.m, tol)
def test_scale(self):
"""
Testing Scale() function on Z values.
"""
self._load_city_data()
# Mapping of City name to reference Z values.
zscales = (-3, 4, 23)
for zscale in zscales:
for city in City3D.objects.annotate(scale=Scale('point', 1.0, 1.0, zscale)):
self.assertEqual(city_dict[city.name][2] * zscale, city.scale.z)
def test_translate(self):
"""
Testing Translate() function on Z values.
"""
self._load_city_data()
ztranslations = (5.23, 23, -17)
for ztrans in ztranslations:
for city in City3D.objects.annotate(translate=Translate('point', 0, 0, ztrans)):
self.assertEqual(city_dict[city.name][2] + ztrans, city.translate.z)
| bsd-3-clause | 7,048,294,840,285,388,000 | 42.064039 | 107 | 0.627431 | false |
KaranToor/MA450 | google-cloud-sdk/platform/gsutil/third_party/boto/boto/cloudsearch2/domain.py | 153 | 21247 | # Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.cloudsearch2.optionstatus import IndexFieldStatus
from boto.cloudsearch2.optionstatus import ServicePoliciesStatus
from boto.cloudsearch2.optionstatus import ExpressionStatus
from boto.cloudsearch2.optionstatus import AvailabilityOptionsStatus
from boto.cloudsearch2.optionstatus import ScalingParametersStatus
from boto.cloudsearch2.document import DocumentServiceConnection
from boto.cloudsearch2.search import SearchConnection
def handle_bool(value):
if value in [True, 'true', 'True', 'TRUE', 1]:
return True
return False
class Domain(object):
"""
A Cloudsearch domain.
:ivar name: The name of the domain.
:ivar id: The internally generated unique identifier for the domain.
:ivar created: A boolean which is True if the domain is
created. It can take several minutes to initialize a domain
when CreateDomain is called. Newly created search domains are
returned with a False value for Created until domain creation
is complete
:ivar deleted: A boolean which is True if the search domain has
been deleted. The system must clean up resources dedicated to
the search domain when delete is called. Newly deleted
search domains are returned from list_domains with a True
value for deleted for several minutes until resource cleanup
is complete.
:ivar processing: True if processing is being done to activate the
current domain configuration.
:ivar num_searchable_docs: The number of documents that have been
        submitted to the domain and indexed.
    :ivar requires_index_documents: True if index_documents needs to be
called to activate the current domain configuration.
:ivar search_instance_count: The number of search instances that are
available to process search requests.
:ivar search_instance_type: The instance type that is being used to
process search requests.
:ivar search_partition_count: The number of partitions across which
the search index is spread.
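    Illustrative usage (hypothetical variable names; assumes an existing
    Layer1 connection and a domain status dict as returned by the service):
        domain = Domain(layer1, domain_status_dict)
        if domain.created and not domain.processing:
            doc_service = domain.get_document_service()
            search_service = domain.get_search_service()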
"""
def __init__(self, layer1, data):
"""
Constructor - Create a domain object from a layer1 and data params
:type layer1: :class:`boto.cloudsearch2.layer1.Layer1` object
:param layer1: A :class:`boto.cloudsearch2.layer1.Layer1` object
which is used to perform operations on the domain.
"""
self.layer1 = layer1
self.update_from_data(data)
def update_from_data(self, data):
self.created = data['Created']
self.deleted = data['Deleted']
self.processing = data['Processing']
self.requires_index_documents = data['RequiresIndexDocuments']
self.domain_id = data['DomainId']
self.domain_name = data['DomainName']
self.search_instance_count = data['SearchInstanceCount']
self.search_instance_type = data.get('SearchInstanceType', None)
self.search_partition_count = data['SearchPartitionCount']
self._doc_service = data['DocService']
self._service_arn = data['ARN']
self._search_service = data['SearchService']
@property
def service_arn(self):
return self._service_arn
@property
def doc_service_endpoint(self):
return self._doc_service['Endpoint']
@property
def search_service_endpoint(self):
return self._search_service['Endpoint']
@property
def created(self):
return self._created
@created.setter
def created(self, value):
self._created = handle_bool(value)
@property
def deleted(self):
return self._deleted
@deleted.setter
def deleted(self, value):
self._deleted = handle_bool(value)
@property
def processing(self):
return self._processing
@processing.setter
def processing(self, value):
self._processing = handle_bool(value)
@property
def requires_index_documents(self):
return self._requires_index_documents
@requires_index_documents.setter
def requires_index_documents(self, value):
self._requires_index_documents = handle_bool(value)
@property
def search_partition_count(self):
return self._search_partition_count
@search_partition_count.setter
def search_partition_count(self, value):
self._search_partition_count = int(value)
@property
def search_instance_count(self):
return self._search_instance_count
@search_instance_count.setter
def search_instance_count(self, value):
self._search_instance_count = int(value)
@property
def name(self):
return self.domain_name
@property
def id(self):
return self.domain_id
def delete(self):
"""
Delete this domain and all index data associated with it.
"""
return self.layer1.delete_domain(self.name)
def get_analysis_schemes(self):
"""
Return a list of Analysis Scheme objects.
"""
return self.layer1.describe_analysis_schemes(self.name)
def get_availability_options(self):
"""
Return a :class:`boto.cloudsearch2.option.AvailabilityOptionsStatus`
object representing the currently defined availability options for
the domain.
:return: OptionsStatus object
:rtype: :class:`boto.cloudsearch2.option.AvailabilityOptionsStatus`
object
"""
return AvailabilityOptionsStatus(
self, refresh_fn=self.layer1.describe_availability_options,
refresh_key=['DescribeAvailabilityOptionsResponse',
'DescribeAvailabilityOptionsResult',
'AvailabilityOptions'],
save_fn=self.layer1.update_availability_options)
def get_scaling_options(self):
"""
Return a :class:`boto.cloudsearch2.option.ScalingParametersStatus`
object representing the currently defined scaling options for the
domain.
:return: ScalingParametersStatus object
:rtype: :class:`boto.cloudsearch2.option.ScalingParametersStatus`
object
"""
return ScalingParametersStatus(
self, refresh_fn=self.layer1.describe_scaling_parameters,
refresh_key=['DescribeScalingParametersResponse',
'DescribeScalingParametersResult',
'ScalingParameters'],
save_fn=self.layer1.update_scaling_parameters)
def get_access_policies(self):
"""
Return a :class:`boto.cloudsearch2.option.ServicePoliciesStatus`
object representing the currently defined access policies for the
domain.
:return: ServicePoliciesStatus object
:rtype: :class:`boto.cloudsearch2.option.ServicePoliciesStatus` object
"""
return ServicePoliciesStatus(
self, refresh_fn=self.layer1.describe_service_access_policies,
refresh_key=['DescribeServiceAccessPoliciesResponse',
'DescribeServiceAccessPoliciesResult',
'AccessPolicies'],
save_fn=self.layer1.update_service_access_policies)
def index_documents(self):
"""
Tells the search domain to start indexing its documents using
the latest text processing options and IndexFields. This
operation must be invoked to make options whose OptionStatus
has OptionState of RequiresIndexDocuments visible in search
results.
"""
self.layer1.index_documents(self.name)
def get_index_fields(self, field_names=None):
"""
Return a list of index fields defined for this domain.
:return: list of IndexFieldStatus objects
:rtype: list of :class:`boto.cloudsearch2.option.IndexFieldStatus`
object
"""
data = self.layer1.describe_index_fields(self.name, field_names)
data = (data['DescribeIndexFieldsResponse']
['DescribeIndexFieldsResult']
['IndexFields'])
return [IndexFieldStatus(self, d) for d in data]
def create_index_field(self, field_name, field_type,
default='', facet=False, returnable=False,
searchable=False, sortable=False,
highlight=False, source_field=None,
analysis_scheme=None):
"""
Defines an ``IndexField``, either replacing an existing
definition or creating a new one.
:type field_name: string
:param field_name: The name of a field in the search index.
:type field_type: string
:param field_type: The type of field. Valid values are
int | double | literal | text | date | latlon |
int-array | double-array | literal-array | text-array | date-array
:type default: string or int
:param default: The default value for the field. If the
field is of type ``int`` this should be an integer value.
Otherwise, it's a string.
:type facet: bool
:param facet: A boolean to indicate whether facets
are enabled for this field or not. Does not apply to
fields of type ``int, int-array, text, text-array``.
:type returnable: bool
:param returnable: A boolean to indicate whether values
of this field can be returned in search results or
used in ranking.
:type searchable: bool
:param searchable: A boolean to indicate whether search
is enabled for this field or not.
:type sortable: bool
:param sortable: A boolean to indicate whether sorting
is enabled for this field or not. Does not apply to
fields of array types.
:type highlight: bool
:param highlight: A boolean to indicate whether highlighting
is enabled for this field or not. Does not apply to
fields of type ``double, int, date, latlon``
:type source_field: list of strings or string
:param source_field: For array types, this is the list of fields
to treat as the source. For singular types, pass a string only.
:type analysis_scheme: string
:param analysis_scheme: The analysis scheme to use for this field.
Only applies to ``text | text-array`` field types
:return: IndexFieldStatus objects
:rtype: :class:`boto.cloudsearch2.option.IndexFieldStatus` object
:raises: BaseException, InternalException, LimitExceededException,
InvalidTypeException, ResourceNotFoundException
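        Example (hypothetical field and analysis scheme names), defining a
        returnable, sortable text field:
            domain.create_index_field('title', 'text', returnable=True,
                                      sortable=True,
                                      analysis_scheme='_en_default_')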
"""
index = {
'IndexFieldName': field_name,
'IndexFieldType': field_type
}
if field_type == 'literal':
index['LiteralOptions'] = {
'FacetEnabled': facet,
'ReturnEnabled': returnable,
'SearchEnabled': searchable,
'SortEnabled': sortable
}
if default:
index['LiteralOptions']['DefaultValue'] = default
if source_field:
index['LiteralOptions']['SourceField'] = source_field
elif field_type == 'literal-array':
index['LiteralArrayOptions'] = {
'FacetEnabled': facet,
'ReturnEnabled': returnable,
'SearchEnabled': searchable
}
if default:
index['LiteralArrayOptions']['DefaultValue'] = default
if source_field:
index['LiteralArrayOptions']['SourceFields'] = \
','.join(source_field)
elif field_type == 'int':
index['IntOptions'] = {
'DefaultValue': default,
'FacetEnabled': facet,
'ReturnEnabled': returnable,
'SearchEnabled': searchable,
'SortEnabled': sortable
}
if default:
index['IntOptions']['DefaultValue'] = default
if source_field:
index['IntOptions']['SourceField'] = source_field
elif field_type == 'int-array':
index['IntArrayOptions'] = {
'FacetEnabled': facet,
'ReturnEnabled': returnable,
'SearchEnabled': searchable
}
if default:
index['IntArrayOptions']['DefaultValue'] = default
if source_field:
index['IntArrayOptions']['SourceFields'] = \
','.join(source_field)
elif field_type == 'date':
index['DateOptions'] = {
'FacetEnabled': facet,
'ReturnEnabled': returnable,
'SearchEnabled': searchable,
'SortEnabled': sortable
}
if default:
index['DateOptions']['DefaultValue'] = default
if source_field:
index['DateOptions']['SourceField'] = source_field
elif field_type == 'date-array':
index['DateArrayOptions'] = {
'FacetEnabled': facet,
'ReturnEnabled': returnable,
'SearchEnabled': searchable
}
if default:
index['DateArrayOptions']['DefaultValue'] = default
if source_field:
index['DateArrayOptions']['SourceFields'] = \
','.join(source_field)
elif field_type == 'double':
index['DoubleOptions'] = {
'FacetEnabled': facet,
'ReturnEnabled': returnable,
'SearchEnabled': searchable,
'SortEnabled': sortable
}
if default:
index['DoubleOptions']['DefaultValue'] = default
if source_field:
index['DoubleOptions']['SourceField'] = source_field
elif field_type == 'double-array':
index['DoubleArrayOptions'] = {
'FacetEnabled': facet,
'ReturnEnabled': returnable,
'SearchEnabled': searchable
}
if default:
index['DoubleArrayOptions']['DefaultValue'] = default
if source_field:
index['DoubleArrayOptions']['SourceFields'] = \
','.join(source_field)
elif field_type == 'text':
index['TextOptions'] = {
'ReturnEnabled': returnable,
'HighlightEnabled': highlight,
'SortEnabled': sortable
}
if default:
index['TextOptions']['DefaultValue'] = default
if source_field:
index['TextOptions']['SourceField'] = source_field
if analysis_scheme:
index['TextOptions']['AnalysisScheme'] = analysis_scheme
elif field_type == 'text-array':
index['TextArrayOptions'] = {
'ReturnEnabled': returnable,
'HighlightEnabled': highlight
}
if default:
index['TextArrayOptions']['DefaultValue'] = default
if source_field:
index['TextArrayOptions']['SourceFields'] = \
','.join(source_field)
if analysis_scheme:
index['TextArrayOptions']['AnalysisScheme'] = analysis_scheme
elif field_type == 'latlon':
index['LatLonOptions'] = {
'FacetEnabled': facet,
'ReturnEnabled': returnable,
'SearchEnabled': searchable,
'SortEnabled': sortable
}
if default:
index['LatLonOptions']['DefaultValue'] = default
if source_field:
index['LatLonOptions']['SourceField'] = source_field
data = self.layer1.define_index_field(self.name, index)
data = (data['DefineIndexFieldResponse']
['DefineIndexFieldResult']
['IndexField'])
return IndexFieldStatus(self, data,
self.layer1.describe_index_fields)
def get_expressions(self, names=None):
"""
Return a list of rank expressions defined for this domain.
:return: list of ExpressionStatus objects
:rtype: list of :class:`boto.cloudsearch2.option.ExpressionStatus`
object
"""
fn = self.layer1.describe_expressions
data = fn(self.name, names)
data = (data['DescribeExpressionsResponse']
['DescribeExpressionsResult']
['Expressions'])
return [ExpressionStatus(self, d, fn) for d in data]
def create_expression(self, name, value):
"""
Create a new expression.
:type name: string
:param name: The name of an expression for processing
during a search request.
:type value: string
:param value: The expression to evaluate for ranking
or thresholding while processing a search request. The
Expression syntax is based on JavaScript expressions
and supports:
* Single value, sort enabled numeric fields (int, double, date)
* Other expressions
* The _score variable, which references a document's relevance
score
* The _time variable, which references the current epoch time
* Integer, floating point, hex, and octal literals
* Arithmetic operators: + - * / %
* Bitwise operators: | & ^ ~ << >> >>>
* Boolean operators (including the ternary operator): && || ! ?:
* Comparison operators: < <= == >= >
* Mathematical functions: abs ceil exp floor ln log2 log10 logn
              max min pow sqrt
* Trigonometric functions: acos acosh asin asinh atan atan2 atanh
cos cosh sin sinh tanh tan
* The haversin distance function
Expressions always return an integer value from 0 to the maximum
64-bit signed integer value (2^63 - 1). Intermediate results are
calculated as double-precision floating point values and the return
value is rounded to the nearest integer. If the expression is
invalid or evaluates to a negative value, it returns 0. If the
expression evaluates to a value greater than the maximum, it
returns the maximum value.
The source data for an Expression can be the name of an
IndexField of type int or double, another Expression or the
reserved name _score. The _score source is
defined to return as a double from 0 to 10.0 (inclusive) to
indicate how relevant a document is to the search request,
taking into account repetition of search terms in the
document and proximity of search terms to each other in
each matching IndexField in the document.
For more information about using rank expressions to
customize ranking, see the Amazon CloudSearch Developer
Guide.
:return: ExpressionStatus object
:rtype: :class:`boto.cloudsearch2.option.ExpressionStatus` object
:raises: BaseException, InternalException, LimitExceededException,
InvalidTypeException, ResourceNotFoundException
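        Example (hypothetical numeric field ``rating``), blending a document
        attribute with the relevance score:
            domain.create_expression('rank_expr',
                                     '(0.3 * rating) + (0.7 * _score)')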
"""
data = self.layer1.define_expression(self.name, name, value)
data = (data['DefineExpressionResponse']
['DefineExpressionResult']
['Expression'])
return ExpressionStatus(self, data,
self.layer1.describe_expressions)
def get_document_service(self):
return DocumentServiceConnection(domain=self)
def get_search_service(self):
return SearchConnection(domain=self)
def __repr__(self):
return '<Domain: %s>' % self.domain_name
| apache-2.0 | 1,078,780,574,583,607,700 | 38.201107 | 79 | 0.609968 | false |
rigdenlab/SIMBAD | i2/SIMBAD_report.py | 1 | 7959 | """
SIMBAD_report.py: CCP4 GUI Project
This library is free software: you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public License
version 3, modified in accordance with the provisions of the
license to address the requirements of UK law.
You should have received a copy of the modified GNU Lesser General
Public License along with this library. If not, copies may be
downloaded from http://www.ccp4.ac.uk/ccp4license.php
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
"""
import os
import re
if __name__ == '__main__':
import sys
ccp4 = os.environ['CCP4']
sys.path.append(os.path.join(ccp4, 'share', 'ccp4i2', 'report'))
sys.path.append(os.path.join(ccp4, 'share', 'ccp4i2', 'core'))
sys.path.append(os.path.join(ccp4, 'lib', 'python2.7', 'site-packages'))
from lxml import etree as ET
from report.CCP4ReportParser import Report
from simbad.util import SIMBAD_DIRNAME, SIMBAD_PYRVAPI_SHAREDIR
class SIMBAD_report(Report):
TASKNAME = 'SIMBAD'
RUNNING = True
def __init__(self, xmlnode=None, jobInfo={}, **kw):
Report.__init__(self, xmlnode=xmlnode, jobInfo=jobInfo, **kw)
repdir = os.path.join(jobInfo.get('fileroot', None), SIMBAD_DIRNAME, SIMBAD_PYRVAPI_SHAREDIR)
self.get_tables_as_elements(repdir)
#print("JMHT WRITING REPORT %s" % self.e1_dict)
self.addDiv(style='clear:both;')
for e1 in xmlnode:
# Process each tab separately
if e1.tag == 'tab':
self.report_section(e1, self)
return
def get_tables_as_elements(self, repdir):
"""Get tables as xmltree elements by parsing task.tsk file and .table files"""
try:
t1_list = list()
with open(os.path.join(repdir, 'task.tsk')) as istream:
#print("JMHT CHECKING task.tsk %s\n" % os.path.join(repdir, 'task.tsk'))
for s1 in re.findall('<table .+?</table>', istream.read(), re.S):
t1 = ET.fromstring(s1)
if len(t1): t1_list.append(t1)
for f1 in os.listdir(repdir):
if f1.endswith('.table'):
t1 = ET.parse(os.path.join(repdir, f1)).getroot()
if len(t1): t1_list.append(t1)
self.e1_dict = dict()
for t1 in t1_list:
tid = t1.get('id', None)
if tid and tid.endswith('-grid'):
tags = [t2.tag for t2 in t1]
if tags == ['thead', 'tbody']:
assert len(t1) == 2
e1 = t1
else:
tset = set(tags)
tag = tset.pop()
assert not tset and tag == 'tr'
e1 = ET.Element('table')
e1.append(t1)
e1.attrib.update(t1.attrib)
t1.attrib.clear()
t1.tag = 'tbody'
for e2 in e1.iter():
e2.attrib.pop('class', None)
e1.find('tbody').set('class', 'fancy')
self.e1_dict[tid[:-5]] = e1
if len(self.e1_dict.keys()): return True
return False
except Exception as e:
print "EXCEPTION: {0}".format(e)
return
def report_section(self, e1, r0, sort=False):
"""
"""
elems = list()
title = 'Untitled'
state = False
cou = 0
#print("Processing tag %s id %s\n%s" % (e1.tag, e1.get('id'),ET.tostring(e1)))
for e2 in e1:
row = e2.get('row', '_')
col = e2.get('col', '_')
if row.isdigit() : row = int(row)
if col.isdigit() : col = int(col)
if e2.get('id') or e2.tag == 'text':
elems.append([row, col, e2])
if e2.tag == 'table':
cou += 1
elif e2.tag == 'name':
title = e2.text.strip()
elif e2.tag == 'open':
state = e2.text.strip() == 'true'
if elems:
# strip out anything we can't deal with here
if any([x in title.lower() for x in ['downloads', 'log files', 'graph']]): return
#print "GOT ELEMS ",[g[2].get('id') for g in elems],title
r1 = r0.addFold(label=title, initiallyOpen=state)
#for row, col, e2 in sorted(grid):
            if sort: elems = sorted(elems)
for _,_,e2 in elems:
id2 = e2.get('id')
#print "PROCESSING ",id2, e2.tag
if e2.tag == 'section':
self.report_section(e2, r1)
elif e2.tag == 'table':
if id2 and id2 in self.e1_dict:
if id2 == 'mrbump_table':
r1.append("The table below details the Molecular Replacement results from MrBUMP")
if cou > 1:
r1.append(e2.findtext('legend').strip())
r1.append(ET.tostring(self.e1_dict[id2]))
elif e2.tag == 'text':
for t in e2.itertext(): r1.append(t)
else:
pass
if __name__ == '__main__':
# Run with no arguments in the CCP4 job directory (the one that holds the SIMBAD directory)
def test2():
import argparse
parser = argparse.ArgumentParser(
            description='test of SIMBAD report generator',
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument(
'-w', '--wrkdir',
help='''a directory, containing the subdirectory
report/ generated by rvapi''',
default='.',
metavar='<dir>'
)
parser.add_argument(
'-i', '--xml',
help='input xml-file generated previously by rvapi',
default='program.xml',
metavar='<file>'
)
parser.add_argument(
'-o', '--html',
help='output html-file, a report file for i2',
default='areport.html',
metavar='<file>'
)
opt = parser.parse_args()
xmlnode = ET.parse(opt.xml).getroot()
jobInfo = dict(fileroot=os.path.abspath(opt.wrkdir))
report = SIMBAD_report(xmlnode, jobInfo)
if len(report.errReport):
print 'ERROR REPORT'
print report.errReport.report()
htmlbase = 'file://' + \
os.environ['CCP4'] + '/share/ccp4i2/docs/report_files'
htmlstr = ET.tostring(report.as_etree(htmlBase=htmlbase))
with open(opt.html, 'w') as ostream:
print >> ostream, htmlstr.replace('><', '>\n<')
test2()
# #from CCP4ReportParser import Report
# # class AMPLE_report(Report):
# # # Specify which gui task and/or pluginscript this applies to
# # TASKNAME = 'AMPLE'
# # RUNNING = False
# # def __init__(self,xmlnode=None,jobInfo={},jobStatus=None,**kw):
# # Report. __init__(self,xmlnode=xmlnode,jobInfo=jobInfo, jobStatus=jobStatus, **kw)
# # clearingDiv = self.addDiv(style="clear:both;")
# # self.addDefaultReport(self)
# #
# # def addDefaultReport(self, parent=None):
# # if parent is None: parent=self
# # if len(self.xmlnode.xpath("LogText")) > 0:
# # newFold = parent.addFold(label="Log text", initiallyOpen=True)
# # newFold.addPre(text = self.xmlnode.xpath("LogText")[0].text)
| bsd-3-clause | 6,299,607,690,489,702,000 | 39.815385 | 110 | 0.517653 | false |
Isendir/brython | www/src/Lib/xml/sax/expatreader.py | 870 | 14659 | """
SAX driver for the pyexpat C module. This driver works with
pyexpat.__version__ == '2.22'.
"""
version = "0.20"
from xml.sax._exceptions import *
from xml.sax.handler import feature_validation, feature_namespaces
from xml.sax.handler import feature_namespace_prefixes
from xml.sax.handler import feature_external_ges, feature_external_pes
from xml.sax.handler import feature_string_interning
from xml.sax.handler import property_xml_string, property_interning_dict
# xml.parsers.expat does not raise ImportError in Jython
import sys
if sys.platform[:4] == "java":
raise SAXReaderNotAvailable("expat not available in Java", None)
del sys
try:
from xml.parsers import expat
except ImportError:
raise SAXReaderNotAvailable("expat not supported", None)
else:
if not hasattr(expat, "ParserCreate"):
raise SAXReaderNotAvailable("expat not supported", None)
from xml.sax import xmlreader, saxutils, handler
AttributesImpl = xmlreader.AttributesImpl
AttributesNSImpl = xmlreader.AttributesNSImpl
# If we're using a sufficiently recent version of Python, we can use
# weak references to avoid cycles between the parser and content
# handler, otherwise we'll just have to pretend.
try:
import _weakref
except ImportError:
def _mkproxy(o):
return o
else:
import weakref
_mkproxy = weakref.proxy
del weakref, _weakref
# --- ExpatLocator
class ExpatLocator(xmlreader.Locator):
"""Locator for use with the ExpatParser class.
This uses a weak reference to the parser object to avoid creating
a circular reference between the parser and the content handler.
"""
def __init__(self, parser):
self._ref = _mkproxy(parser)
def getColumnNumber(self):
parser = self._ref
if parser._parser is None:
return None
return parser._parser.ErrorColumnNumber
def getLineNumber(self):
parser = self._ref
if parser._parser is None:
return 1
return parser._parser.ErrorLineNumber
def getPublicId(self):
parser = self._ref
if parser is None:
return None
return parser._source.getPublicId()
def getSystemId(self):
parser = self._ref
if parser is None:
return None
return parser._source.getSystemId()
# --- ExpatParser
class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator):
"""SAX driver for the pyexpat C module."""
def __init__(self, namespaceHandling=0, bufsize=2**16-20):
xmlreader.IncrementalParser.__init__(self, bufsize)
self._source = xmlreader.InputSource()
self._parser = None
self._namespaces = namespaceHandling
self._lex_handler_prop = None
self._parsing = 0
self._entity_stack = []
self._external_ges = 1
self._interning = None
# XMLReader methods
def parse(self, source):
"Parse an XML document from a URL or an InputSource."
source = saxutils.prepare_input_source(source)
self._source = source
self.reset()
self._cont_handler.setDocumentLocator(ExpatLocator(self))
xmlreader.IncrementalParser.parse(self, source)
def prepareParser(self, source):
if source.getSystemId() is not None:
self._parser.SetBase(source.getSystemId())
# Redefined setContentHandler to allow changing handlers during parsing
def setContentHandler(self, handler):
xmlreader.IncrementalParser.setContentHandler(self, handler)
if self._parsing:
self._reset_cont_handler()
def getFeature(self, name):
if name == feature_namespaces:
return self._namespaces
elif name == feature_string_interning:
return self._interning is not None
elif name in (feature_validation, feature_external_pes,
feature_namespace_prefixes):
return 0
elif name == feature_external_ges:
return self._external_ges
raise SAXNotRecognizedException("Feature '%s' not recognized" % name)
def setFeature(self, name, state):
if self._parsing:
raise SAXNotSupportedException("Cannot set features while parsing")
if name == feature_namespaces:
self._namespaces = state
elif name == feature_external_ges:
self._external_ges = state
elif name == feature_string_interning:
if state:
if self._interning is None:
self._interning = {}
else:
self._interning = None
elif name == feature_validation:
if state:
raise SAXNotSupportedException(
"expat does not support validation")
elif name == feature_external_pes:
if state:
raise SAXNotSupportedException(
"expat does not read external parameter entities")
elif name == feature_namespace_prefixes:
if state:
raise SAXNotSupportedException(
"expat does not report namespace prefixes")
else:
raise SAXNotRecognizedException(
"Feature '%s' not recognized" % name)
def getProperty(self, name):
if name == handler.property_lexical_handler:
return self._lex_handler_prop
elif name == property_interning_dict:
return self._interning
elif name == property_xml_string:
if self._parser:
if hasattr(self._parser, "GetInputContext"):
return self._parser.GetInputContext()
else:
raise SAXNotRecognizedException(
"This version of expat does not support getting"
" the XML string")
else:
raise SAXNotSupportedException(
"XML string cannot be returned when not parsing")
raise SAXNotRecognizedException("Property '%s' not recognized" % name)
def setProperty(self, name, value):
if name == handler.property_lexical_handler:
self._lex_handler_prop = value
if self._parsing:
self._reset_lex_handler_prop()
elif name == property_interning_dict:
self._interning = value
elif name == property_xml_string:
raise SAXNotSupportedException("Property '%s' cannot be set" %
name)
else:
raise SAXNotRecognizedException("Property '%s' not recognized" %
name)
# IncrementalParser methods
def feed(self, data, isFinal = 0):
if not self._parsing:
self.reset()
self._parsing = 1
self._cont_handler.startDocument()
try:
# The isFinal parameter is internal to the expat reader.
# If it is set to true, expat will check validity of the entire
# document. When feeding chunks, they are not normally final -
# except when invoked from close.
self._parser.Parse(data, isFinal)
except expat.error as e:
exc = SAXParseException(expat.ErrorString(e.code), e, self)
# FIXME: when to invoke error()?
self._err_handler.fatalError(exc)
def close(self):
if self._entity_stack:
# If we are completing an external entity, do nothing here
return
self.feed("", isFinal = 1)
self._cont_handler.endDocument()
self._parsing = 0
# break cycle created by expat handlers pointing to our methods
self._parser = None
bs = self._source.getByteStream()
if bs is not None:
bs.close()
def _reset_cont_handler(self):
self._parser.ProcessingInstructionHandler = \
self._cont_handler.processingInstruction
self._parser.CharacterDataHandler = self._cont_handler.characters
def _reset_lex_handler_prop(self):
lex = self._lex_handler_prop
parser = self._parser
if lex is None:
parser.CommentHandler = None
parser.StartCdataSectionHandler = None
parser.EndCdataSectionHandler = None
parser.StartDoctypeDeclHandler = None
parser.EndDoctypeDeclHandler = None
else:
parser.CommentHandler = lex.comment
parser.StartCdataSectionHandler = lex.startCDATA
parser.EndCdataSectionHandler = lex.endCDATA
parser.StartDoctypeDeclHandler = self.start_doctype_decl
parser.EndDoctypeDeclHandler = lex.endDTD
def reset(self):
if self._namespaces:
self._parser = expat.ParserCreate(self._source.getEncoding(), " ",
intern=self._interning)
self._parser.namespace_prefixes = 1
self._parser.StartElementHandler = self.start_element_ns
self._parser.EndElementHandler = self.end_element_ns
else:
self._parser = expat.ParserCreate(self._source.getEncoding(),
intern = self._interning)
self._parser.StartElementHandler = self.start_element
self._parser.EndElementHandler = self.end_element
self._reset_cont_handler()
self._parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl
self._parser.NotationDeclHandler = self.notation_decl
self._parser.StartNamespaceDeclHandler = self.start_namespace_decl
self._parser.EndNamespaceDeclHandler = self.end_namespace_decl
self._decl_handler_prop = None
if self._lex_handler_prop:
self._reset_lex_handler_prop()
# self._parser.DefaultHandler =
# self._parser.DefaultHandlerExpand =
# self._parser.NotStandaloneHandler =
self._parser.ExternalEntityRefHandler = self.external_entity_ref
try:
self._parser.SkippedEntityHandler = self.skipped_entity_handler
except AttributeError:
# This pyexpat does not support SkippedEntity
pass
self._parser.SetParamEntityParsing(
expat.XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE)
self._parsing = 0
self._entity_stack = []
# Locator methods
def getColumnNumber(self):
if self._parser is None:
return None
return self._parser.ErrorColumnNumber
def getLineNumber(self):
if self._parser is None:
return 1
return self._parser.ErrorLineNumber
def getPublicId(self):
return self._source.getPublicId()
def getSystemId(self):
return self._source.getSystemId()
# event handlers
def start_element(self, name, attrs):
self._cont_handler.startElement(name, AttributesImpl(attrs))
def end_element(self, name):
self._cont_handler.endElement(name)
def start_element_ns(self, name, attrs):
pair = name.split()
if len(pair) == 1:
# no namespace
pair = (None, name)
elif len(pair) == 3:
pair = pair[0], pair[1]
else:
# default namespace
pair = tuple(pair)
newattrs = {}
qnames = {}
for (aname, value) in attrs.items():
parts = aname.split()
length = len(parts)
if length == 1:
# no namespace
qname = aname
apair = (None, aname)
elif length == 3:
qname = "%s:%s" % (parts[2], parts[1])
apair = parts[0], parts[1]
else:
# default namespace
qname = parts[1]
apair = tuple(parts)
newattrs[apair] = value
qnames[apair] = qname
self._cont_handler.startElementNS(pair, None,
AttributesNSImpl(newattrs, qnames))
def end_element_ns(self, name):
pair = name.split()
if len(pair) == 1:
pair = (None, name)
elif len(pair) == 3:
pair = pair[0], pair[1]
else:
pair = tuple(pair)
self._cont_handler.endElementNS(pair, None)
# this is not used (call directly to ContentHandler)
def processing_instruction(self, target, data):
self._cont_handler.processingInstruction(target, data)
# this is not used (call directly to ContentHandler)
def character_data(self, data):
self._cont_handler.characters(data)
def start_namespace_decl(self, prefix, uri):
self._cont_handler.startPrefixMapping(prefix, uri)
def end_namespace_decl(self, prefix):
self._cont_handler.endPrefixMapping(prefix)
def start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
self._lex_handler_prop.startDTD(name, pubid, sysid)
def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
self._dtd_handler.unparsedEntityDecl(name, pubid, sysid, notation_name)
def notation_decl(self, name, base, sysid, pubid):
self._dtd_handler.notationDecl(name, pubid, sysid)
def external_entity_ref(self, context, base, sysid, pubid):
if not self._external_ges:
return 1
source = self._ent_handler.resolveEntity(pubid, sysid)
source = saxutils.prepare_input_source(source,
self._source.getSystemId() or
"")
self._entity_stack.append((self._parser, self._source))
self._parser = self._parser.ExternalEntityParserCreate(context)
self._source = source
try:
xmlreader.IncrementalParser.parse(self, source)
except:
return 0 # FIXME: save error info here?
(self._parser, self._source) = self._entity_stack[-1]
del self._entity_stack[-1]
return 1
def skipped_entity_handler(self, name, is_pe):
if is_pe:
# The SAX spec requires to report skipped PEs with a '%'
name = '%'+name
self._cont_handler.skippedEntity(name)
# ---
def create_parser(*args, **kwargs):
return ExpatParser(*args, **kwargs)
# ---
if __name__ == "__main__":
import xml.sax.saxutils
p = create_parser()
p.setContentHandler(xml.sax.saxutils.XMLGenerator())
p.setErrorHandler(xml.sax.ErrorHandler())
p.parse("http://www.ibiblio.org/xml/examples/shakespeare/hamlet.xml")
| bsd-3-clause | 8,051,308,389,293,693,000 | 34.153477 | 79 | 0.601542 | false |
ericlink/adms-server | playframework-dist/play-1.1/python/Lib/compiler/syntax.py | 25 | 1490 | """Check for errors in the AST.
The Python parser does not catch all syntax errors. Others, like
assignments with invalid targets, are caught in the code generation
phase.
The compiler package catches some errors in the transformer module.
But it seems clearer to write checkers that use the AST to detect
errors.
"""
from compiler import ast, walk
def check(tree, multi=None):
v = SyntaxErrorChecker(multi)
walk(tree, v)
return v.errors
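# Illustrative use (assumes ``source`` holds Python source text parsed with
# the old ``compiler`` package):
#   errors = check(compiler.parse(source), multi=1)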
class SyntaxErrorChecker:
"""A visitor to find syntax errors in the AST."""
def __init__(self, multi=None):
"""Create new visitor object.
If optional argument multi is not None, then print messages
for each error rather than raising a SyntaxError for the
first.
"""
self.multi = multi
self.errors = 0
def error(self, node, msg):
self.errors = self.errors + 1
if self.multi is not None:
print "%s:%s: %s" % (node.filename, node.lineno, msg)
else:
raise SyntaxError, "%s (%s:%s)" % (msg, node.filename, node.lineno)
def visitAssign(self, node):
# the transformer module handles many of these
pass
## for target in node.nodes:
## if isinstance(target, ast.AssList):
## if target.lineno is None:
## target.lineno = node.lineno
## self.error(target, "can't assign to list comprehension")
| mit | -8,033,178,810,418,655,000 | 30.391304 | 79 | 0.608054 | false |
ESOedX/edx-platform | openedx/core/djangoapps/enrollments/errors.py | 75 | 1335 | """All Error Types pertaining to Enrollment."""
class CourseEnrollmentError(Exception):
"""Generic Course Enrollment Error.
Describes any error that may occur when reading or updating enrollment information for a user or a course.
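    Illustrative use by callers (the ``data`` contents are hypothetical):
        raise CourseEnrollmentClosedError("Enrollment is closed",
                                          data={'course_id': course_id})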
"""
def __init__(self, msg, data=None):
super(CourseEnrollmentError, self).__init__(msg)
# Corresponding information to help resolve the error.
self.data = data
class UserNotFoundError(CourseEnrollmentError):
pass
class CourseEnrollmentClosedError(CourseEnrollmentError):
pass
class CourseEnrollmentFullError(CourseEnrollmentError):
pass
class CourseEnrollmentExistsError(CourseEnrollmentError):
enrollment = None
def __init__(self, message, enrollment):
super(CourseEnrollmentExistsError, self).__init__(message)
self.enrollment = enrollment
class CourseModeNotFoundError(CourseEnrollmentError):
"""The requested course mode could not be found."""
pass
class EnrollmentNotFoundError(CourseEnrollmentError):
"""The requested enrollment could not be found."""
pass
class EnrollmentApiLoadError(CourseEnrollmentError):
"""The data API could not be loaded."""
pass
class InvalidEnrollmentAttribute(CourseEnrollmentError):
"""Enrollment Attributes could not be validated"""
pass
| agpl-3.0 | -7,400,700,213,435,550,000 | 24.188679 | 110 | 0.734082 | false |
DmitryYurov/BornAgain | Tests/Functional/Python/PyCore/transform_cube.py | 2 | 7597 | """
Test of rotation/positioning of a simple cubic particle. The original particle is compared with the one obtained after rotation/translation.
"""
from __future__ import print_function
import os, sys, unittest
import utils
from libBornAgainCore import *
class RotationsCubeTest(unittest.TestCase):
"""
    Test of rotations and translations of a simple cube in a three-layer system
"""
def get_sample(self, formfactor, rot = None, pos = None, layout_rot = None, layout_pos = None, add_to="air"):
mAmbience = HomogeneousMaterial("Air", 0.0, 0.0)
mParticle = HomogeneousMaterial("Particle", 6e-4, 2e-8)
        mMiddle = HomogeneousMaterial("MiddleLayer", 5e-5, 2e-8)
mSubstrate = HomogeneousMaterial("Substrate", 6e-6, 2e-8)
particle = Particle(mParticle, formfactor)
if pos:
particle.setPosition(pos)
if rot:
particle.setRotation(rot)
layout = ParticleLayout()
if layout_rot and layout_pos:
layout.addParticle(particle, 1.0, layout_pos, layout_rot)
elif layout_rot and not layout_pos:
layout.addParticle(particle, 1.0, kvector_t(0, 0, 0), layout_rot)
elif not layout_rot and layout_pos:
layout.addParticle(particle, 1.0, layout_pos)
else:
layout.addParticle(particle)
air_layer = Layer(mAmbience)
middle_layer = Layer(mSubstrate, 50.0)
substrate = Layer(mSubstrate)
if add_to == "air":
air_layer.addLayout(layout)
else:
middle_layer.addLayout(layout)
multi_layer = MultiLayer()
multi_layer.addLayer(air_layer)
multi_layer.addLayer(middle_layer)
multi_layer.addLayer(substrate)
return multi_layer
def get_result(self, data, add_to="air"):
ff = data[0]
rot = data[1]
pos = data[2]
layout_rot = data[3]
layout_pos = data[4]
sample = self.get_sample(ff, rot, pos, layout_rot, layout_pos, add_to)
# simulation = self.get_simulation(sample)
simulation = utils.get_simulation_MiniGISAS(sample)
simulation.runSimulation()
return simulation.result()
def get_difference(self, reference_data, test_data, add_to="air"):
intensity = self.get_result(test_data, add_to)
return RelativeDifference(reference_data, intensity)
def testRotationZ(self):
"""
Cube is Z-rotated either through setRotation method or through particle layout. The result is compared
with unrotated cube.
"""
box = FormFactorBox(10, 10, 10)
data_to_test = [
# ff rot pos layout_rot layout_pos
(box, None, None, None, None), # reference
(box, RotationZ(90.*degree), None, None, None), # rotating particle
(box, RotationZ(-90.*degree), None, None, None),
(box, RotationZ(180.*degree), None, None, None),
(box, None, None, RotationZ(90.*degree), None), # rotating through layout
(box, RotationZ(45.*degree), None, RotationZ(45.*degree), None), # cumulative rotation
]
reference_data = self.get_result(data_to_test[0])
isSuccess = True
for i in range(1, len(data_to_test)):
diff = self.get_difference(reference_data, data_to_test[i])
print("{0} #{1} diff {2:.2e}".format(self.testRotationZ.__name__, i, diff))
if(diff > 1e-10) : isSuccess=False
self.assertTrue(isSuccess)
def testRotationY(self):
"""
Cube is Y-rotated either through setRotation method or through particle layout. Additional translation is
applied if necessary. The result is compared with unrotated cube.
"""
box = FormFactorBox(10, 10, 10)
data_to_test = [
# ff rot pos layout_rot layout_pos
(box, None, None, None, None), # reference
(box, RotationY(90.*degree), kvector_t(0,0,5.0), None, None), # rotating and translating
(box, None, None, RotationY(90.*degree), kvector_t(0,0,5.0)), # rotating and translating
(box, RotationY(90.*degree), None, None, kvector_t(0,0,5.0)), # rotating and translating
(box, RotationY(45.*degree), kvector_t(0,0,0.0), RotationY(45.*degree), kvector_t(0,0,5.0)), # rotating and translating
]
reference_data = self.get_result(data_to_test[0])
isSuccess = True
for i in range(1, len(data_to_test)):
diff = self.get_difference(reference_data, data_to_test[i])
print("{0} #{1} diff {2:.2e}".format(self.testRotationY.__name__, i, diff))
if(diff > 1e-10) : isSuccess=False
self.assertTrue(isSuccess)
def testRotationX(self):
"""
        Cube is X-rotated either through setRotation method or through particle layout. Additional translation is
applied if necessary. The result is compared with unrotated cube.
"""
box = FormFactorBox(10, 10, 10)
data_to_test = [
# ff rot pos layout_rot layout_pos
(box, None, None, None, None), # reference
(box, RotationX(90.*degree), kvector_t(0,0,5.0), None, None), # rotating and translating
(box, None, None, RotationX(90.*degree), kvector_t(0,0,5.0)), # rotating and translating
(box, RotationX(90.*degree), None, None, kvector_t(0,0,5.0)), # rotating and translating
(box, RotationX(45.*degree), kvector_t(0,0,0.0), RotationX(45.*degree), kvector_t(0,0,5.0)), # rotating and translating
]
reference_data = self.get_result(data_to_test[0])
isSuccess = True
for i in range(1, len(data_to_test)):
diff = self.get_difference(reference_data, data_to_test[i])
print("{0} #{1} diff {2:.2e}".format(self.testRotationX.__name__, i, diff))
if(diff > 1e-10) : isSuccess=False
self.assertTrue(isSuccess)
def testRotationsInMiddleLayer(self):
"""
"""
box = FormFactorBox(10, 10, 10)
data_to_test = [
# ff rot pos layout_rot layout_pos
(box, None, kvector_t(0,0,-25.0), None, None), # reference
(box, RotationX(90.*degree), kvector_t(0,0,-20.0), None, None), # rotating and translating
]
reference_data = self.get_result(data_to_test[0], "add_to_middle")
isSuccess = True
for i in range(1, len(data_to_test)):
diff = self.get_difference(reference_data, data_to_test[i], "add_to_middle")
            print("{0} #{1} diff {2:.2e}".format(self.testRotationsInMiddleLayer.__name__, i, diff))
if(diff > 1e-10) : isSuccess=False
self.assertTrue(isSuccess)
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | -9,185,068,778,408,198,000 | 43.426901 | 137 | 0.533763 | false |
markYoungH/chromium.src | components/crash/tools/dmp2minidump.py | 129 | 1353 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A tool to extract minidumps from dmp crash dumps."""
import os
import sys
from cgi import parse_multipart
def ProcessDump(dump_file, minidump_file):
"""Extracts the part of the dump file that minidump_stackwalk can read.
  The dump files generated by the breakpad integration are multi-part form
  data that include the minidump as a file attachment.
Args:
dump_file: the dump file that needs to be processed.
minidump_file: the file to write the minidump to.
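  Example (illustrative file names):
    ProcessDump('chrome-crash.dmp', 'chrome-crash.minidump')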
"""
try:
dump = open(dump_file, 'rb')
boundary = dump.readline().strip()[2:]
data = parse_multipart(dump, {'boundary': boundary})
except:
print 'Failed to read dmp file %s' % dump_file
return
if not 'upload_file_minidump' in data:
print 'Could not find minidump file in dump.'
return
f = open(minidump_file, 'w')
f.write("\r\n".join(data['upload_file_minidump']))
f.close()
def main():
if len(sys.argv) != 3:
print 'Usage: %s [dmp file] [minidump]' % sys.argv[0]
print ''
print 'Extracts the minidump stored in the crash dump file'
return 1
ProcessDump(sys.argv[1], sys.argv[2])
if '__main__' == __name__:
sys.exit(main())
| bsd-3-clause | -4,361,319,377,686,584,300 | 25.529412 | 75 | 0.678492 | false |
hdinsight/hue | desktop/core/ext-py/boto-2.38.0/boto/beanstalk/wrapper.py | 153 | 1078 | """Wraps layer1 api methods and converts layer1 dict responses to objects."""
from boto.beanstalk.layer1 import Layer1
import boto.beanstalk.response
from boto.exception import BotoServerError
import boto.beanstalk.exception as exception
def beanstalk_wrapper(func, name):
def _wrapped_low_level_api(*args, **kwargs):
try:
response = func(*args, **kwargs)
except BotoServerError as e:
raise exception.simple(e)
# Turn 'this_is_a_function_name' into 'ThisIsAFunctionNameResponse'.
cls_name = ''.join([part.capitalize() for part in name.split('_')]) + 'Response'
cls = getattr(boto.beanstalk.response, cls_name)
return cls(response)
return _wrapped_low_level_api
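# Layer1Wrapper (below) routes every Layer1 method through this wrapper, so
# that, for example, Layer1Wrapper().describe_environments() (assuming the
# usual boto credentials are configured) returns a
# boto.beanstalk.response.DescribeEnvironmentsResponse object rather than the
# raw dict produced by Layer1.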
class Layer1Wrapper(object):
def __init__(self, *args, **kwargs):
self.api = Layer1(*args, **kwargs)
def __getattr__(self, name):
try:
return beanstalk_wrapper(getattr(self.api, name), name)
except AttributeError:
raise AttributeError("%s has no attribute %r" % (self, name))
| apache-2.0 | 4,463,652,206,866,657,300 | 36.172414 | 88 | 0.655844 | false |
kiran/bart-sign | venv/lib/python2.7/site-packages/untangle.py | 2 | 4047 | #!/usr/bin/env python
"""
untangle
Converts xml to python objects.
The only method you need to call is parse()
Partially inspired by xml2obj
(http://code.activestate.com/recipes/149368-xml2obj/)
Author: Christian Stefanescu (http://0chris.com)
License: MIT License - http://www.opensource.org/licenses/mit-license.php
"""
import os
from xml.sax import make_parser, handler
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
__version__ = '1.1.0'
class Element():
"""
Representation of an XML element.
"""
def __init__(self, name, attributes):
self._name = name
self._attributes = attributes
self.children = []
self.is_root = False
self.cdata = ''
def add_child(self, element):
self.children.append(element)
def add_cdata(self, cdata):
self.cdata = self.cdata + cdata
def get_attribute(self, key):
return self._attributes.get(key)
def get_elements(self, name=None):
if name:
return [e for e in self.children if e._name == name]
else:
return self.children
def __getitem__(self, key):
return self.get_attribute(key)
def __getattr__(self, key):
matching_children = [x for x in self.children if x._name == key]
if matching_children:
if len(matching_children) == 1:
self.__dict__[key] = matching_children[0]
return matching_children[0]
else:
self.__dict__[key] = matching_children
return matching_children
else:
raise IndexError('Unknown key <%s>' % key)
def __iter__(self):
yield self
def __str__(self):
return (
"Element <%s> with attributes %s and children %s" %
(self._name, self._attributes, self.children)
)
def __repr__(self):
return (
"Element(name = %s, attributes = %s, cdata = %s)" %
(self._name, self._attributes, self.cdata)
)
def __nonzero__(self):
return self.is_root or self._name is not None
def __eq__(self, val):
return self.cdata == val
def __dir__(self):
children_names = [x._name for x in self.children]
return children_names
class Handler(handler.ContentHandler):
"""
SAX handler which creates the Python object structure out of ``Element``s
"""
def __init__(self):
self.root = Element(None, None)
self.root.is_root = True
self.elements = []
def startElement(self, name, attributes):
name = name.replace('-', '_')
name = name.replace('.', '_')
name = name.replace(':', '_')
attrs = dict()
for k, v in attributes.items():
attrs[k] = v
element = Element(name, attrs)
if len(self.elements) > 0:
self.elements[-1].add_child(element)
else:
self.root.add_child(element)
self.elements.append(element)
def endElement(self, name):
self.elements.pop()
def characters(self, cdata):
self.elements[-1].add_cdata(cdata)
def parse(filename):
"""
Interprets the given string as a filename, URL or XML data string,
parses it and returns a Python object which represents the given
document.
Raises ``ValueError`` if the argument is None / empty string.
Raises ``xml.sax.SAXParseException`` if something goes wrong
    during parsing.
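    Example (illustrative):
        obj = parse('<root><child name="a"/></root>')
        obj.root.child['name']  # -> 'a'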
"""
if filename is None or filename.strip() == '':
raise ValueError('parse() takes a filename, URL or XML string')
parser = make_parser()
sax_handler = Handler()
parser.setContentHandler(sax_handler)
if os.path.exists(filename) or is_url(filename):
parser.parse(filename)
else:
parser.parse(StringIO(filename))
return sax_handler.root
def is_url(string):
return string.startswith('http://') or string.startswith('https://')
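# --- Editor's usage sketch (hedged addition, not part of the original module) ---
# A minimal example of the parse() entry point described in the module docstring.
# The XML string, tag names and attribute below are illustrative assumptions.
if __name__ == '__main__':
    _doc = parse('<root><child name="a">hello</child></root>')
    print(_doc.root.child['name'])  # expected: a
    print(_doc.root.child.cdata)    # expected: hello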
# vim: set expandtab ts=4 sw=4:
| mit | 4,501,389,996,569,703,400 | 25.801325 | 77 | 0.590067 | false |
auferack08/edx-platform | cms/djangoapps/contentstore/tests/test_permissions.py | 28 | 5759 | """
Test CRUD for authorization.
"""
import copy
from django.contrib.auth.models import User
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from contentstore.tests.utils import AjaxEnabledTestClient
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from contentstore.utils import reverse_url, reverse_course_url
from student.roles import CourseInstructorRole, CourseStaffRole, OrgStaffRole, OrgInstructorRole
from contentstore.views.access import has_course_access
from student import auth
class TestCourseAccess(ModuleStoreTestCase):
"""
Course-based access (as opposed to access of a non-course xblock)
"""
def setUp(self):
"""
Create a staff user and log them in (creating the client).
Create a pool of users w/o granting them any permissions
"""
user_password = super(TestCourseAccess, self).setUp()
self.client = AjaxEnabledTestClient()
self.client.login(username=self.user.username, password=user_password)
# create a course via the view handler which has a different strategy for permissions than the factory
self.course_key = SlashSeparatedCourseKey('myu', 'mydept.mycourse', 'myrun')
course_url = reverse_url('course_handler')
self.client.ajax_post(course_url,
{
'org': self.course_key.org,
'number': self.course_key.course,
'display_name': 'My favorite course',
'run': self.course_key.run,
}
)
self.users = self._create_users()
def _create_users(self):
"""
Create 8 users and return them
"""
users = []
for i in range(8):
username = "user{}".format(i)
email = "test+user{}@edx.org".format(i)
user = User.objects.create_user(username, email, 'foo')
user.is_active = True
user.save()
users.append(user)
return users
def tearDown(self):
"""
Reverse the setup
"""
self.client.logout()
ModuleStoreTestCase.tearDown(self)
def test_get_all_users(self):
"""
Test getting all authors for a course where their permissions run the gamut of allowed group
types.
"""
        # first check that the course creator has explicit access (don't use has_access as is_staff
# will trump the actual test)
self.assertTrue(
CourseInstructorRole(self.course_key).has_user(self.user),
"Didn't add creator as instructor."
)
users = copy.copy(self.users)
# doesn't use role.users_with_role b/c it's verifying the roles.py behavior
user_by_role = {}
# add the misc users to the course in different groups
for role in [CourseInstructorRole, CourseStaffRole, OrgStaffRole, OrgInstructorRole]:
user_by_role[role] = []
# Org-based roles are created via org name, rather than course_key
if (role is OrgStaffRole) or (role is OrgInstructorRole):
group = role(self.course_key.org)
else:
group = role(self.course_key)
# NOTE: this loop breaks the roles.py abstraction by purposely assigning
# users to one of each possible groupname in order to test that has_course_access
# and remove_user work
user = users.pop()
group.add_users(user)
user_by_role[role].append(user)
self.assertTrue(has_course_access(user, self.course_key), "{} does not have access".format(user))
course_team_url = reverse_course_url('course_team_handler', self.course_key)
response = self.client.get_html(course_team_url)
for role in [CourseInstructorRole, CourseStaffRole]: # Global and org-based roles don't appear on this page
for user in user_by_role[role]:
self.assertContains(response, user.email)
# test copying course permissions
copy_course_key = SlashSeparatedCourseKey('copyu', 'copydept.mycourse', 'myrun')
for role in [CourseInstructorRole, CourseStaffRole, OrgStaffRole, OrgInstructorRole]:
if (role is OrgStaffRole) or (role is OrgInstructorRole):
auth.add_users(
self.user,
role(copy_course_key.org),
*role(self.course_key.org).users_with_role()
)
else:
auth.add_users(
self.user,
role(copy_course_key),
*role(self.course_key).users_with_role()
)
# verify access in copy course and verify that removal from source course w/ the various
# groupnames works
for role in [CourseInstructorRole, CourseStaffRole, OrgStaffRole, OrgInstructorRole]:
for user in user_by_role[role]:
# forcefully decache the groups: premise is that any real request will not have
# multiple objects repr the same user but this test somehow uses different instance
# in above add_users call
if hasattr(user, '_roles'):
del user._roles
self.assertTrue(has_course_access(user, copy_course_key), "{} no copy access".format(user))
if (role is OrgStaffRole) or (role is OrgInstructorRole):
auth.remove_users(self.user, role(self.course_key.org), user)
else:
auth.remove_users(self.user, role(self.course_key), user)
self.assertFalse(has_course_access(user, self.course_key), "{} remove didn't work".format(user))
| agpl-3.0 | -5,100,271,570,129,240,000 | 42.300752 | 116 | 0.611217 | false |
X-DataInitiative/tick | tick/survival/simu_sccs.py | 2 | 26354 | # License: BSD 3 clause
from operator import itemgetter
import numpy as np
import scipy.sparse as sps
from scipy.sparse import csr_matrix
from tick.base.simulation import Simu
from tick.hawkes import SimuHawkesExpKernels, SimuHawkesMulti
from tick.preprocessing import LongitudinalFeaturesLagger
from itertools import permutations
from copy import deepcopy
from scipy.stats import beta, norm
class SimuSCCS(Simu):
"""Simulation of a Self Control Case Series (SCCS) model. This simulator can
produce exposure (features), outcomes (labels) and censoring data.
The features matrices are a `n_cases` list of numpy arrays (dense case) or
csr_matrices (sparse case) of shape `(n_intervals, n_features)` containing
exposures to each feature.
Exposure can take two forms:
    - short repeated exposures (`multiple_exposures`): in that case, each column of the
    numpy arrays or csr matrices can contain multiple ones, each one representing an
    exposure for a particular time bucket.
    - infinite unique exposures (`single_exposure`): in that case, each column of the
    numpy arrays or csr matrices can only contain a single one, corresponding to the
    starting date of the exposure.
Parameters
----------
n_cases : `int`
Number of cases to generate. A case is a sample who experience at
least one adverse event.
n_intervals : `int`
Number of time intervals used to generate features and outcomes.
n_features : `int`
Number of features to simulate for each case.
n_lags : `numpy.ndarray`, shape=(n_features,), dtype="uint64"
Number of lags per feature. The model will regress labels on the
last observed values of the features over their corresponding
`n_lags` time intervals. `n_lags` values must be between 0 and
`n_intervals` - 1.
    exposure_type : {'single_exposure', 'multiple_exposures'}, default='single_exposure'
        Either 'single_exposure' for infinite unique exposures or 'multiple_exposures' for
        short repeated exposures.
distribution : {'multinomial', 'poisson'}, default='multinomial'
Distribution used to generate the outcomes. In the 'multinomial'
case, the Poisson process used to generate the events is conditioned
by total the number event per sample, which is set to be equal to
one. In that case, the simulation matches exactly the SCCS model
hypotheses. In the 'poisson' case, the outcomes are generated from a
Poisson process, which can result in more than one outcome tick per
sample. In this case, the first event is kept, and the other are
discarded.
sparse : `boolean`, default=True
Generate sparse or dense features.
censoring_prob : `float`, default=0.
Probability that a sample is censored. Should be in [0, 1]. If 0, no
censoring is applied. When > 0, SimuSCCS simulates a censoring vector.
In that case, the features and outcomes are simulated, then right-censored
according to the simulated censoring dates.
censoring_scale : `float`, default=None
The number of censored time intervals are drawn from a Poisson
distribution with intensity equal to `censoring_scale`. The higher,
the more intervals will be censored. If None, no censoring is
applied.
coeffs : `list` containing `numpy.ndarray`, default=None
Can be used to provide your own set of coefficients. Element `i` of
the list should be a 1-d `numpy.ndarray` of shape (n_lags + 1), where
`n_lags[i]` is the number of lags associated to feature `i`.
If set to None, the simulator will generate coefficients randomly.
hawkes_exp_kernels : `SimuHawkesExpKernels`, default=None
Features are simulated with exponential kernel Hawkes processes.
This parameter can be used to specify your own kernels (see
`SimuHawkesExpKernels` documentation). If None, random kernels
are generated. The same kernels are used to generate features for
the whole generated population.
n_correlations : `int`, default=0
If `hawkes_exp_kernels` is None, random kernels are generated. This
parameter controls the number of non-null non-diagonal kernels.
batch_size : `int`, default=None
When generating outcomes with Poisson distribution, the simulator will
discard samples to which no event has occurred. In this case, the
simulator generate successive batches of samples, until it reaches
a total of n_samples. This parameter can be used to set the batch size.
seed : `int`, default=None
The seed of the random number generator
verbose : `bool`, default=True
If True, print things
Examples
--------
>>> import numpy as np
>>> from tick.survival import SimuSCCS
>>> n_lags = np.repeat(2, 2).astype('uint64')
>>> sim = SimuSCCS(n_cases=5, n_intervals=3, n_features=2, n_lags=n_lags,
... seed=42, sparse=False, exposure_type="multiple_exposures",
... verbose=False)
>>> features, labels, outcomes, censoring, _coeffs = sim.simulate()
>>> print(features)
[array([[0., 0.],
[1., 0.],
[1., 1.]]), array([[1., 0.],
[1., 0.],
[1., 1.]]), array([[1., 1.],
[1., 1.],
[1., 1.]]), array([[0., 0.],
[1., 1.],
[1., 0.]]), array([[1., 0.],
[0., 0.],
[0., 0.]])]
>>> print(censoring)
[3 3 3 3 3]
>>> print(_coeffs)
[array([ 0.54738557, -0.15109073, 0.71345739]), array([ 1.67633284, -0.25656871, -0.25655065])]
"""
_const_attr = [
# user defined parameters
'_exposure_type',
'_outcome_distribution',
'_censoring_prob',
'_censoring_scale', # redundant with prob ?
'_batch_size',
'_distribution',
'_n_lags',
# user defined or computed attributes
'_hawkes_exp_kernel',
'_coeffs',
'_time_drift',
'_features_offset'
]
_attrinfos = {key: {'writable': False} for key in _const_attr}
_attrinfos['hawkes_obj'] = {'writable': True}
def __init__(
self,
n_cases,
n_intervals,
n_features,
n_lags,
time_drift=None,
exposure_type="single_exposure",
distribution="multinomial",
sparse=True,
censoring_prob=0,
censoring_scale=None,
coeffs=None,
hawkes_exp_kernels=None,
n_correlations=0,
batch_size=None,
seed=None,
verbose=True,
):
super(SimuSCCS, self).__init__(seed, verbose)
self.n_cases = n_cases
self.n_intervals = n_intervals
self.n_features = n_features
self._features_offset = None
self._n_lags = None
self.n_lags = n_lags
self.sparse = sparse
self.hawkes_obj = None
# attributes with restricted value range
self._exposure_type = None
self.exposure_type = exposure_type
self._distribution = None
self.distribution = distribution
self._censoring_prob = 0
self.censoring_prob = censoring_prob
self._censoring_scale = None
self.censoring_scale = censoring_scale if censoring_scale \
else n_intervals / 4
self._coeffs = None
self.coeffs = coeffs
self._batch_size = None
self.batch_size = batch_size
# TODO later: add properties for these parameters
self.n_correlations = n_correlations
self.hawkes_exp_kernels = hawkes_exp_kernels
        self.time_drift = time_drift  # function(t), used only for the paper, allows adding a baseline
# TODO: make a property from this baseline
def simulate(self):
""" Launch simulation of the data.
Returns
-------
features : `list` of `numpy.ndarray` or `list` of `scipy.sparse.csr_matrix`,
list of length n_cases, each element of the list of
shape=(n_intervals, n_features)
The list of features matrices.
labels : `list` of `numpy.ndarray`,
list of length n_cases, each element of the list of
shape=(n_intervals,)
The labels vector
censoring : `numpy.ndarray`, shape=(n_cases,), dtype="uint64"
The censoring data. This array should contain integers in
[1, n_intervals]. If the value i is equal to n_intervals, then there
is no censoring for sample i. If censoring = c < n_intervals, then
the observation of sample i is stopped at interval c, that is, the
row c - 1 of the corresponding matrix. The last n_intervals - c rows
are then set to 0.
_coeffs : `numpy.ndarray`, shape=(n_features * (n_lags + 1),)
The coefficients used to simulate the data.
"""
return Simu.simulate(self)
def _simulate(self):
""" Loop to generate batches of samples until n_cases is reached.
"""
n_lagged_features = int(self.n_lags.sum() + self.n_features)
n_cases = self.n_cases
if self._coeffs is None:
self._set('_coeffs', np.random.normal(1e-3, 1.1,
n_lagged_features))
features = []
censored_features = []
outcomes = []
censoring = np.zeros((n_cases,), dtype="uint64")
cases_count = 0
while cases_count < n_cases:
_features, _censored_features, _outcomes, _censoring, _n_samples = \
self._simulate_batch()
n_new_cases = _n_samples
c = cases_count
cases_count += n_new_cases
n = n_cases - c if cases_count >= n_cases else n_new_cases
features.extend(_features[0:n])
censored_features.extend(_censored_features[0:n])
outcomes.extend(_outcomes[0:n])
censoring[c:c + n] = _censoring[0:n]
return features, censored_features, outcomes, censoring, self.coeffs
def _simulate_batch(self):
"""Simulate a batch of samples, each of which have ticked at least once.
"""
_features, _n_samples = self.simulate_features(self.batch_size)
_censored_features = deepcopy(_features)
_outcomes = self.simulate_outcomes(_features)
_censoring = np.full((_n_samples,), self.n_intervals, dtype="uint64")
if self.censoring_prob:
censored_idx = np.random.binomial(1, self.censoring_prob,
size=_n_samples).astype("bool")
_censoring[censored_idx] -= np.random.poisson(
lam=self.censoring_scale,
size=(censored_idx.sum(),)).astype("uint64")
_censored_features = self._censor_array_list(
_censored_features, _censoring)
_outcomes = self._censor_array_list(_outcomes, _censoring)
_features, _censored_features, _outcomes, censoring, _ = \
self._filter_non_positive_samples(_features, _censored_features,
_outcomes, _censoring)
return _features, _censored_features, _outcomes, _censoring, _n_samples
def simulate_features(self, n_samples):
"""Simulates features, either `single_exposure` or
`multiple_exposures` exposures.
"""
if self.exposure_type == "single_exposure":
features, n_samples = self._sim_single_exposures()
elif self.exposure_type == "multiple_exposures":
sim = self._sim_multiple_exposures_exposures
features = [sim() for _ in range(n_samples)]
return features, n_samples
# We just keep it for the tests now
# TODO later: need to be improved with Hawkes
def _sim_multiple_exposures_exposures(self):
features = np.zeros((self.n_intervals, self.n_features))
while features.sum() == 0:
# Make sure we do not generate empty feature matrix
features = np.random.randint(
2,
size=(self.n_intervals, self.n_features),
).astype("float64")
if self.sparse:
features = csr_matrix(features, dtype="float64")
return features
def _sim_single_exposures(self):
if not self.sparse:
raise ValueError(
"'single_exposure' exposures can only be simulated"
" as sparse feature matrices")
if self.hawkes_exp_kernels is None:
np.random.seed(self.seed)
decays = .002 * np.ones((self.n_features, self.n_features))
baseline = 4 * np.random.random(self.n_features) / self.n_intervals
mult = np.random.random(self.n_features)
adjacency = mult * np.eye(self.n_features)
if self.n_correlations:
comb = list(permutations(range(self.n_features), 2))
if len(comb) > 1:
idx = itemgetter(*np.random.choice(
range(len(comb)), size=self.n_correlations,
replace=False))
comb = idx(comb)
for i, j in comb:
adjacency[i, j] = np.random.random(1)
self._set(
'hawkes_exp_kernels',
SimuHawkesExpKernels(adjacency=adjacency, decays=decays,
baseline=baseline, verbose=False,
seed=self.seed))
self.hawkes_exp_kernels.adjust_spectral_radius(
.1) # TODO later: allow to change this parameter
hawkes = SimuHawkesMulti(self.hawkes_exp_kernels,
n_simulations=self.n_cases)
run_time = self.n_intervals
hawkes.end_time = [1 * run_time for _ in range(self.n_cases)]
dt = 1
self.hawkes_exp_kernels.track_intensity(dt)
hawkes.simulate()
self.hawkes_obj = hawkes
features = [[
np.min(np.floor(f)) if len(f) > 0 else -1 for f in patient_events
] for patient_events in hawkes.timestamps]
features = [
self.to_coo(feat, (run_time, self.n_features)) for feat in features
]
# Make sure patients have at least one exposure?
exposures_filter = itemgetter(
*[i for i, f in enumerate(features) if f.sum() > 0])
features = exposures_filter(features)
n_samples = len(features)
return features, n_samples
def simulate_outcomes(self, features):
features, _, _ = LongitudinalFeaturesLagger(n_lags=self.n_lags). \
fit_transform(features)
if self.distribution == "poisson":
# TODO later: add self.max_n_events to allow for multiple outcomes
# In this case, the multinomial simulator should use this arg too
outcomes = self._simulate_poisson_outcomes(features, self._coeffs)
else:
outcomes = self._simulate_multinomial_outcomes(
features, self._coeffs)
return outcomes
def _simulate_multinomial_outcomes(self, features, coeffs):
baseline = np.zeros(self.n_intervals)
if self.time_drift is not None:
baseline = self.time_drift(np.arange(self.n_intervals))
dot_products = [baseline + feat.dot(coeffs) for feat in features]
def sim(dot_prod):
dot_prod -= dot_prod.max()
probabilities = np.exp(dot_prod) / np.sum(np.exp(dot_prod))
outcomes = np.random.multinomial(1, probabilities)
return outcomes.astype("int32")
return [sim(dot_product) for dot_product in dot_products]
def _simulate_poisson_outcomes(self, features, coeffs,
first_tick_only=True):
baseline = np.zeros(self.n_intervals)
if self.time_drift is not None:
baseline = self.time_drift(np.arange(self.n_intervals))
dot_products = [baseline + feat.dot(coeffs) for feat in features]
def sim(dot_prod):
dot_prod -= dot_prod.max()
intensities = np.exp(dot_prod)
ticks = np.random.poisson(lam=intensities)
if first_tick_only:
first_tick_idx = np.argmax(ticks > 0)
y = np.zeros_like(intensities)
if ticks.sum() > 0:
y[first_tick_idx] = 1
else:
y = ticks
return y.astype("int32")
return [sim(dot_product) for dot_product in dot_products]
@staticmethod
def _censor_array_list(array_list, censoring):
"""Apply censoring to a list of array-like objects. Works for 1-D or 2-D
arrays, as long as the first axis represents the time.
Parameters
----------
array_list : list of numpy.ndarray or list of scipy.sparse.csr_matrix,
list of length n_cases, each element of the list of
shape=(n_intervals, n_features) or shape=(n_intervals,)
The list of features matrices.
censoring : `numpy.ndarray`, shape=(n_cases, 1), dtype="uint64"
The censoring data. This array should contain integers in
[1, n_intervals]. If the value i is equal to n_intervals, then there
is no censoring for sample i. If censoring = c < n_intervals, then
the observation of sample i is stopped at interval c, that is, the
row c - 1 of the corresponding matrix. The last n_intervals - c rows
are then set to 0.
Returns
-------
output : `[numpy.ndarrays]` or `[csr_matrices]`, shape=(n_intervals, n_features)
The list of censored features matrices.
"""
def censor(array, censoring_idx):
if sps.issparse(array):
array = array.tolil()
array[int(censoring_idx):] = 0
array = array.tocsr()
else:
array[int(censoring_idx):] = 0
return array
return [censor(l, censoring[i]) for i, l in enumerate(array_list)]
@staticmethod
def _filter_non_positive_samples(features, features_censored, labels,
censoring):
"""Filter out samples which don't tick in the observation window.
Parameters
----------
features : list of numpy.ndarray or list of scipy.sparse.csr_matrix,
list of length n_cases, each element of the list of
shape=(n_intervals, n_features)
The list of features matrices.
labels : list of numpy.ndarray of length n_cases,
shape=(n_intervals,)
The list of labels matrices.
"""
nnz = [np.nonzero(arr)[0] for arr in labels]
positive_sample_idx = [i for i, arr in enumerate(nnz) if len(arr) > 0]
if len(positive_sample_idx) == 0:
raise ValueError("There should be at least one positive sample per\
batch. Try to increase batch_size.")
pos_samples_filter = itemgetter(*positive_sample_idx)
return list(pos_samples_filter(features)),\
list(pos_samples_filter(features_censored)),\
list(pos_samples_filter(labels)),\
censoring[positive_sample_idx],\
np.array(positive_sample_idx, dtype="uint64")
@staticmethod
def to_coo(feat, shape):
feat = np.array(feat)
cols = np.where(feat >= 0)[0]
rows = np.array(feat[feat >= 0])
if len(cols) == 0:
cols = np.random.randint(0, shape[1], 1)
rows = np.random.randint(0, shape[0], 1)
data = np.ones_like(cols)
return csr_matrix((data, (rows, cols)), shape=shape, dtype="float64")
@property
def exposure_type(self):
return self._exposure_type
@exposure_type.setter
def exposure_type(self, value):
if value not in ["single_exposure", "multiple_exposures"]:
raise ValueError("exposure_type can be only 'single_exposure' or "
"'multiple_exposures'.")
self._set("_exposure_type", value)
@property
def distribution(self):
return self._distribution
@distribution.setter
def distribution(self, value):
if value not in ["multinomial", "poisson"]:
raise ValueError("distribution can be only 'multinomial' or "
"'poisson'.")
self._set("_distribution", value)
@property
def censoring_prob(self):
return self._censoring_prob
@censoring_prob.setter
def censoring_prob(self, value):
if value < 0 or value > 1:
raise ValueError("censoring_prob value should be in [0, 1].")
self._set("_censoring_prob", value)
@property
def censoring_scale(self):
return self._censoring_scale
@censoring_scale.setter
def censoring_scale(self, value):
if value < 0:
raise ValueError("censoring_scale should be greater than 0.")
self._set("_censoring_scale", value)
@property
def n_lags(self):
return self._n_lags
@n_lags.setter
def n_lags(self, value):
offsets = [0]
for l in value:
if l < 0:
raise ValueError('n_lags elements should be greater than or '
'equal to 0.')
offsets.append(offsets[-1] + l + 1)
self._set('_n_lags', value)
self._set('_features_offset', offsets)
@property
def coeffs(self):
value = list()
for i, l in enumerate(self.n_lags):
start = int(self._features_offset[i])
end = int(start + l + 1)
value.append(self._coeffs[start:end])
return value
@coeffs.setter
def coeffs(self, value):
if value is not None:
for i, c in enumerate(value):
if c.shape[0] != int(self.n_lags[i] + 1):
raise ValueError("Coeffs %i th element should be of shape\
(n_lags[%i] + 1),)" % (i, self.n_lags[i]))
value = np.hstack(value)
self._set("_coeffs", value)
@property
def batch_size(self):
return self._batch_size
@batch_size.setter
def batch_size(self, value):
if value is None and self.distribution == "multinomial":
self._set("_batch_size", self.n_cases)
elif value is None:
self._set("_batch_size", int(min(2000, self.n_cases)))
else:
self._set("_batch_size", int(value))
self._set("_batch_size", max(100, self.batch_size))
class CustomEffects:
def __init__(self, n_intervals):
"""Class provinding flexible relative incidence curves to be used as
coefficients in the `SimuSCCS` class.
Parameters
----------
n_intervals : `int`
Number of time intervals used to generate features and outcomes.
"""
self.n_intervals = n_intervals
self._curves_type_dict = {
1: (5, 1),
2: (2, 2),
3: (.5, .5),
4: (2, 5),
5: (1, 3)
}
def constant_effect(self, amplitude, cut=0):
"""Returns coefficients corresponding to a constant relative incidence
of value equal to `amplitude`. If `cut` is greater than 0, the relative
incidence will be null on [`cut`, `n_intervals`]
"""
risk_curve = np.ones(self.n_intervals) * amplitude
if cut > 0:
risk_curve[cut:] = 1
return risk_curve
def bell_shaped_effect(self, amplitude, width, lag=0, cut=0):
"""Returns coefficients corresponding to a bell shaped relative
incidence of max value equal to `amplitude`. If `cut` is greater than 0,
the relative incidence will be null on [`cut`, `n_intervals`]. The
effect starts at `lag` interval, and lasts `width` intervals.
"""
self._check_params(lag, width, amplitude, cut)
if width % 2 == 0:
width += 1
effect = norm(0, width / 5).pdf(np.arange(width) - int(width / 2))
return self._create_risk_curve(effect, amplitude, cut, width, lag)
def increasing_effect(self, amplitude, lag=0, cut=0, curvature_type=1):
"""Returns coefficients corresponding to an increasing relative
incidence of max value equal to `amplitude`. If `cut` is greater than 0,
the relative incidence will be null on [`cut`, `n_intervals`]. The
effect starts at `lag` interval, and lasts `width` intervals.
The parameter `curvature_type` controls the shape of the relative
incidence curve, it can take values in {1, 2, 3, 4, 5}.
"""
width = self.n_intervals
self._check_params(lag, width, amplitude, cut)
if curvature_type not in np.arange(5) + 1:
raise ValueError('curvature type should be in {1, 2, 3, 4, 5}')
a, b = self._curves_type_dict[curvature_type]
effect = beta(a, b).cdf(np.arange(width) / width)
return self._create_risk_curve(effect, amplitude, cut, width, lag)
def _check_params(self, lag, width, amplitude, cut):
if cut is not None and cut >= width:
raise ValueError('cut should be < width')
if lag > self.n_intervals:
raise ValueError('n_intervals should be > lag')
if amplitude <= 0:
raise ValueError('amplitude should be > 0')
def _create_risk_curve(self, effect, amplitude, cut, width, lag):
if cut:
effect = effect[:int(width - cut)]
end_effect = int(lag + width - cut)
if end_effect > self.n_intervals:
end_effect = self.n_intervals
effect = effect[:end_effect - lag]
M = effect.max()
m = effect.min()
effect = (effect - m) / (M - m)
effect *= (amplitude - 1)
risk_curve = np.ones(self.n_intervals)
risk_curve[lag:end_effect] += effect
return risk_curve
@staticmethod
def negative_effect(positive_effect):
return np.exp(-np.log(positive_effect))
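# --- Editor's usage sketch (hedged addition, not part of the original library) ---
# One plausible way to wire CustomEffects curves into SimuSCCS. Taking np.log of
# the relative-incidence curves as coefficients, and every parameter value below,
# are assumptions made for illustration only, not the library's documented defaults.
if __name__ == '__main__':
    n_intervals, n_features = 10, 2
    n_lags = np.repeat(n_intervals - 1, n_features).astype('uint64')
    curves = CustomEffects(n_intervals)
    coeffs = [np.log(curves.bell_shaped_effect(amplitude=2, width=4)),
              np.log(curves.constant_effect(amplitude=1.5))]
    sim = SimuSCCS(n_cases=50, n_intervals=n_intervals, n_features=n_features,
                   n_lags=n_lags, coeffs=coeffs, sparse=False,
                   exposure_type="multiple_exposures", verbose=False, seed=42)
    features, censored_features, labels, censoring, used_coeffs = sim.simulate()
    print(len(features), censoring.shape)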
| bsd-3-clause | -8,178,844,875,369,164,000 | 38.334328 | 101 | 0.587463 | false |
spreeker/democracygame | external_apps/docutils-snapshot/test/test_parsers/test_rst/test_directives/test_decorations.py | 19 | 1847 | #! /usr/bin/env python
# $Id: test_decorations.py 4667 2006-07-12 21:40:56Z wiemann $
# Author: David Goodger <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
Tests for the "header" & "footer" directives.
"""
from __init__ import DocutilsTestSupport
def suite():
s = DocutilsTestSupport.ParserTestSuite()
s.generateTests(totest)
return s
totest = {}
totest['headers'] = [
["""\
.. header:: a paragraph for the header
""",
"""\
<document source="test data">
<decoration>
<header>
<paragraph>
a paragraph for the header
"""],
["""\
.. header::
""",
"""\
<document source="test data">
<system_message level="3" line="1" source="test data" type="ERROR">
<paragraph>
Content block expected for the "header" directive; none found.
<literal_block xml:space="preserve">
.. header::
"""],
["""\
.. header:: first part of the header
.. header:: second part of the header
""",
"""\
<document source="test data">
<decoration>
<header>
<paragraph>
first part of the header
<paragraph>
second part of the header
"""],
]
totest['footers'] = [
["""\
.. footer:: a paragraph for the footer
""",
"""\
<document source="test data">
<decoration>
<footer>
<paragraph>
a paragraph for the footer
"""],
["""\
.. footer:: even if a footer is declared first
.. header:: the header appears first
""",
"""\
<document source="test data">
<decoration>
<header>
<paragraph>
the header appears first
<footer>
<paragraph>
even if a footer is declared first
"""],
]
if __name__ == '__main__':
import unittest
unittest.main(defaultTest='suite')
| bsd-3-clause | 5,365,550,794,846,383,000 | 20.229885 | 74 | 0.566865 | false |
pxzhenren/flask | tests/test_views.py | 155 | 4202 | # -*- coding: utf-8 -*-
"""
tests.views
~~~~~~~~~~~
Pluggable views.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import pytest
import flask
import flask.views
from werkzeug.http import parse_set_header
def common_test(app):
c = app.test_client()
assert c.get('/').data == b'GET'
assert c.post('/').data == b'POST'
assert c.put('/').status_code == 405
meths = parse_set_header(c.open('/', method='OPTIONS').headers['Allow'])
assert sorted(meths) == ['GET', 'HEAD', 'OPTIONS', 'POST']
def test_basic_view():
app = flask.Flask(__name__)
class Index(flask.views.View):
methods = ['GET', 'POST']
def dispatch_request(self):
return flask.request.method
app.add_url_rule('/', view_func=Index.as_view('index'))
common_test(app)
def test_method_based_view():
app = flask.Flask(__name__)
class Index(flask.views.MethodView):
def get(self):
return 'GET'
def post(self):
return 'POST'
app.add_url_rule('/', view_func=Index.as_view('index'))
common_test(app)
def test_view_patching():
app = flask.Flask(__name__)
class Index(flask.views.MethodView):
def get(self):
1 // 0
def post(self):
1 // 0
class Other(Index):
def get(self):
return 'GET'
def post(self):
return 'POST'
view = Index.as_view('index')
view.view_class = Other
app.add_url_rule('/', view_func=view)
common_test(app)
def test_view_inheritance():
app = flask.Flask(__name__)
class Index(flask.views.MethodView):
def get(self):
return 'GET'
def post(self):
return 'POST'
class BetterIndex(Index):
def delete(self):
return 'DELETE'
app.add_url_rule('/', view_func=BetterIndex.as_view('index'))
c = app.test_client()
meths = parse_set_header(c.open('/', method='OPTIONS').headers['Allow'])
assert sorted(meths) == ['DELETE', 'GET', 'HEAD', 'OPTIONS', 'POST']
def test_view_decorators():
app = flask.Flask(__name__)
def add_x_parachute(f):
def new_function(*args, **kwargs):
resp = flask.make_response(f(*args, **kwargs))
resp.headers['X-Parachute'] = 'awesome'
return resp
return new_function
class Index(flask.views.View):
decorators = [add_x_parachute]
def dispatch_request(self):
return 'Awesome'
app.add_url_rule('/', view_func=Index.as_view('index'))
c = app.test_client()
rv = c.get('/')
assert rv.headers['X-Parachute'] == 'awesome'
assert rv.data == b'Awesome'
def test_implicit_head():
app = flask.Flask(__name__)
class Index(flask.views.MethodView):
def get(self):
return flask.Response('Blub', headers={
'X-Method': flask.request.method
})
app.add_url_rule('/', view_func=Index.as_view('index'))
c = app.test_client()
rv = c.get('/')
assert rv.data == b'Blub'
assert rv.headers['X-Method'] == 'GET'
rv = c.head('/')
assert rv.data == b''
assert rv.headers['X-Method'] == 'HEAD'
def test_explicit_head():
app = flask.Flask(__name__)
class Index(flask.views.MethodView):
def get(self):
return 'GET'
def head(self):
return flask.Response('', headers={'X-Method': 'HEAD'})
app.add_url_rule('/', view_func=Index.as_view('index'))
c = app.test_client()
rv = c.get('/')
assert rv.data == b'GET'
rv = c.head('/')
assert rv.data == b''
assert rv.headers['X-Method'] == 'HEAD'
def test_endpoint_override():
app = flask.Flask(__name__)
app.debug = True
class Index(flask.views.View):
methods = ['GET', 'POST']
def dispatch_request(self):
return flask.request.method
app.add_url_rule('/', view_func=Index.as_view('index'))
with pytest.raises(AssertionError):
app.add_url_rule('/', view_func=Index.as_view('index'))
# But these tests should still pass. We just log a warning.
common_test(app)
| bsd-3-clause | -1,334,914,678,959,628,300 | 24.938272 | 76 | 0.566397 | false |
Frulko/AutobahnPython | examples/wamp/rpc/keyvalue/client.py | 12 | 1841 | ###############################################################################
##
## Copyright 2011 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import sys
from twisted.python import log
from twisted.internet import reactor
from twisted.internet.defer import Deferred, DeferredList
from autobahn.websocket import connectWS
from autobahn.wamp import WampClientFactory, WampClientProtocol
class KeyValueClientProtocol(WampClientProtocol):
def done(self, *args):
self.sendClose()
reactor.stop()
def show(self, key, value):
print key, value
def get(self, keys):
defs = []
for key in keys:
d = self.call("keyvalue:get", key).addCallback(lambda value, key = key: self.show(key, value))
defs.append(d)
return DeferredList(defs)
def onSessionOpen(self):
self.prefix("keyvalue", "http://example.com/simple/keyvalue#")
self.call("keyvalue:keys").addCallbacks(self.get).addCallback(self.done)
if __name__ == '__main__':
log.startLogging(sys.stdout)
factory = WampClientFactory("ws://localhost:9000")
factory.protocol = KeyValueClientProtocol
connectWS(factory)
reactor.run()
| apache-2.0 | -150,212,668,433,214,140 | 32.092593 | 103 | 0.62792 | false |
Godley/MuseParse | MuseParse/tests/testHandlers/testHandlePartsAndPiece.py | 1 | 8150 | import unittest
from MuseParse.classes.Input import MxmlParser
from MuseParse.classes.ObjectHierarchy.TreeClasses.PieceTree import PieceTree
class testSetupPiece(unittest.TestCase):
def setUp(self):
self.handler = MxmlParser.SetupPiece
self.tags = []
self.attrs = {}
self.chars = {}
self.data = {}
self.piece = PieceTree()
def testNoTags(self):
self.assertEqual(
None,
self.handler(
self.tags,
self.attrs,
self.chars,
self.piece,
self.data),
"ERROR: testNoTags failed: nothing should happen if there are no tags in list")
def testMetaExists(self):
self.assertFalse(
hasattr(
self.piece.GetItem(),
"meta"),
"ERROR: testMetaExists failed: meta should not be set in piece class at beginning of testing")
def testIrrelevantTag(self):
self.tags.append("lol")
self.assertEqual(
None,
self.handler(
self.tags,
self.attrs,
self.chars,
self.piece,
self.data),
"ERROR: irrelevant tag should do nothing in TestIrrelevance")
def testTitleTag(self):
self.tags.append("movement-title")
self.chars["movement-title"] = "hehehe"
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
self.assertTrue(hasattr(self.piece.GetItem(), "meta"),
"ERROR: Meta should exist in TestTitleTag")
self.assertEqual(
"hehehe",
self.piece.GetItem().meta.title,
"ERROR: title set incorrectly in TestTitleTag")
def testRightsTag(self):
self.tags.append("rights")
self.chars["rights"] = "lee"
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
self.assertTrue(hasattr(self.piece.GetItem(), "meta"))
self.assertEqual("lee ", self.piece.GetItem().meta.copyright)
def testCompTag(self):
self.tags.append("creator")
self.attrs["creator"] = {"type": "composer"}
self.chars["creator"] = "lee"
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
self.assertTrue(
hasattr(
self.piece.GetItem(),
"meta"),
"ERROR: meta should exist in piece class in TestCompTag")
self.assertEqual(
"lee",
self.piece.GetItem().meta.composer,
"ERROR: composer should match expected in TestCompTag")
def testTitleCompTag(self):
self.tags.append("creator")
self.attrs["creator"] = {"type": "composer"}
self.chars["creator"] = "lee"
self.chars["movement-title"] = "hello world"
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
self.assertTrue(
hasattr(
self.piece.GetItem().meta,
"composer"),
"ERROR: meta should have composer attrib in TestTitleCompTag")
self.assertEqual(
"lee",
self.piece.GetItem().meta.composer,
"ERROR: composer should match test in TestTitleCompTag")
self.tags.append("movement-title")
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
self.assertTrue(
hasattr(
self.piece.GetItem().meta,
"title"),
"ERROR: meta should have title in TestTitleCompTag")
self.assertEqual(
"hello world",
self.piece.GetItem().meta.title,
"ERROR: meta title set incorrectly in TestTitleCompTag")
class testHandlePart(unittest.TestCase):
def setUp(self):
self.handler = MxmlParser.UpdatePart
self.tags = []
self.chars = {}
self.attrs = {}
self.piece = PieceTree()
self.data = {}
def testNoData(self):
self.assertEqual(
None,
self.handler(
self.tags,
self.attrs,
self.chars,
self.piece,
self.data),
"ERROR: no tags should return none in TestNodata")
def testIrrelevantTag(self):
self.tags.append("wut")
MxmlParser.part_id = None
self.assertEqual(
None,
self.handler(
self.tags,
self.attrs,
self.chars,
self.piece,
self.data),
"ERROR: irrelevant tags should return none in TestIrrelevantTag")
def testScorePartTag(self):
MxmlParser.part_id = None
self.assertEqual(
None,
MxmlParser.part_id,
"ERROR: part_id not none in testScorePartTag")
self.tags.append("score-part")
self.attrs["score-part"] = {"id": "P1"}
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
self.assertEqual(1, len(self.piece.root.GetChildrenIndexes()))
def testPnameTag(self):
self.assertEqual(0, len(self.piece.root.GetChildrenIndexes()))
self.tags.append("score-part")
self.attrs["score-part"] = {"id": "P1"}
self.tags.append("part-name")
self.chars["part-name"] = "will"
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
self.assertEqual("will", self.piece.getPart("P1").GetItem().name)
def testPNameWithShortName(self):
self.assertEqual(0, len(self.piece.root.GetChildrenIndexes()))
self.tags.append("score-part")
self.attrs["score-part"] = {"id": "P1"}
self.tags.append("part-abbreviation")
self.chars["part-abbreviation"] = "w"
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
self.assertEqual("w", self.piece.getPart("P1").GetItem().shortname)
def testPartGroupOpen(self):
self.tags.append("part-group")
self.attrs["part-group"] = {"number": "1", "type": "start"}
self.tags.append("score-part")
self.attrs["score-part"] = {"id": "P1"}
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
self.tags.append("score-part")
self.attrs["score-part"] = {"id": "P2"}
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
self.assertEqual(["P1", "P2"], self.piece.getGroup(1))
def testPartGroupClose(self):
self.tags.append("part-group")
self.attrs["part-group"] = {"number": "1", "type": "start"}
self.tags.append("score-part")
self.attrs["score-part"] = {"id": "P1"}
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
self.tags.append("part-group")
self.attrs["part-group"] = {"number": "1", "type": "stop"}
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
self.tags.append("score-part")
self.attrs["score-part"] = {"id": "P2"}
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
self.assertEqual(["P1"], self.piece.getGroup(1))
class testRights(unittest.TestCase):
def setUp(self):
testSetupPiece.setUp(self)
self.data = {}
self.tags.append("credit")
self.tags.append("credit-type")
self.chars["credit-type"] = "rights"
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
def testRightsCredit(self):
self.tags.append("credit-words")
self.chars["credit-words"] = "copyright lol"
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
self.assertTrue(hasattr(self.piece.GetItem().meta, "copyright"))
def testRightsValue(self):
self.tags.append("credit-words")
self.chars["credit-words"] = "copyright lol"
self.handler(self.tags, self.attrs, self.chars, self.piece, self.data)
self.assertEqual(self.piece.GetItem().meta.copyright, "copyright lol")
| mit | -6,637,512,166,481,366,000 | 36.385321 | 106 | 0.578037 | false |
areski/django | tests/inspectdb/models.py | 208 | 2737 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class People(models.Model):
name = models.CharField(max_length=255)
parent = models.ForeignKey('self', models.CASCADE)
class Message(models.Model):
from_field = models.ForeignKey(People, models.CASCADE, db_column='from_id')
class PeopleData(models.Model):
people_pk = models.ForeignKey(People, models.CASCADE, primary_key=True)
ssn = models.CharField(max_length=11)
class PeopleMoreData(models.Model):
people_unique = models.ForeignKey(People, models.CASCADE, unique=True)
license = models.CharField(max_length=255)
class DigitsInColumnName(models.Model):
all_digits = models.CharField(max_length=11, db_column='123')
leading_digit = models.CharField(max_length=11, db_column='4extra')
leading_digits = models.CharField(max_length=11, db_column='45extra')
class SpecialName(models.Model):
field = models.IntegerField(db_column='field')
# Underscores
field_field_0 = models.IntegerField(db_column='Field_')
field_field_1 = models.IntegerField(db_column='Field__')
field_field_2 = models.IntegerField(db_column='__field')
# Other chars
prc_x = models.IntegerField(db_column='prc(%) x')
non_ascii = models.IntegerField(db_column='tamaño')
class Meta:
db_table = "inspectdb_special.table name"
class ColumnTypes(models.Model):
id = models.AutoField(primary_key=True)
big_int_field = models.BigIntegerField()
bool_field = models.BooleanField(default=False)
null_bool_field = models.NullBooleanField()
char_field = models.CharField(max_length=10)
null_char_field = models.CharField(max_length=10, blank=True, null=True)
comma_separated_int_field = models.CommaSeparatedIntegerField(max_length=99)
date_field = models.DateField()
date_time_field = models.DateTimeField()
decimal_field = models.DecimalField(max_digits=6, decimal_places=1)
email_field = models.EmailField()
file_field = models.FileField(upload_to="unused")
file_path_field = models.FilePathField()
float_field = models.FloatField()
int_field = models.IntegerField()
gen_ip_adress_field = models.GenericIPAddressField(protocol="ipv4")
pos_int_field = models.PositiveIntegerField()
pos_small_int_field = models.PositiveSmallIntegerField()
slug_field = models.SlugField()
small_int_field = models.SmallIntegerField()
text_field = models.TextField()
time_field = models.TimeField()
url_field = models.URLField()
class UniqueTogether(models.Model):
field1 = models.IntegerField()
field2 = models.CharField(max_length=10)
class Meta:
unique_together = ('field1', 'field2')
| bsd-3-clause | 4,449,508,348,190,260,700 | 34.532468 | 80 | 0.71345 | false |
jorik041/phantomjs | src/qt/qtbase/util/local_database/cldr2qlocalexml.py | 102 | 42691 | #!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
## Contact: http://www.qt-project.org/legal
##
## This file is part of the test suite of the Qt Toolkit.
##
## $QT_BEGIN_LICENSE:LGPL$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and Digia. For licensing terms and
## conditions see http://qt.digia.com/licensing. For further information
## use the contact form at http://qt.digia.com/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 as published by the Free Software
## Foundation and appearing in the file LICENSE.LGPL included in the
## packaging of this file. Please review the following information to
## ensure the GNU Lesser General Public License version 2.1 requirements
## will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, Digia gives you certain additional
## rights. These rights are described in the Digia Qt LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 3.0 as published by the Free Software
## Foundation and appearing in the file LICENSE.GPL included in the
## packaging of this file. Please review the following information to
## ensure the GNU General Public License version 3.0 requirements will be
## met: http://www.gnu.org/copyleft/gpl.html.
##
##
## $QT_END_LICENSE$
##
#############################################################################
import os
import sys
import enumdata
import xpathlite
from xpathlite import DraftResolution
from dateconverter import convert_date
from xml.sax.saxutils import escape, unescape
import re
findAlias = xpathlite.findAlias
findEntry = xpathlite.findEntry
findEntryInFile = xpathlite._findEntryInFile
findTagsInFile = xpathlite.findTagsInFile
def parse_number_format(patterns, data):
# this is a very limited parsing of the number format for currency only.
def skip_repeating_pattern(x):
p = x.replace('0', '#').replace(',', '').replace('.', '')
seen = False
result = ''
for c in p:
if c == '#':
if seen:
continue
seen = True
else:
seen = False
result = result + c
return result
patterns = patterns.split(';')
result = []
for pattern in patterns:
pattern = skip_repeating_pattern(pattern)
pattern = pattern.replace('#', "%1")
# according to http://www.unicode.org/reports/tr35/#Number_Format_Patterns
# there can be doubled or trippled currency sign, however none of the
# locales use that.
pattern = pattern.replace(u'\xa4', "%2")
pattern = pattern.replace("''", "###").replace("'", '').replace("###", "'")
pattern = pattern.replace('-', data['minus'])
pattern = pattern.replace('+', data['plus'])
result.append(pattern)
return result
def parse_list_pattern_part_format(pattern):
# this is a very limited parsing of the format for list pattern part only.
result = ""
result = pattern.replace("{0}", "%1")
result = result.replace("{1}", "%2")
result = result.replace("{2}", "%3")
return result
def ordStr(c):
if len(c) == 1:
return str(ord(c))
raise xpathlite.Error("Unable to handle value \"%s\"" % addEscapes(c))
return "##########"
# the following functions are supposed to fix the problem with QLocale
# returning a character instead of strings for QLocale::exponential()
# and others. So we fallback to default values in these cases.
def fixOrdStrMinus(c):
if len(c) == 1:
return str(ord(c))
return str(ord('-'))
def fixOrdStrPlus(c):
if len(c) == 1:
return str(ord(c))
return str(ord('+'))
def fixOrdStrExp(c):
if len(c) == 1:
return str(ord(c))
return str(ord('e'))
def fixOrdStrPercent(c):
if len(c) == 1:
return str(ord(c))
return str(ord('%'))
def fixOrdStrList(c):
if len(c) == 1:
return str(ord(c))
return str(ord(';'))
def generateLocaleInfo(path):
(dir_name, file_name) = os.path.split(path)
if not path.endswith(".xml"):
return {}
# skip legacy/compatibility ones
alias = findAlias(path)
if alias:
raise xpathlite.Error("alias to \"%s\"" % alias)
language_code = findEntryInFile(path, "identity/language", attribute="type")[0]
if language_code == 'root':
# just skip it
return {}
country_code = findEntryInFile(path, "identity/territory", attribute="type")[0]
script_code = findEntryInFile(path, "identity/script", attribute="type")[0]
variant_code = findEntryInFile(path, "identity/variant", attribute="type")[0]
# we do not support variants
# ### actually there is only one locale with variant: en_US_POSIX
# does anybody care about it at all?
if variant_code:
raise xpathlite.Error("we do not support variants (\"%s\")" % variant_code)
language_id = enumdata.languageCodeToId(language_code)
if language_id <= 0:
raise xpathlite.Error("unknown language code \"%s\"" % language_code)
language = enumdata.language_list[language_id][0]
script_id = enumdata.scriptCodeToId(script_code)
if script_id == -1:
raise xpathlite.Error("unknown script code \"%s\"" % script_code)
script = enumdata.script_list[script_id][0]
# we should handle fully qualified names with the territory
if not country_code:
return {}
country_id = enumdata.countryCodeToId(country_code)
if country_id <= 0:
raise xpathlite.Error("unknown country code \"%s\"" % country_code)
country = enumdata.country_list[country_id][0]
# So we say we accept only those values that have "contributed" or
# "approved" resolution. see http://www.unicode.org/cldr/process.html
# But we only respect the resolution for new datas for backward
# compatibility.
draft = DraftResolution.contributed
result = {}
result['language'] = language
result['script'] = script
result['country'] = country
result['language_code'] = language_code
result['country_code'] = country_code
result['script_code'] = script_code
result['variant_code'] = variant_code
result['language_id'] = language_id
result['script_id'] = script_id
result['country_id'] = country_id
supplementalPath = dir_name + "/../supplemental/supplementalData.xml"
currencies = findTagsInFile(supplementalPath, "currencyData/region[iso3166=%s]"%country_code);
result['currencyIsoCode'] = ''
result['currencyDigits'] = 2
result['currencyRounding'] = 1
if currencies:
for e in currencies:
if e[0] == 'currency':
tender = True
t = filter(lambda x: x[0] == 'tender', e[1])
if t and t[0][1] == 'false':
tender = False;
if tender and not filter(lambda x: x[0] == 'to', e[1]):
result['currencyIsoCode'] = filter(lambda x: x[0] == 'iso4217', e[1])[0][1]
break
if result['currencyIsoCode']:
t = findTagsInFile(supplementalPath, "currencyData/fractions/info[iso4217=%s]"%result['currencyIsoCode']);
if t and t[0][0] == 'info':
result['currencyDigits'] = int(filter(lambda x: x[0] == 'digits', t[0][1])[0][1])
result['currencyRounding'] = int(filter(lambda x: x[0] == 'rounding', t[0][1])[0][1])
numbering_system = None
try:
numbering_system = findEntry(path, "numbers/defaultNumberingSystem")
except:
pass
def findEntryDef(path, xpath, value=''):
try:
return findEntry(path, xpath)
except xpathlite.Error:
return value
def get_number_in_system(path, xpath, numbering_system):
if numbering_system:
try:
return findEntry(path, xpath + "[numberSystem=" + numbering_system + "]")
except xpathlite.Error:
# in CLDR 1.9 number system was refactored for numbers (but not for currency)
# so if previous findEntry doesn't work we should try this:
try:
return findEntry(path, xpath.replace("/symbols/", "/symbols[numberSystem=" + numbering_system + "]/"))
except xpathlite.Error:
# fallback to default
pass
return findEntry(path, xpath)
result['decimal'] = get_number_in_system(path, "numbers/symbols/decimal", numbering_system)
result['group'] = get_number_in_system(path, "numbers/symbols/group", numbering_system)
result['list'] = get_number_in_system(path, "numbers/symbols/list", numbering_system)
result['percent'] = get_number_in_system(path, "numbers/symbols/percentSign", numbering_system)
try:
numbering_systems = {}
for ns in findTagsInFile(cldr_dir + "/../supplemental/numberingSystems.xml", "numberingSystems"):
tmp = {}
id = ""
for data in ns[1:][0]: # ns looks like this: [u'numberingSystem', [(u'digits', u'0123456789'), (u'type', u'numeric'), (u'id', u'latn')]]
tmp[data[0]] = data[1]
if data[0] == u"id":
id = data[1]
numbering_systems[id] = tmp
result['zero'] = numbering_systems[numbering_system][u"digits"][0]
except e:
sys.stderr.write("Native zero detection problem:\n" + str(e) + "\n")
result['zero'] = get_number_in_system(path, "numbers/symbols/nativeZeroDigit", numbering_system)
result['minus'] = get_number_in_system(path, "numbers/symbols/minusSign", numbering_system)
result['plus'] = get_number_in_system(path, "numbers/symbols/plusSign", numbering_system)
result['exp'] = get_number_in_system(path, "numbers/symbols/exponential", numbering_system).lower()
result['quotationStart'] = findEntry(path, "delimiters/quotationStart")
result['quotationEnd'] = findEntry(path, "delimiters/quotationEnd")
result['alternateQuotationStart'] = findEntry(path, "delimiters/alternateQuotationStart")
result['alternateQuotationEnd'] = findEntry(path, "delimiters/alternateQuotationEnd")
result['listPatternPartStart'] = parse_list_pattern_part_format(findEntry(path, "listPatterns/listPattern/listPatternPart[start]"))
result['listPatternPartMiddle'] = parse_list_pattern_part_format(findEntry(path, "listPatterns/listPattern/listPatternPart[middle]"))
result['listPatternPartEnd'] = parse_list_pattern_part_format(findEntry(path, "listPatterns/listPattern/listPatternPart[end]"))
result['listPatternPartTwo'] = parse_list_pattern_part_format(findEntry(path, "listPatterns/listPattern/listPatternPart[2]"))
result['am'] = findEntry(path, "dates/calendars/calendar[gregorian]/dayPeriods/dayPeriodContext[format]/dayPeriodWidth[wide]/dayPeriod[am]", draft)
result['pm'] = findEntry(path, "dates/calendars/calendar[gregorian]/dayPeriods/dayPeriodContext[format]/dayPeriodWidth[wide]/dayPeriod[pm]", draft)
result['longDateFormat'] = convert_date(findEntry(path, "dates/calendars/calendar[gregorian]/dateFormats/dateFormatLength[full]/dateFormat/pattern"))
result['shortDateFormat'] = convert_date(findEntry(path, "dates/calendars/calendar[gregorian]/dateFormats/dateFormatLength[short]/dateFormat/pattern"))
result['longTimeFormat'] = convert_date(findEntry(path, "dates/calendars/calendar[gregorian]/timeFormats/timeFormatLength[full]/timeFormat/pattern"))
result['shortTimeFormat'] = convert_date(findEntry(path, "dates/calendars/calendar[gregorian]/timeFormats/timeFormatLength[short]/timeFormat/pattern"))
endonym = None
if country_code and script_code:
endonym = findEntryDef(path, "localeDisplayNames/languages/language[type=%s_%s_%s]" % (language_code, script_code, country_code))
if not endonym and script_code:
endonym = findEntryDef(path, "localeDisplayNames/languages/language[type=%s_%s]" % (language_code, script_code))
if not endonym and country_code:
endonym = findEntryDef(path, "localeDisplayNames/languages/language[type=%s_%s]" % (language_code, country_code))
if not endonym:
endonym = findEntryDef(path, "localeDisplayNames/languages/language[type=%s]" % (language_code))
result['language_endonym'] = endonym
result['country_endonym'] = findEntryDef(path, "localeDisplayNames/territories/territory[type=%s]" % (country_code))
currency_format = get_number_in_system(path, "numbers/currencyFormats/currencyFormatLength/currencyFormat/pattern", numbering_system)
currency_format = parse_number_format(currency_format, result)
result['currencyFormat'] = currency_format[0]
result['currencyNegativeFormat'] = ''
if len(currency_format) > 1:
result['currencyNegativeFormat'] = currency_format[1]
result['currencySymbol'] = ''
result['currencyDisplayName'] = ''
if result['currencyIsoCode']:
result['currencySymbol'] = findEntryDef(path, "numbers/currencies/currency[%s]/symbol" % result['currencyIsoCode'])
display_name_path = "numbers/currencies/currency[%s]/displayName" % result['currencyIsoCode']
result['currencyDisplayName'] \
= findEntryDef(path, display_name_path) + ";" \
+ findEntryDef(path, display_name_path + "[count=zero]") + ";" \
+ findEntryDef(path, display_name_path + "[count=one]") + ";" \
+ findEntryDef(path, display_name_path + "[count=two]") + ";" \
+ findEntryDef(path, display_name_path + "[count=few]") + ";" \
+ findEntryDef(path, display_name_path + "[count=many]") + ";" \
+ findEntryDef(path, display_name_path + "[count=other]") + ";"
standalone_long_month_path = "dates/calendars/calendar[gregorian]/months/monthContext[stand-alone]/monthWidth[wide]/month"
result['standaloneLongMonths'] \
= findEntry(path, standalone_long_month_path + "[1]") + ";" \
+ findEntry(path, standalone_long_month_path + "[2]") + ";" \
+ findEntry(path, standalone_long_month_path + "[3]") + ";" \
+ findEntry(path, standalone_long_month_path + "[4]") + ";" \
+ findEntry(path, standalone_long_month_path + "[5]") + ";" \
+ findEntry(path, standalone_long_month_path + "[6]") + ";" \
+ findEntry(path, standalone_long_month_path + "[7]") + ";" \
+ findEntry(path, standalone_long_month_path + "[8]") + ";" \
+ findEntry(path, standalone_long_month_path + "[9]") + ";" \
+ findEntry(path, standalone_long_month_path + "[10]") + ";" \
+ findEntry(path, standalone_long_month_path + "[11]") + ";" \
+ findEntry(path, standalone_long_month_path + "[12]") + ";"
standalone_short_month_path = "dates/calendars/calendar[gregorian]/months/monthContext[stand-alone]/monthWidth[abbreviated]/month"
result['standaloneShortMonths'] \
= findEntry(path, standalone_short_month_path + "[1]") + ";" \
+ findEntry(path, standalone_short_month_path + "[2]") + ";" \
+ findEntry(path, standalone_short_month_path + "[3]") + ";" \
+ findEntry(path, standalone_short_month_path + "[4]") + ";" \
+ findEntry(path, standalone_short_month_path + "[5]") + ";" \
+ findEntry(path, standalone_short_month_path + "[6]") + ";" \
+ findEntry(path, standalone_short_month_path + "[7]") + ";" \
+ findEntry(path, standalone_short_month_path + "[8]") + ";" \
+ findEntry(path, standalone_short_month_path + "[9]") + ";" \
+ findEntry(path, standalone_short_month_path + "[10]") + ";" \
+ findEntry(path, standalone_short_month_path + "[11]") + ";" \
+ findEntry(path, standalone_short_month_path + "[12]") + ";"
standalone_narrow_month_path = "dates/calendars/calendar[gregorian]/months/monthContext[stand-alone]/monthWidth[narrow]/month"
result['standaloneNarrowMonths'] \
= findEntry(path, standalone_narrow_month_path + "[1]") + ";" \
+ findEntry(path, standalone_narrow_month_path + "[2]") + ";" \
+ findEntry(path, standalone_narrow_month_path + "[3]") + ";" \
+ findEntry(path, standalone_narrow_month_path + "[4]") + ";" \
+ findEntry(path, standalone_narrow_month_path + "[5]") + ";" \
+ findEntry(path, standalone_narrow_month_path + "[6]") + ";" \
+ findEntry(path, standalone_narrow_month_path + "[7]") + ";" \
+ findEntry(path, standalone_narrow_month_path + "[8]") + ";" \
+ findEntry(path, standalone_narrow_month_path + "[9]") + ";" \
+ findEntry(path, standalone_narrow_month_path + "[10]") + ";" \
+ findEntry(path, standalone_narrow_month_path + "[11]") + ";" \
+ findEntry(path, standalone_narrow_month_path + "[12]") + ";"
long_month_path = "dates/calendars/calendar[gregorian]/months/monthContext[format]/monthWidth[wide]/month"
result['longMonths'] \
= findEntry(path, long_month_path + "[1]") + ";" \
+ findEntry(path, long_month_path + "[2]") + ";" \
+ findEntry(path, long_month_path + "[3]") + ";" \
+ findEntry(path, long_month_path + "[4]") + ";" \
+ findEntry(path, long_month_path + "[5]") + ";" \
+ findEntry(path, long_month_path + "[6]") + ";" \
+ findEntry(path, long_month_path + "[7]") + ";" \
+ findEntry(path, long_month_path + "[8]") + ";" \
+ findEntry(path, long_month_path + "[9]") + ";" \
+ findEntry(path, long_month_path + "[10]") + ";" \
+ findEntry(path, long_month_path + "[11]") + ";" \
+ findEntry(path, long_month_path + "[12]") + ";"
short_month_path = "dates/calendars/calendar[gregorian]/months/monthContext[format]/monthWidth[abbreviated]/month"
result['shortMonths'] \
= findEntry(path, short_month_path + "[1]") + ";" \
+ findEntry(path, short_month_path + "[2]") + ";" \
+ findEntry(path, short_month_path + "[3]") + ";" \
+ findEntry(path, short_month_path + "[4]") + ";" \
+ findEntry(path, short_month_path + "[5]") + ";" \
+ findEntry(path, short_month_path + "[6]") + ";" \
+ findEntry(path, short_month_path + "[7]") + ";" \
+ findEntry(path, short_month_path + "[8]") + ";" \
+ findEntry(path, short_month_path + "[9]") + ";" \
+ findEntry(path, short_month_path + "[10]") + ";" \
+ findEntry(path, short_month_path + "[11]") + ";" \
+ findEntry(path, short_month_path + "[12]") + ";"
narrow_month_path = "dates/calendars/calendar[gregorian]/months/monthContext[format]/monthWidth[narrow]/month"
result['narrowMonths'] \
= findEntry(path, narrow_month_path + "[1]") + ";" \
+ findEntry(path, narrow_month_path + "[2]") + ";" \
+ findEntry(path, narrow_month_path + "[3]") + ";" \
+ findEntry(path, narrow_month_path + "[4]") + ";" \
+ findEntry(path, narrow_month_path + "[5]") + ";" \
+ findEntry(path, narrow_month_path + "[6]") + ";" \
+ findEntry(path, narrow_month_path + "[7]") + ";" \
+ findEntry(path, narrow_month_path + "[8]") + ";" \
+ findEntry(path, narrow_month_path + "[9]") + ";" \
+ findEntry(path, narrow_month_path + "[10]") + ";" \
+ findEntry(path, narrow_month_path + "[11]") + ";" \
+ findEntry(path, narrow_month_path + "[12]") + ";"
long_day_path = "dates/calendars/calendar[gregorian]/days/dayContext[format]/dayWidth[wide]/day"
result['longDays'] \
= findEntry(path, long_day_path + "[sun]") + ";" \
+ findEntry(path, long_day_path + "[mon]") + ";" \
+ findEntry(path, long_day_path + "[tue]") + ";" \
+ findEntry(path, long_day_path + "[wed]") + ";" \
+ findEntry(path, long_day_path + "[thu]") + ";" \
+ findEntry(path, long_day_path + "[fri]") + ";" \
+ findEntry(path, long_day_path + "[sat]") + ";"
short_day_path = "dates/calendars/calendar[gregorian]/days/dayContext[format]/dayWidth[abbreviated]/day"
result['shortDays'] \
= findEntry(path, short_day_path + "[sun]") + ";" \
+ findEntry(path, short_day_path + "[mon]") + ";" \
+ findEntry(path, short_day_path + "[tue]") + ";" \
+ findEntry(path, short_day_path + "[wed]") + ";" \
+ findEntry(path, short_day_path + "[thu]") + ";" \
+ findEntry(path, short_day_path + "[fri]") + ";" \
+ findEntry(path, short_day_path + "[sat]") + ";"
narrow_day_path = "dates/calendars/calendar[gregorian]/days/dayContext[format]/dayWidth[narrow]/day"
result['narrowDays'] \
= findEntry(path, narrow_day_path + "[sun]") + ";" \
+ findEntry(path, narrow_day_path + "[mon]") + ";" \
+ findEntry(path, narrow_day_path + "[tue]") + ";" \
+ findEntry(path, narrow_day_path + "[wed]") + ";" \
+ findEntry(path, narrow_day_path + "[thu]") + ";" \
+ findEntry(path, narrow_day_path + "[fri]") + ";" \
+ findEntry(path, narrow_day_path + "[sat]") + ";"
standalone_long_day_path = "dates/calendars/calendar[gregorian]/days/dayContext[stand-alone]/dayWidth[wide]/day"
result['standaloneLongDays'] \
= findEntry(path, standalone_long_day_path + "[sun]") + ";" \
+ findEntry(path, standalone_long_day_path + "[mon]") + ";" \
+ findEntry(path, standalone_long_day_path + "[tue]") + ";" \
+ findEntry(path, standalone_long_day_path + "[wed]") + ";" \
+ findEntry(path, standalone_long_day_path + "[thu]") + ";" \
+ findEntry(path, standalone_long_day_path + "[fri]") + ";" \
+ findEntry(path, standalone_long_day_path + "[sat]") + ";"
standalone_short_day_path = "dates/calendars/calendar[gregorian]/days/dayContext[stand-alone]/dayWidth[abbreviated]/day"
result['standaloneShortDays'] \
= findEntry(path, standalone_short_day_path + "[sun]") + ";" \
+ findEntry(path, standalone_short_day_path + "[mon]") + ";" \
+ findEntry(path, standalone_short_day_path + "[tue]") + ";" \
+ findEntry(path, standalone_short_day_path + "[wed]") + ";" \
+ findEntry(path, standalone_short_day_path + "[thu]") + ";" \
+ findEntry(path, standalone_short_day_path + "[fri]") + ";" \
+ findEntry(path, standalone_short_day_path + "[sat]") + ";"
standalone_narrow_day_path = "dates/calendars/calendar[gregorian]/days/dayContext[stand-alone]/dayWidth[narrow]/day"
result['standaloneNarrowDays'] \
= findEntry(path, standalone_narrow_day_path + "[sun]") + ";" \
+ findEntry(path, standalone_narrow_day_path + "[mon]") + ";" \
+ findEntry(path, standalone_narrow_day_path + "[tue]") + ";" \
+ findEntry(path, standalone_narrow_day_path + "[wed]") + ";" \
+ findEntry(path, standalone_narrow_day_path + "[thu]") + ";" \
+ findEntry(path, standalone_narrow_day_path + "[fri]") + ";" \
+ findEntry(path, standalone_narrow_day_path + "[sat]") + ";"
return result
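# Helper used by unicodeStr() below: bytes >= 128 are emitted as "\xNN"
# escapes so the generated XML stays 7-bit clean (e.g. the UTF-8 encoding of
# the euro sign U+20AC becomes "\xe2\x82\xac").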
def addEscapes(s):
result = ''
for c in s:
n = ord(c)
if n < 128:
result += c
else:
result += "\\x"
result += "%02x" % (n)
return result
def unicodeStr(s):
utf8 = s.encode('utf-8')
return "<size>" + str(len(utf8)) + "</size><data>" + addEscapes(utf8) + "</data>"
def usage():
print "Usage: cldr2qlocalexml.py <path-to-cldr-main>"
sys.exit()
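# integrateWeekData() folds the CLDR supplemental week data (first day of the
# week, weekend start and end) into the already built locale_database, falling
# back to the world default territory "001" when a country has no entry.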
def integrateWeekData(filePath):
if not filePath.endswith(".xml"):
return {}
monFirstDayIn = findEntryInFile(filePath, "weekData/firstDay[day=mon]", attribute="territories")[0].split(" ")
tueFirstDayIn = findEntryInFile(filePath, "weekData/firstDay[day=tue]", attribute="territories")[0].split(" ")
wedFirstDayIn = findEntryInFile(filePath, "weekData/firstDay[day=wed]", attribute="territories")[0].split(" ")
thuFirstDayIn = findEntryInFile(filePath, "weekData/firstDay[day=thu]", attribute="territories")[0].split(" ")
friFirstDayIn = findEntryInFile(filePath, "weekData/firstDay[day=fri]", attribute="territories")[0].split(" ")
satFirstDayIn = findEntryInFile(filePath, "weekData/firstDay[day=sat]", attribute="territories")[0].split(" ")
sunFirstDayIn = findEntryInFile(filePath, "weekData/firstDay[day=sun]", attribute="territories")[0].split(" ")
monWeekendStart = findEntryInFile(filePath, "weekData/weekendStart[day=mon]", attribute="territories")[0].split(" ")
tueWeekendStart = findEntryInFile(filePath, "weekData/weekendStart[day=tue]", attribute="territories")[0].split(" ")
wedWeekendStart = findEntryInFile(filePath, "weekData/weekendStart[day=wed]", attribute="territories")[0].split(" ")
thuWeekendStart = findEntryInFile(filePath, "weekData/weekendStart[day=thu]", attribute="territories")[0].split(" ")
friWeekendStart = findEntryInFile(filePath, "weekData/weekendStart[day=fri]", attribute="territories")[0].split(" ")
satWeekendStart = findEntryInFile(filePath, "weekData/weekendStart[day=sat]", attribute="territories")[0].split(" ")
sunWeekendStart = findEntryInFile(filePath, "weekData/weekendStart[day=sun]", attribute="territories")[0].split(" ")
monWeekendEnd = findEntryInFile(filePath, "weekData/weekendEnd[day=mon]", attribute="territories")[0].split(" ")
tueWeekendEnd = findEntryInFile(filePath, "weekData/weekendEnd[day=tue]", attribute="territories")[0].split(" ")
wedWeekendEnd = findEntryInFile(filePath, "weekData/weekendEnd[day=wed]", attribute="territories")[0].split(" ")
thuWeekendEnd = findEntryInFile(filePath, "weekData/weekendEnd[day=thu]", attribute="territories")[0].split(" ")
friWeekendEnd = findEntryInFile(filePath, "weekData/weekendEnd[day=fri]", attribute="territories")[0].split(" ")
satWeekendEnd = findEntryInFile(filePath, "weekData/weekendEnd[day=sat]", attribute="territories")[0].split(" ")
sunWeekendEnd = findEntryInFile(filePath, "weekData/weekendEnd[day=sun]", attribute="territories")[0].split(" ")
firstDayByCountryCode = {}
for countryCode in monFirstDayIn:
firstDayByCountryCode[countryCode] = "mon"
for countryCode in tueFirstDayIn:
firstDayByCountryCode[countryCode] = "tue"
for countryCode in wedFirstDayIn:
firstDayByCountryCode[countryCode] = "wed"
for countryCode in thuFirstDayIn:
firstDayByCountryCode[countryCode] = "thu"
for countryCode in friFirstDayIn:
firstDayByCountryCode[countryCode] = "fri"
for countryCode in satFirstDayIn:
firstDayByCountryCode[countryCode] = "sat"
for countryCode in sunFirstDayIn:
firstDayByCountryCode[countryCode] = "sun"
weekendStartByCountryCode = {}
for countryCode in monWeekendStart:
weekendStartByCountryCode[countryCode] = "mon"
for countryCode in tueWeekendStart:
weekendStartByCountryCode[countryCode] = "tue"
for countryCode in wedWeekendStart:
weekendStartByCountryCode[countryCode] = "wed"
for countryCode in thuWeekendStart:
weekendStartByCountryCode[countryCode] = "thu"
for countryCode in friWeekendStart:
weekendStartByCountryCode[countryCode] = "fri"
for countryCode in satWeekendStart:
weekendStartByCountryCode[countryCode] = "sat"
for countryCode in sunWeekendStart:
weekendStartByCountryCode[countryCode] = "sun"
weekendEndByCountryCode = {}
for countryCode in monWeekendEnd:
weekendEndByCountryCode[countryCode] = "mon"
for countryCode in tueWeekendEnd:
weekendEndByCountryCode[countryCode] = "tue"
for countryCode in wedWeekendEnd:
weekendEndByCountryCode[countryCode] = "wed"
for countryCode in thuWeekendEnd:
weekendEndByCountryCode[countryCode] = "thu"
for countryCode in friWeekendEnd:
weekendEndByCountryCode[countryCode] = "fri"
for countryCode in satWeekendEnd:
weekendEndByCountryCode[countryCode] = "sat"
for countryCode in sunWeekendEnd:
weekendEndByCountryCode[countryCode] = "sun"
for (key,locale) in locale_database.iteritems():
countryCode = locale['country_code']
if countryCode in firstDayByCountryCode:
locale_database[key]['firstDayOfWeek'] = firstDayByCountryCode[countryCode]
else:
locale_database[key]['firstDayOfWeek'] = firstDayByCountryCode["001"]
if countryCode in weekendStartByCountryCode:
locale_database[key]['weekendStart'] = weekendStartByCountryCode[countryCode]
else:
locale_database[key]['weekendStart'] = weekendStartByCountryCode["001"]
if countryCode in weekendEndByCountryCode:
locale_database[key]['weekendEnd'] = weekendEndByCountryCode[countryCode]
else:
locale_database[key]['weekendEnd'] = weekendEndByCountryCode["001"]
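# Main driver: parse every per-locale XML file from the CLDR "main" directory,
# build locale_database, merge in the supplemental week data, then print the
# complete QLocale XML database to stdout.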
if len(sys.argv) != 2:
usage()
cldr_dir = sys.argv[1]
if not os.path.isdir(cldr_dir):
usage()
cldr_files = os.listdir(cldr_dir)
locale_database = {}
for file in cldr_files:
try:
l = generateLocaleInfo(cldr_dir + "/" + file)
if not l:
sys.stderr.write("skipping file \"" + file + "\"\n")
continue
except xpathlite.Error as e:
sys.stderr.write("skipping file \"%s\" (%s)\n" % (file, str(e)))
continue
locale_database[(l['language_id'], l['script_id'], l['country_id'], l['variant_code'])] = l
integrateWeekData(cldr_dir+"/../supplemental/supplementalData.xml")
locale_keys = locale_database.keys()
locale_keys.sort()
cldr_version = 'unknown'
ldml = open(cldr_dir+"/../dtd/ldml.dtd", "r")
for line in ldml:
if 'version cldrVersion CDATA #FIXED' in line:
cldr_version = line.split('"')[1]
print "<localeDatabase>"
print " <version>" + cldr_version + "</version>"
print " <languageList>"
for id in enumdata.language_list:
l = enumdata.language_list[id]
print " <language>"
print " <name>" + l[0] + "</name>"
print " <id>" + str(id) + "</id>"
print " <code>" + l[1] + "</code>"
print " </language>"
print " </languageList>"
print " <scriptList>"
for id in enumdata.script_list:
l = enumdata.script_list[id]
print " <script>"
print " <name>" + l[0] + "</name>"
print " <id>" + str(id) + "</id>"
print " <code>" + l[1] + "</code>"
print " </script>"
print " </scriptList>"
print " <countryList>"
for id in enumdata.country_list:
l = enumdata.country_list[id]
print " <country>"
print " <name>" + l[0] + "</name>"
print " <id>" + str(id) + "</id>"
print " <code>" + l[1] + "</code>"
print " </country>"
print " </countryList>"
def _parseLocale(l):
language = "AnyLanguage"
script = "AnyScript"
country = "AnyCountry"
if l == "und":
raise xpathlite.Error("we are treating unknown locale like C")
items = l.split("_")
language_code = items[0]
if language_code != "und":
language_id = enumdata.languageCodeToId(language_code)
if language_id == -1:
raise xpathlite.Error("unknown language code \"%s\"" % language_code)
language = enumdata.language_list[language_id][0]
if len(items) > 1:
script_code = items[1]
country_code = ""
if len(items) > 2:
country_code = items[2]
if len(script_code) == 4:
script_id = enumdata.scriptCodeToId(script_code)
if script_id == -1:
raise xpathlite.Error("unknown script code \"%s\"" % script_code)
script = enumdata.script_list[script_id][0]
else:
country_code = script_code
if country_code:
country_id = enumdata.countryCodeToId(country_code)
if country_id == -1:
raise xpathlite.Error("unknown country code \"%s\"" % country_code)
country = enumdata.country_list[country_id][0]
return (language, script, country)
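# Emit the likely-subtags table from supplemental/likelySubtags.xml; entries
# whose locale ids cannot be mapped to known enum values are skipped with a
# warning on stderr.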
print " <likelySubtags>"
for ns in findTagsInFile(cldr_dir + "/../supplemental/likelySubtags.xml", "likelySubtags"):
tmp = {}
for data in ns[1:][0]: # ns looks like this: [u'likelySubtag', [(u'from', u'aa'), (u'to', u'aa_Latn_ET')]]
tmp[data[0]] = data[1]
try:
(from_language, from_script, from_country) = _parseLocale(tmp[u"from"])
except xpathlite.Error as e:
sys.stderr.write("skipping likelySubtag \"%s\" -> \"%s\" (%s)\n" % (tmp[u"from"], tmp[u"to"], str(e)))
continue
try:
(to_language, to_script, to_country) = _parseLocale(tmp[u"to"])
except xpathlite.Error as e:
sys.stderr.write("skipping likelySubtag \"%s\" -> \"%s\" (%s)\n" % (tmp[u"from"], tmp[u"to"], str(e)))
continue
# substitute according to http://www.unicode.org/reports/tr35/#Likely_Subtags
if to_country == "AnyCountry" and from_country != to_country:
to_country = from_country
if to_script == "AnyScript" and from_script != to_script:
to_script = from_script
print " <likelySubtag>"
print " <from>"
print " <language>" + from_language + "</language>"
print " <script>" + from_script + "</script>"
print " <country>" + from_country + "</country>"
print " </from>"
print " <to>"
print " <language>" + to_language + "</language>"
print " <script>" + to_script + "</script>"
print " <country>" + to_country + "</country>"
print " </to>"
print " </likelySubtag>"
print " </likelySubtags>"
print " <localeList>"
print \
" <locale>\n\
<language>C</language>\n\
<languageEndonym></languageEndonym>\n\
<script>AnyScript</script>\n\
<country>AnyCountry</country>\n\
<countryEndonym></countryEndonym>\n\
<decimal>46</decimal>\n\
<group>44</group>\n\
<list>59</list>\n\
<percent>37</percent>\n\
<zero>48</zero>\n\
<minus>45</minus>\n\
<plus>43</plus>\n\
<exp>101</exp>\n\
<quotationStart>\"</quotationStart>\n\
<quotationEnd>\"</quotationEnd>\n\
<alternateQuotationStart>\'</alternateQuotationStart>\n\
<alternateQuotationEnd>\'</alternateQuotationEnd>\n\
<listPatternPartStart>%1, %2</listPatternPartStart>\n\
<listPatternPartMiddle>%1, %2</listPatternPartMiddle>\n\
<listPatternPartEnd>%1, %2</listPatternPartEnd>\n\
<listPatternPartTwo>%1, %2</listPatternPartTwo>\n\
<am>AM</am>\n\
<pm>PM</pm>\n\
<firstDayOfWeek>mon</firstDayOfWeek>\n\
<weekendStart>sat</weekendStart>\n\
<weekendEnd>sun</weekendEnd>\n\
<longDateFormat>EEEE, d MMMM yyyy</longDateFormat>\n\
<shortDateFormat>d MMM yyyy</shortDateFormat>\n\
<longTimeFormat>HH:mm:ss z</longTimeFormat>\n\
<shortTimeFormat>HH:mm:ss</shortTimeFormat>\n\
<standaloneLongMonths>January;February;March;April;May;June;July;August;September;October;November;December;</standaloneLongMonths>\n\
<standaloneShortMonths>Jan;Feb;Mar;Apr;May;Jun;Jul;Aug;Sep;Oct;Nov;Dec;</standaloneShortMonths>\n\
<standaloneNarrowMonths>J;F;M;A;M;J;J;A;S;O;N;D;</standaloneNarrowMonths>\n\
<longMonths>January;February;March;April;May;June;July;August;September;October;November;December;</longMonths>\n\
<shortMonths>Jan;Feb;Mar;Apr;May;Jun;Jul;Aug;Sep;Oct;Nov;Dec;</shortMonths>\n\
<narrowMonths>1;2;3;4;5;6;7;8;9;10;11;12;</narrowMonths>\n\
<longDays>Sunday;Monday;Tuesday;Wednesday;Thursday;Friday;Saturday;</longDays>\n\
<shortDays>Sun;Mon;Tue;Wed;Thu;Fri;Sat;</shortDays>\n\
<narrowDays>7;1;2;3;4;5;6;</narrowDays>\n\
<standaloneLongDays>Sunday;Monday;Tuesday;Wednesday;Thursday;Friday;Saturday;</standaloneLongDays>\n\
<standaloneShortDays>Sun;Mon;Tue;Wed;Thu;Fri;Sat;</standaloneShortDays>\n\
<standaloneNarrowDays>S;M;T;W;T;F;S;</standaloneNarrowDays>\n\
<currencyIsoCode></currencyIsoCode>\n\
<currencySymbol></currencySymbol>\n\
<currencyDisplayName>;;;;;;;</currencyDisplayName>\n\
<currencyDigits>2</currencyDigits>\n\
<currencyRounding>1</currencyRounding>\n\
<currencyFormat>%1%2</currencyFormat>\n\
<currencyNegativeFormat></currencyNegativeFormat>\n\
</locale>"
for key in locale_keys:
l = locale_database[key]
print " <locale>"
print " <language>" + l['language'] + "</language>"
print " <languageEndonym>" + escape(l['language_endonym']).encode('utf-8') + "</languageEndonym>"
print " <script>" + l['script'] + "</script>"
print " <country>" + l['country'] + "</country>"
print " <countryEndonym>" + escape(l['country_endonym']).encode('utf-8') + "</countryEndonym>"
print " <languagecode>" + l['language_code'] + "</languagecode>"
print " <scriptcode>" + l['script_code'] + "</scriptcode>"
print " <countrycode>" + l['country_code'] + "</countrycode>"
print " <decimal>" + ordStr(l['decimal']) + "</decimal>"
print " <group>" + ordStr(l['group']) + "</group>"
print " <list>" + fixOrdStrList(l['list']) + "</list>"
print " <percent>" + fixOrdStrPercent(l['percent']) + "</percent>"
print " <zero>" + ordStr(l['zero']) + "</zero>"
print " <minus>" + fixOrdStrMinus(l['minus']) + "</minus>"
print " <plus>" + fixOrdStrPlus(l['plus']) + "</plus>"
print " <exp>" + fixOrdStrExp(l['exp']) + "</exp>"
print " <quotationStart>" + l['quotationStart'].encode('utf-8') + "</quotationStart>"
print " <quotationEnd>" + l['quotationEnd'].encode('utf-8') + "</quotationEnd>"
print " <alternateQuotationStart>" + l['alternateQuotationStart'].encode('utf-8') + "</alternateQuotationStart>"
print " <alternateQuotationEnd>" + l['alternateQuotationEnd'].encode('utf-8') + "</alternateQuotationEnd>"
print " <listPatternPartStart>" + l['listPatternPartStart'].encode('utf-8') + "</listPatternPartStart>"
print " <listPatternPartMiddle>" + l['listPatternPartMiddle'].encode('utf-8') + "</listPatternPartMiddle>"
print " <listPatternPartEnd>" + l['listPatternPartEnd'].encode('utf-8') + "</listPatternPartEnd>"
print " <listPatternPartTwo>" + l['listPatternPartTwo'].encode('utf-8') + "</listPatternPartTwo>"
print " <am>" + l['am'].encode('utf-8') + "</am>"
print " <pm>" + l['pm'].encode('utf-8') + "</pm>"
print " <firstDayOfWeek>" + l['firstDayOfWeek'].encode('utf-8') + "</firstDayOfWeek>"
print " <weekendStart>" + l['weekendStart'].encode('utf-8') + "</weekendStart>"
print " <weekendEnd>" + l['weekendEnd'].encode('utf-8') + "</weekendEnd>"
print " <longDateFormat>" + l['longDateFormat'].encode('utf-8') + "</longDateFormat>"
print " <shortDateFormat>" + l['shortDateFormat'].encode('utf-8') + "</shortDateFormat>"
print " <longTimeFormat>" + l['longTimeFormat'].encode('utf-8') + "</longTimeFormat>"
print " <shortTimeFormat>" + l['shortTimeFormat'].encode('utf-8') + "</shortTimeFormat>"
print " <standaloneLongMonths>" + l['standaloneLongMonths'].encode('utf-8') + "</standaloneLongMonths>"
print " <standaloneShortMonths>"+ l['standaloneShortMonths'].encode('utf-8') + "</standaloneShortMonths>"
print " <standaloneNarrowMonths>"+ l['standaloneNarrowMonths'].encode('utf-8') + "</standaloneNarrowMonths>"
print " <longMonths>" + l['longMonths'].encode('utf-8') + "</longMonths>"
print " <shortMonths>" + l['shortMonths'].encode('utf-8') + "</shortMonths>"
print " <narrowMonths>" + l['narrowMonths'].encode('utf-8') + "</narrowMonths>"
print " <longDays>" + l['longDays'].encode('utf-8') + "</longDays>"
print " <shortDays>" + l['shortDays'].encode('utf-8') + "</shortDays>"
print " <narrowDays>" + l['narrowDays'].encode('utf-8') + "</narrowDays>"
print " <standaloneLongDays>" + l['standaloneLongDays'].encode('utf-8') + "</standaloneLongDays>"
print " <standaloneShortDays>" + l['standaloneShortDays'].encode('utf-8') + "</standaloneShortDays>"
print " <standaloneNarrowDays>" + l['standaloneNarrowDays'].encode('utf-8') + "</standaloneNarrowDays>"
print " <currencyIsoCode>" + l['currencyIsoCode'].encode('utf-8') + "</currencyIsoCode>"
print " <currencySymbol>" + l['currencySymbol'].encode('utf-8') + "</currencySymbol>"
print " <currencyDisplayName>" + l['currencyDisplayName'].encode('utf-8') + "</currencyDisplayName>"
print " <currencyDigits>" + str(l['currencyDigits']) + "</currencyDigits>"
print " <currencyRounding>" + str(l['currencyRounding']) + "</currencyRounding>"
print " <currencyFormat>" + l['currencyFormat'].encode('utf-8') + "</currencyFormat>"
print " <currencyNegativeFormat>" + l['currencyNegativeFormat'].encode('utf-8') + "</currencyNegativeFormat>"
print " </locale>"
print " </localeList>"
print "</localeDatabase>"
| bsd-3-clause | -470,703,937,720,312,640 | 51.639951 | 155 | 0.607341 | false |
cpanelli/-git-clone-https-chromium.googlesource.com-chromium-tools-depot_tools | third_party/pylint/lint.py | 46 | 56500 | # Copyright (c) 2003-2014 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:[email protected]
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
""" %prog [options] module_or_package
Check that a module satisfies a coding standard (and more !).
%prog --help
Display this help message and exit.
%prog --help-msg <msg-id>[,<msg-id>]
Display help messages about given message identifiers and exit.
"""
from __future__ import print_function
import collections
import contextlib
import itertools
import operator
import os
try:
import multiprocessing
except ImportError:
multiprocessing = None
import sys
import tokenize
import warnings
import astroid
from astroid.__pkginfo__ import version as astroid_version
from astroid import modutils
from logilab.common import configuration
from logilab.common import optik_ext
from logilab.common import interface
from logilab.common import textutils
from logilab.common import ureports
from logilab.common.__pkginfo__ import version as common_version
import six
from pylint import checkers
from pylint import interfaces
from pylint import reporters
from pylint import utils
from pylint import config
from pylint.__pkginfo__ import version
MANAGER = astroid.MANAGER
def _get_new_args(message):
location = (
message.abspath,
message.path,
message.module,
message.obj,
message.line,
message.column,
)
return (
message.msg_id,
message.symbol,
location,
message.msg,
message.confidence,
)
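# _get_python_path() walks up from the analyzed file until it reaches a
# directory without an __init__.py, i.e. the directory that has to be on
# sys.path for the analyzed package to be importable (see fix_import_path()).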
def _get_python_path(filepath):
dirname = os.path.realpath(os.path.expanduser(filepath))
if not os.path.isdir(dirname):
dirname = os.path.dirname(dirname)
while True:
if not os.path.exists(os.path.join(dirname, "__init__.py")):
return dirname
old_dirname = dirname
dirname = os.path.dirname(dirname)
if old_dirname == dirname:
return os.getcwd()
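# _merge_stats() combines the stats dictionaries returned by parallel child
# linters: nested dictionaries are merged with update(), everything else
# (counters) is added together.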
def _merge_stats(stats):
merged = {}
for stat in stats:
for key, item in six.iteritems(stat):
if key not in merged:
merged[key] = item
else:
if isinstance(item, dict):
merged[key].update(item)
else:
merged[key] = merged[key] + item
return merged
# Python Linter class #########################################################
MSGS = {
'F0001': ('%s',
'fatal',
              'Used when an error occurred preventing the analysis of a '
              'module (unable to find it for instance).'),
'F0002': ('%s: %s',
'astroid-error',
'Used when an unexpected error occurred while building the '
'Astroid representation. This is usually accompanied by a '
'traceback. Please report such errors !'),
'F0003': ('ignored builtin module %s',
'ignored-builtin-module',
'Used to indicate that the user asked to analyze a builtin '
'module which has been skipped.'),
'F0010': ('error while code parsing: %s',
'parse-error',
              'Used when an exception occurred while building the Astroid '
'representation which could be handled by astroid.'),
'I0001': ('Unable to run raw checkers on built-in module %s',
'raw-checker-failed',
'Used to inform that a built-in module has not been checked '
'using the raw checkers.'),
'I0010': ('Unable to consider inline option %r',
'bad-inline-option',
'Used when an inline option is either badly formatted or can\'t '
'be used inside modules.'),
'I0011': ('Locally disabling %s (%s)',
'locally-disabled',
'Used when an inline option disables a message or a messages '
'category.'),
'I0012': ('Locally enabling %s (%s)',
'locally-enabled',
'Used when an inline option enables a message or a messages '
'category.'),
'I0013': ('Ignoring entire file',
'file-ignored',
'Used to inform that the file will not be checked'),
'I0020': ('Suppressed %s (from line %d)',
'suppressed-message',
'A message was triggered on a line, but suppressed explicitly '
'by a disable= comment in the file. This message is not '
'generated for messages that are ignored due to configuration '
'settings.'),
'I0021': ('Useless suppression of %s',
'useless-suppression',
'Reported when a message is explicitly disabled for a line or '
'a block of code, but never triggered.'),
'I0022': ('Pragma "%s" is deprecated, use "%s" instead',
'deprecated-pragma',
'Some inline pylint options have been renamed or reworked, '
'only the most recent form should be used. '
'NOTE:skip-all is only available with pylint >= 0.26',
{'old_names': [('I0014', 'deprecated-disable-all')]}),
'E0001': ('%s',
'syntax-error',
'Used when a syntax error is raised for a module.'),
'E0011': ('Unrecognized file option %r',
'unrecognized-inline-option',
'Used when an unknown inline option is encountered.'),
'E0012': ('Bad option value %r',
'bad-option-value',
'Used when a bad value for an inline option is encountered.'),
}
def _deprecated_option(shortname, opt_type):
def _warn_deprecated(option, optname, *args): # pylint: disable=unused-argument
sys.stderr.write('Warning: option %s is deprecated and ignored.\n' % (optname,))
return {'short': shortname, 'help': 'DEPRECATED', 'hide': True,
'type': opt_type, 'action': 'callback', 'callback': _warn_deprecated}
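# With --jobs N (and the multiprocessing module available), PyLinter spawns N
# ChildLinter processes; each child runs its own PyLinter on the modules it
# receives over a task queue and sends the collected messages and stats back
# to the parent, which merges them with _merge_stats() above.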
if multiprocessing is not None:
class ChildLinter(multiprocessing.Process): # pylint: disable=no-member
def run(self):
tasks_queue, results_queue, self._config = self._args # pylint: disable=no-member
self._config["jobs"] = 1 # Child does not parallelize any further.
# Run linter for received files/modules.
for file_or_module in iter(tasks_queue.get, 'STOP'):
result = self._run_linter(file_or_module[0])
try:
results_queue.put(result)
except Exception as ex:
print("internal error with sending report for module %s" % file_or_module, file=sys.stderr)
print(ex, file=sys.stderr)
results_queue.put({})
def _run_linter(self, file_or_module):
linter = PyLinter()
# Register standard checkers.
linter.load_default_plugins()
# Load command line plugins.
# TODO linter.load_plugin_modules(self._plugins)
linter.load_configuration(**self._config)
linter.set_reporter(reporters.CollectingReporter())
# Run the checks.
linter.check(file_or_module)
msgs = [_get_new_args(m) for m in linter.reporter.messages]
return (file_or_module, linter.file_state.base_name, linter.current_name,
msgs, linter.stats, linter.msg_status)
class PyLinter(configuration.OptionsManagerMixIn,
utils.MessagesHandlerMixIn,
utils.ReportsHandlerMixIn,
checkers.BaseTokenChecker):
"""lint Python modules using external checkers.
This is the main checker controlling the other ones and the reports
generation. It is itself both a raw checker and an astroid checker in order
to:
* handle message activation / deactivation at the module level
* handle some basic but necessary stats'data (number of classes, methods...)
    IDE plugin developers: you may have to call
    `astroid.builder.MANAGER.astroid_cache.clear()` across runs if you want
    to ensure the latest code version is actually checked.
"""
__implements__ = (interfaces.ITokenChecker, )
name = 'master'
priority = 0
level = 0
msgs = MSGS
@staticmethod
def make_options():
return (('ignore',
{'type' : 'csv', 'metavar' : '<file>[,<file>...]',
'dest' : 'black_list', 'default' : ('CVS',),
'help' : 'Add files or directories to the blacklist. '
'They should be base names, not paths.'}),
('persistent',
{'default': True, 'type' : 'yn', 'metavar' : '<y_or_n>',
'level': 1,
'help' : 'Pickle collected data for later comparisons.'}),
('load-plugins',
{'type' : 'csv', 'metavar' : '<modules>', 'default' : (),
'level': 1,
'help' : 'List of plugins (as comma separated values of '
'python modules names) to load, usually to register '
'additional checkers.'}),
('output-format',
{'default': 'text', 'type': 'string', 'metavar' : '<format>',
'short': 'f',
'group': 'Reports',
'help' : 'Set the output format. Available formats are text,'
' parseable, colorized, msvs (visual studio) and html. You '
'can also give a reporter class, eg mypackage.mymodule.'
'MyReporterClass.'}),
('files-output',
{'default': 0, 'type' : 'yn', 'metavar' : '<y_or_n>',
'group': 'Reports', 'level': 1,
'help' : 'Put messages in a separate file for each module / '
'package specified on the command line instead of printing '
'them on stdout. Reports (if any) will be written in a file '
'name "pylint_global.[txt|html]".'}),
('reports',
{'default': 1, 'type' : 'yn', 'metavar' : '<y_or_n>',
'short': 'r',
'group': 'Reports',
'help' : 'Tells whether to display a full report or only the '
'messages'}),
('evaluation',
{'type' : 'string', 'metavar' : '<python_expression>',
'group': 'Reports', 'level': 1,
'default': '10.0 - ((float(5 * error + warning + refactor + '
'convention) / statement) * 10)',
'help' : 'Python expression which should return a note less '
'than 10 (10 is the highest note). You have access '
                          'to the variables error, warning and statement, which '
                          'respectively contain the number of error / '
                          'warning messages and the total number of '
'statements analyzed. This is used by the global '
'evaluation report (RP0004).'}),
('comment',
{'default': 0, 'type' : 'yn', 'metavar' : '<y_or_n>',
'group': 'Reports', 'level': 1,
'help' : 'Add a comment according to your evaluation note. '
'This is used by the global evaluation report (RP0004).'}),
('confidence',
{'type' : 'multiple_choice', 'metavar': '<levels>',
'default': '',
'choices': [c.name for c in interfaces.CONFIDENCE_LEVELS],
'group': 'Messages control',
'help' : 'Only show warnings with the listed confidence levels.'
' Leave empty to show all. Valid levels: %s' % (
', '.join(c.name for c in interfaces.CONFIDENCE_LEVELS),)}),
('enable',
{'type' : 'csv', 'metavar': '<msg ids>',
'short': 'e',
'group': 'Messages control',
'help' : 'Enable the message, report, category or checker with the '
'given id(s). You can either give multiple identifier '
'separated by comma (,) or put this option multiple time. '
'See also the "--disable" option for examples. '}),
('disable',
{'type' : 'csv', 'metavar': '<msg ids>',
'short': 'd',
'group': 'Messages control',
'help' : 'Disable the message, report, category or checker '
'with the given id(s). You can either give multiple identifiers'
' separated by comma (,) or put this option multiple times '
'(only on the command line, not in the configuration file '
'where it should appear only once).'
'You can also use "--disable=all" to disable everything first '
'and then reenable specific checks. For example, if you want '
'to run only the similarities checker, you can use '
'"--disable=all --enable=similarities". '
'If you want to run only the classes checker, but have no '
'Warning level messages displayed, use'
'"--disable=all --enable=classes --disable=W"'}),
('msg-template',
{'type' : 'string', 'metavar': '<template>',
'group': 'Reports',
'help' : ('Template used to display messages. '
'This is a python new-style format string '
'used to format the message information. '
'See doc for all details')
}),
('include-ids', _deprecated_option('i', 'yn')),
('symbols', _deprecated_option('s', 'yn')),
('jobs',
{'type' : 'int', 'metavar': '<n-processes>',
'short': 'j',
'default': 1,
'help' : '''Use multiple processes to speed up Pylint.''',
}),
('unsafe-load-any-extension',
{'type': 'yn', 'metavar': '<yn>', 'default': False, 'hide': True,
'help': ('Allow loading of arbitrary C extensions. Extensions'
' are imported into the active Python interpreter and'
' may run arbitrary code.')}),
('extension-pkg-whitelist',
{'type': 'csv', 'metavar': '<pkg[,pkg]>', 'default': [],
'help': ('A comma-separated list of package or module names'
' from where C extensions may be loaded. Extensions are'
                          ' loaded into the active Python interpreter and may run'
' arbitrary code')}
),
)
option_groups = (
        ('Messages control', 'Options controlling analysis messages'),
        ('Reports', 'Options related to output formatting and reporting'),
)
def __init__(self, options=(), reporter=None, option_groups=(),
pylintrc=None):
# some stuff has to be done before ancestors initialization...
#
# messages store / checkers / reporter / astroid manager
self.msgs_store = utils.MessagesStore()
self.reporter = None
self._reporter_name = None
self._reporters = {}
self._checkers = collections.defaultdict(list)
self._pragma_lineno = {}
self._ignore_file = False
# visit variables
self.file_state = utils.FileState()
self.current_name = None
self.current_file = None
self.stats = None
# init options
self._external_opts = options
self.options = options + PyLinter.make_options()
self.option_groups = option_groups + PyLinter.option_groups
self._options_methods = {
'enable': self.enable,
'disable': self.disable}
self._bw_options_methods = {'disable-msg': self.disable,
'enable-msg': self.enable}
full_version = '%%prog %s, \nastroid %s, common %s\nPython %s' % (
version, astroid_version, common_version, sys.version)
configuration.OptionsManagerMixIn.__init__(
self, usage=__doc__,
version=full_version,
config_file=pylintrc or config.PYLINTRC)
utils.MessagesHandlerMixIn.__init__(self)
utils.ReportsHandlerMixIn.__init__(self)
checkers.BaseTokenChecker.__init__(self)
# provided reports
self.reports = (('RP0001', 'Messages by category',
report_total_messages_stats),
('RP0002', '% errors / warnings by module',
report_messages_by_module_stats),
('RP0003', 'Messages',
report_messages_stats),
('RP0004', 'Global evaluation',
self.report_evaluation),
)
self.register_checker(self)
self._dynamic_plugins = set()
self.load_provider_defaults()
if reporter:
self.set_reporter(reporter)
def load_default_plugins(self):
checkers.initialize(self)
reporters.initialize(self)
# Make sure to load the default reporter, because
# the option has been set before the plugins had been loaded.
if not self.reporter:
self._load_reporter()
def load_plugin_modules(self, modnames):
"""take a list of module names which are pylint plugins and load
and register them
"""
for modname in modnames:
if modname in self._dynamic_plugins:
continue
self._dynamic_plugins.add(modname)
module = modutils.load_module_from_name(modname)
module.register(self)
def _load_reporter(self):
name = self._reporter_name.lower()
if name in self._reporters:
self.set_reporter(self._reporters[name]())
else:
qname = self._reporter_name
module = modutils.load_module_from_name(
modutils.get_module_part(qname))
class_name = qname.split('.')[-1]
reporter_class = getattr(module, class_name)
self.set_reporter(reporter_class())
def set_reporter(self, reporter):
"""set the reporter used to display messages and reports"""
self.reporter = reporter
reporter.linter = self
def set_option(self, optname, value, action=None, optdict=None):
"""overridden from configuration.OptionsProviderMixin to handle some
special options
"""
if optname in self._options_methods or \
optname in self._bw_options_methods:
if value:
try:
meth = self._options_methods[optname]
except KeyError:
meth = self._bw_options_methods[optname]
warnings.warn('%s is deprecated, replace it by %s' % (
optname, optname.split('-')[0]),
DeprecationWarning)
value = optik_ext.check_csv(None, optname, value)
if isinstance(value, (list, tuple)):
for _id in value:
meth(_id, ignore_unknown=True)
else:
meth(value)
return # no need to call set_option, disable/enable methods do it
elif optname == 'output-format':
self._reporter_name = value
# If the reporters are already available, load
# the reporter class.
if self._reporters:
self._load_reporter()
try:
checkers.BaseTokenChecker.set_option(self, optname,
value, action, optdict)
except configuration.UnsupportedAction:
print('option %s can\'t be read from config file' % \
optname, file=sys.stderr)
def register_reporter(self, reporter_class):
self._reporters[reporter_class.name] = reporter_class
def report_order(self):
reports = sorted(self._reports, key=lambda x: getattr(x, 'name', ''))
try:
# Remove the current reporter and add it
# at the end of the list.
reports.pop(reports.index(self))
except ValueError:
pass
else:
reports.append(self)
return reports
# checkers manipulation methods ############################################
def register_checker(self, checker):
"""register a new checker
checker is an object implementing IRawChecker or / and IAstroidChecker
"""
assert checker.priority <= 0, 'checker priority can\'t be >= 0'
self._checkers[checker.name].append(checker)
for r_id, r_title, r_cb in checker.reports:
self.register_report(r_id, r_title, r_cb, checker)
self.register_options_provider(checker)
if hasattr(checker, 'msgs'):
self.msgs_store.register_messages(checker)
checker.load_defaults()
# Register the checker, but disable all of its messages.
# TODO(cpopa): we should have a better API for this.
if not getattr(checker, 'enabled', True):
self.disable(checker.name)
def disable_noerror_messages(self):
for msgcat, msgids in six.iteritems(self.msgs_store._msgs_by_category):
if msgcat == 'E':
for msgid in msgids:
self.enable(msgid)
else:
for msgid in msgids:
self.disable(msgid)
def disable_reporters(self):
"""disable all reporters"""
for reporters in six.itervalues(self._reports):
for report_id, _, _ in reporters:
self.disable_report(report_id)
def error_mode(self):
"""error mode: enable only errors; no reports, no persistent"""
self.disable_noerror_messages()
self.disable('miscellaneous')
self.set_option('reports', False)
self.set_option('persistent', False)
# block level option handling #############################################
#
# see func_block_disable_msg.py test case for expected behaviour
def process_tokens(self, tokens):
"""process tokens from the current module to search for module/block
level options
"""
control_pragmas = {'disable', 'enable'}
for (tok_type, content, start, _, _) in tokens:
if tok_type != tokenize.COMMENT:
continue
match = utils.OPTION_RGX.search(content)
if match is None:
continue
if match.group(1).strip() == "disable-all" or \
match.group(1).strip() == 'skip-file':
if match.group(1).strip() == "disable-all":
self.add_message('deprecated-pragma', line=start[0],
args=('disable-all', 'skip-file'))
self.add_message('file-ignored', line=start[0])
self._ignore_file = True
return
try:
opt, value = match.group(1).split('=', 1)
except ValueError:
self.add_message('bad-inline-option', args=match.group(1).strip(),
line=start[0])
continue
opt = opt.strip()
if opt in self._options_methods or opt in self._bw_options_methods:
try:
meth = self._options_methods[opt]
except KeyError:
meth = self._bw_options_methods[opt]
                    # found a deprecated "(dis|en)able-msg" suppression pragma
self.add_message('deprecated-pragma', line=start[0], args=(opt, opt.replace('-msg', '')))
for msgid in textutils.splitstrip(value):
# Add the line where a control pragma was encountered.
if opt in control_pragmas:
self._pragma_lineno[msgid] = start[0]
try:
if (opt, msgid) == ('disable', 'all'):
self.add_message('deprecated-pragma', line=start[0], args=('disable=all', 'skip-file'))
self.add_message('file-ignored', line=start[0])
self._ignore_file = True
return
meth(msgid, 'module', start[0])
except utils.UnknownMessage:
self.add_message('bad-option-value', args=msgid, line=start[0])
else:
self.add_message('unrecognized-inline-option', args=opt, line=start[0])
# code checking methods ###################################################
def get_checkers(self):
"""return all available checkers as a list"""
return [self] + [c for checkers in six.itervalues(self._checkers)
for c in checkers if c is not self]
def prepare_checkers(self):
"""return checkers needed for activated messages and reports"""
if not self.config.reports:
self.disable_reporters()
# get needed checkers
neededcheckers = [self]
for checker in self.get_checkers()[1:]:
# fatal errors should not trigger enable / disabling a checker
messages = set(msg for msg in checker.msgs
if msg[0] != 'F' and self.is_message_enabled(msg))
if (messages or
any(self.report_is_enabled(r[0]) for r in checker.reports)):
neededcheckers.append(checker)
# Sort checkers by priority
neededcheckers = sorted(neededcheckers,
key=operator.attrgetter('priority'),
reverse=True)
return neededcheckers
def should_analyze_file(self, modname, path): # pylint: disable=unused-argument, no-self-use
"""Returns whether or not a module should be checked.
        This implementation returns True for all Python source files, indicating
that all files should be linted.
Subclasses may override this method to indicate that modules satisfying
certain conditions should not be linted.
:param str modname: The name of the module to be checked.
:param str path: The full path to the source code of the module.
:returns: True if the module should be checked.
:rtype: bool
"""
return path.endswith('.py')
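    # A subclass could, for instance, skip generated modules (a sketch only;
    # the "_pb2" suffix is just an illustrative naming convention):
    #     def should_analyze_file(self, modname, path):
    #         return path.endswith('.py') and not modname.endswith('_pb2')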
def check(self, files_or_modules):
"""main checking entry: check a list of files or modules from their
name.
"""
# initialize msgs_state now that all messages have been registered into
# the store
for msg in self.msgs_store.messages:
if not msg.may_be_emitted():
self._msgs_state[msg.msgid] = False
if not isinstance(files_or_modules, (list, tuple)):
files_or_modules = (files_or_modules,)
if self.config.jobs == 1:
with fix_import_path(files_or_modules):
self._do_check(files_or_modules)
else:
# Hack that permits running pylint, on Windows, with -m switch
# and with --jobs, as in 'python -2 -m pylint .. --jobs'.
# For more details why this is needed,
# see Python issue http://bugs.python.org/issue10845.
mock_main = __name__ != '__main__' # -m switch
if mock_main:
sys.modules['__main__'] = sys.modules[__name__]
try:
self._parallel_check(files_or_modules)
finally:
if mock_main:
sys.modules.pop('__main__')
def _parallel_task(self, files_or_modules):
# Prepare configuration for child linters.
filter_options = {'symbols', 'include-ids', 'long-help'}
filter_options.update([opt_name for opt_name, _ in self._external_opts])
config = {}
for opt_providers in six.itervalues(self._all_options):
for optname, optdict, val in opt_providers.options_and_values():
if optname not in filter_options:
config[optname] = configuration.format_option_value(optdict, val)
childs = []
manager = multiprocessing.Manager() # pylint: disable=no-member
tasks_queue = manager.Queue() # pylint: disable=no-member
results_queue = manager.Queue() # pylint: disable=no-member
for _ in range(self.config.jobs):
cl = ChildLinter(args=(tasks_queue, results_queue, config))
cl.start() # pylint: disable=no-member
childs.append(cl)
# send files to child linters
for files_or_module in files_or_modules:
tasks_queue.put([files_or_module])
# collect results from child linters
failed = False
for _ in files_or_modules:
try:
result = results_queue.get()
except Exception as ex:
print("internal error while receiving results from child linter",
file=sys.stderr)
print(ex, file=sys.stderr)
failed = True
break
yield result
# Stop child linters and wait for their completion.
for _ in range(self.config.jobs):
tasks_queue.put('STOP')
for cl in childs:
cl.join()
if failed:
print("Error occured, stopping the linter.", file=sys.stderr)
sys.exit(32)
def _parallel_check(self, files_or_modules):
# Reset stats.
self.open()
all_stats = []
for result in self._parallel_task(files_or_modules):
(
file_or_module,
self.file_state.base_name,
module,
messages,
stats,
msg_status
) = result
if file_or_module == files_or_modules[-1]:
last_module = module
for msg in messages:
msg = utils.Message(*msg)
self.set_current_module(module)
self.reporter.handle_message(msg)
all_stats.append(stats)
self.msg_status |= msg_status
self.stats = _merge_stats(itertools.chain(all_stats, [self.stats]))
self.current_name = last_module
# Insert stats data to local checkers.
for checker in self.get_checkers():
if checker is not self:
checker.stats = self.stats
def _do_check(self, files_or_modules):
walker = utils.PyLintASTWalker(self)
checkers = self.prepare_checkers()
tokencheckers = [c for c in checkers
if interface.implements(c, interfaces.ITokenChecker)
and c is not self]
rawcheckers = [c for c in checkers
if interface.implements(c, interfaces.IRawChecker)]
# notify global begin
for checker in checkers:
checker.open()
if interface.implements(checker, interfaces.IAstroidChecker):
walker.add_checker(checker)
# build ast and check modules or packages
for descr in self.expand_files(files_or_modules):
modname, filepath = descr['name'], descr['path']
if not descr['isarg'] and not self.should_analyze_file(modname, filepath):
continue
if self.config.files_output:
reportfile = 'pylint_%s.%s' % (modname, self.reporter.extension)
self.reporter.set_output(open(reportfile, 'w'))
self.set_current_module(modname, filepath)
# get the module representation
ast_node = self.get_ast(filepath, modname)
if ast_node is None:
continue
# XXX to be correct we need to keep module_msgs_state for every
# analyzed module (the problem stands with localized messages which
# are only detected in the .close step)
self.file_state = utils.FileState(descr['basename'])
self._ignore_file = False
# fix the current file (if the source file was not available or
# if it's actually a c extension)
self.current_file = ast_node.file # pylint: disable=maybe-no-member
self.check_astroid_module(ast_node, walker, rawcheckers, tokencheckers)
# warn about spurious inline messages handling
for msgid, line, args in self.file_state.iter_spurious_suppression_messages(self.msgs_store):
self.add_message(msgid, line, None, args)
# notify global end
self.stats['statement'] = walker.nbstatements
checkers.reverse()
for checker in checkers:
checker.close()
def expand_files(self, modules):
"""get modules and errors from a list of modules and handle errors
"""
result, errors = utils.expand_modules(modules, self.config.black_list)
for error in errors:
message = modname = error["mod"]
key = error["key"]
self.set_current_module(modname)
if key == "fatal":
message = str(error["ex"]).replace(os.getcwd() + os.sep, '')
self.add_message(key, args=message)
return result
def set_current_module(self, modname, filepath=None):
"""set the name of the currently analyzed module and
init statistics for it
"""
if not modname and filepath is None:
return
self.reporter.on_set_current_module(modname, filepath)
self.current_name = modname
self.current_file = filepath or modname
self.stats['by_module'][modname] = {}
self.stats['by_module'][modname]['statement'] = 0
for msg_cat in six.itervalues(utils.MSG_TYPES):
self.stats['by_module'][modname][msg_cat] = 0
def get_ast(self, filepath, modname):
"""return a ast(roid) representation for a module"""
try:
return MANAGER.ast_from_file(filepath, modname, source=True)
except SyntaxError as ex:
self.add_message('syntax-error', line=ex.lineno, args=ex.msg)
except astroid.AstroidBuildingException as ex:
self.add_message('parse-error', args=ex)
except Exception as ex: # pylint: disable=broad-except
import traceback
traceback.print_exc()
self.add_message('astroid-error', args=(ex.__class__, ex))
def check_astroid_module(self, ast_node, walker,
rawcheckers, tokencheckers):
"""Check a module from its astroid representation."""
try:
tokens = utils.tokenize_module(ast_node)
except tokenize.TokenError as ex:
self.add_message('syntax-error', line=ex.args[1][0], args=ex.args[0])
return
if not ast_node.pure_python:
self.add_message('raw-checker-failed', args=ast_node.name)
else:
#assert astroid.file.endswith('.py')
# invoke ITokenChecker interface on self to fetch module/block
# level options
self.process_tokens(tokens)
if self._ignore_file:
return False
# walk ast to collect line numbers
self.file_state.collect_block_lines(self.msgs_store, ast_node)
# run raw and tokens checkers
for checker in rawcheckers:
checker.process_module(ast_node)
for checker in tokencheckers:
checker.process_tokens(tokens)
# generate events to astroid checkers
walker.walk(ast_node)
return True
# IAstroidChecker interface #################################################
def open(self):
"""initialize counters"""
self.stats = {'by_module' : {},
'by_msg' : {},
}
MANAGER.always_load_extensions = self.config.unsafe_load_any_extension
MANAGER.extension_package_whitelist.update(
self.config.extension_pkg_whitelist)
for msg_cat in six.itervalues(utils.MSG_TYPES):
self.stats[msg_cat] = 0
def generate_reports(self):
"""close the whole package /module, it's time to make reports !
if persistent run, pickle results for later comparison
"""
if self.file_state.base_name is not None:
# load previous results if any
previous_stats = config.load_results(self.file_state.base_name)
# XXX code below needs refactoring to be more reporter agnostic
self.reporter.on_close(self.stats, previous_stats)
if self.config.reports:
sect = self.make_reports(self.stats, previous_stats)
if self.config.files_output:
filename = 'pylint_global.' + self.reporter.extension
self.reporter.set_output(open(filename, 'w'))
else:
sect = ureports.Section()
if self.config.reports or self.config.output_format == 'html':
self.reporter.display_results(sect)
# save results if persistent run
if self.config.persistent:
config.save_results(self.stats, self.file_state.base_name)
else:
if self.config.output_format == 'html':
# No output will be emitted for the html
# reporter if the file doesn't exist, so emit
# the results here.
self.reporter.display_results(ureports.Section())
self.reporter.on_close(self.stats, {})
# specific reports ########################################################
def report_evaluation(self, sect, stats, previous_stats):
"""make the global evaluation report"""
        # check that at least 1 statement was analyzed (usually 0 when there
        # is a syntax error preventing pylint from further processing)
if stats['statement'] == 0:
raise utils.EmptyReport()
# get a global note for the code
evaluation = self.config.evaluation
try:
note = eval(evaluation, {}, self.stats) # pylint: disable=eval-used
except Exception as ex: # pylint: disable=broad-except
msg = 'An exception occurred while rating: %s' % ex
else:
stats['global_note'] = note
msg = 'Your code has been rated at %.2f/10' % note
pnote = previous_stats.get('global_note')
if pnote is not None:
msg += ' (previous run: %.2f/10, %+.2f)' % (pnote, note - pnote)
if self.config.comment:
msg = '%s\n%s' % (msg, config.get_note_message(note))
sect.append(ureports.Text(msg))
# some reporting functions ####################################################
def report_total_messages_stats(sect, stats, previous_stats):
"""make total errors / warnings report"""
lines = ['type', 'number', 'previous', 'difference']
lines += checkers.table_lines_from_stats(stats, previous_stats,
('convention', 'refactor',
'warning', 'error'))
sect.append(ureports.Table(children=lines, cols=4, rheaders=1))
def report_messages_stats(sect, stats, _):
"""make messages type report"""
if not stats['by_msg']:
        # don't print this report when we didn't detect any errors
raise utils.EmptyReport()
in_order = sorted([(value, msg_id)
for msg_id, value in six.iteritems(stats['by_msg'])
if not msg_id.startswith('I')])
in_order.reverse()
lines = ('message id', 'occurrences')
for value, msg_id in in_order:
lines += (msg_id, str(value))
sect.append(ureports.Table(children=lines, cols=2, rheaders=1))
def report_messages_by_module_stats(sect, stats, _):
"""make errors / warnings by modules report"""
if len(stats['by_module']) == 1:
# don't print this report when we are analysing a single module
raise utils.EmptyReport()
by_mod = collections.defaultdict(dict)
for m_type in ('fatal', 'error', 'warning', 'refactor', 'convention'):
total = stats[m_type]
for module in six.iterkeys(stats['by_module']):
mod_total = stats['by_module'][module][m_type]
if total == 0:
percent = 0
else:
percent = float((mod_total)*100) / total
by_mod[module][m_type] = percent
sorted_result = []
for module, mod_info in six.iteritems(by_mod):
sorted_result.append((mod_info['error'],
mod_info['warning'],
mod_info['refactor'],
mod_info['convention'],
module))
sorted_result.sort()
sorted_result.reverse()
lines = ['module', 'error', 'warning', 'refactor', 'convention']
for line in sorted_result:
# Don't report clean modules.
if all(entry == 0 for entry in line[:-1]):
continue
lines.append(line[-1])
for val in line[:-1]:
lines.append('%.2f' % val)
if len(lines) == 5:
raise utils.EmptyReport()
sect.append(ureports.Table(children=lines, cols=5, rheaders=1))
# utilities ###################################################################
class ArgumentPreprocessingError(Exception):
"""Raised if an error occurs during argument preprocessing."""
def preprocess_options(args, search_for):
"""look for some options (keys of <search_for>) which have to be processed
before others
values of <search_for> are callback functions to call when the option is
found
"""
i = 0
while i < len(args):
arg = args[i]
if arg.startswith('--'):
try:
option, val = arg[2:].split('=', 1)
except ValueError:
option, val = arg[2:], None
try:
cb, takearg = search_for[option]
except KeyError:
i += 1
else:
del args[i]
if takearg and val is None:
if i >= len(args) or args[i].startswith('-'):
msg = 'Option %s expects a value' % option
raise ArgumentPreprocessingError(msg)
val = args[i]
del args[i]
elif not takearg and val is not None:
msg = "Option %s doesn't expects a value" % option
raise ArgumentPreprocessingError(msg)
cb(option, val)
else:
i += 1
@contextlib.contextmanager
def fix_import_path(args):
"""Prepare sys.path for running the linter checks.
Within this context, each of the given arguments is importable.
Paths are added to sys.path in corresponding order to the arguments.
We avoid adding duplicate directories to sys.path.
    `sys.path` is reset to its original value upon exiting this context.
"""
orig = list(sys.path)
changes = []
for arg in args:
path = _get_python_path(arg)
if path in changes:
continue
else:
changes.append(path)
sys.path[:] = changes + sys.path
try:
yield
finally:
sys.path[:] = orig
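# Illustrative sketch (hypothetical helper, never called by pylint itself):
# fix_import_path is meant to wrap the actual check so that the linted
# packages are importable only for the duration of the run.
def _example_fix_import_path(paths):
    with fix_import_path(paths):
        snapshot = list(sys.path)  # the arguments' directories are prepended
    # sys.path is restored to its original value once the block exits
    return snapshot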
class Run(object):
"""helper class to use as main for pylint :
run(*sys.argv[1:])
"""
LinterClass = PyLinter
option_groups = (
('Commands', 'Options which are actually commands. Options in this \
group are mutually exclusive.'),
)
def __init__(self, args, reporter=None, exit=True):
self._rcfile = None
self._plugins = []
try:
preprocess_options(args, {
# option: (callback, takearg)
'init-hook': (cb_init_hook, True),
'rcfile': (self.cb_set_rcfile, True),
'load-plugins': (self.cb_add_plugins, True),
})
except ArgumentPreprocessingError as ex:
print(ex, file=sys.stderr)
sys.exit(32)
self.linter = linter = self.LinterClass((
('rcfile',
{'action' : 'callback', 'callback' : lambda *args: 1,
'type': 'string', 'metavar': '<file>',
'help' : 'Specify a configuration file.'}),
('init-hook',
{'action' : 'callback', 'callback' : lambda *args: 1,
'type' : 'string', 'metavar': '<code>',
'level': 1,
'help' : 'Python code to execute, usually for sys.path '
'manipulation such as pygtk.require().'}),
('help-msg',
{'action' : 'callback', 'type' : 'string', 'metavar': '<msg-id>',
'callback' : self.cb_help_message,
'group': 'Commands',
'help' : 'Display a help message for the given message id and '
'exit. The value may be a comma separated list of message ids.'}),
('list-msgs',
{'action' : 'callback', 'metavar': '<msg-id>',
'callback' : self.cb_list_messages,
'group': 'Commands', 'level': 1,
'help' : "Generate pylint's messages."}),
('list-conf-levels',
{'action' : 'callback',
'callback' : cb_list_confidence_levels,
'group': 'Commands', 'level': 1,
'help' : "Generate pylint's messages."}),
('full-documentation',
{'action' : 'callback', 'metavar': '<msg-id>',
'callback' : self.cb_full_documentation,
'group': 'Commands', 'level': 1,
'help' : "Generate pylint's full documentation."}),
('generate-rcfile',
{'action' : 'callback', 'callback' : self.cb_generate_config,
'group': 'Commands',
'help' : 'Generate a sample configuration file according to '
'the current configuration. You can put other options '
'before this one to get them in the generated '
'configuration.'}),
('generate-man',
{'action' : 'callback', 'callback' : self.cb_generate_manpage,
'group': 'Commands',
'help' : "Generate pylint's man page.", 'hide': True}),
('errors-only',
{'action' : 'callback', 'callback' : self.cb_error_mode,
'short': 'E',
'help' : 'In error mode, checkers without error messages are '
'disabled and for others, only the ERROR messages are '
'displayed, and no reports are done by default'''}),
('py3k',
{'action' : 'callback', 'callback' : self.cb_python3_porting_mode,
'help' : 'In Python 3 porting mode, all checkers will be '
'disabled and only messages emitted by the porting '
'checker will be displayed'}),
('profile',
{'type' : 'yn', 'metavar' : '<y_or_n>',
'default': False, 'hide': True,
'help' : 'Profiled execution.'}),
), option_groups=self.option_groups, pylintrc=self._rcfile)
# register standard checkers
linter.load_default_plugins()
# load command line plugins
linter.load_plugin_modules(self._plugins)
# add some help section
linter.add_help_section('Environment variables', config.ENV_HELP, level=1)
# pylint: disable=bad-continuation
linter.add_help_section('Output',
'Using the default text output, the message format is : \n'
' \n'
' MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE \n'
' \n'
'There are 5 kind of message types : \n'
' * (C) convention, for programming standard violation \n'
' * (R) refactor, for bad code smell \n'
' * (W) warning, for python specific problems \n'
' * (E) error, for probable bugs in the code \n'
' * (F) fatal, if an error occurred which prevented pylint from doing further\n'
'processing.\n'
, level=1)
linter.add_help_section('Output status code',
'Pylint should exit with the following status code: \n'
' * 0 if everything went fine \n'
' * 1 if a fatal message was issued \n'
' * 2 if an error message was issued \n'
' * 4 if a warning message was issued \n'
' * 8 if a refactor message was issued \n'
' * 16 if a convention message was issued \n'
' * 32 on usage error \n'
' \n'
'status 1 to 16 will be bit-ORed so you can know which different categories have\n'
'been issued by analysing pylint output status code\n',
level=1)
# read configuration
linter.disable('pointless-except')
linter.disable('suppressed-message')
linter.disable('useless-suppression')
linter.read_config_file()
config_parser = linter.cfgfile_parser
# run init hook, if present, before loading plugins
if config_parser.has_option('MASTER', 'init-hook'):
cb_init_hook('init-hook',
textutils.unquote(config_parser.get('MASTER',
'init-hook')))
        # are there additional plugins in the file configuration?
if config_parser.has_option('MASTER', 'load-plugins'):
plugins = textutils.splitstrip(
config_parser.get('MASTER', 'load-plugins'))
linter.load_plugin_modules(plugins)
# now we can load file config and command line, plugins (which can
# provide options) have been registered
linter.load_config_file()
if reporter:
# if a custom reporter is provided as argument, it may be overridden
# by file parameters, so re-set it here, but before command line
# parsing so it's still overrideable by command line option
linter.set_reporter(reporter)
try:
args = linter.load_command_line_configuration(args)
except SystemExit as exc:
if exc.code == 2: # bad options
exc.code = 32
raise
if not args:
print(linter.help())
sys.exit(32)
if linter.config.jobs < 0:
print("Jobs number (%d) should be greater than 0"
% linter.config.jobs, file=sys.stderr)
sys.exit(32)
if linter.config.jobs > 1 or linter.config.jobs == 0:
if multiprocessing is None:
print("Multiprocessing library is missing, "
"fallback to single process", file=sys.stderr)
linter.set_option("jobs", 1)
else:
if linter.config.jobs == 0:
linter.config.jobs = multiprocessing.cpu_count()
# insert current working directory to the python path to have a correct
# behaviour
if self.linter.config.profile:
with fix_import_path(args):
print('** profiled run', file=sys.stderr)
import cProfile, pstats
cProfile.runctx('linter.check(%r)' % args, globals(), locals(),
'stones.prof')
data = pstats.Stats('stones.prof')
data.strip_dirs()
data.sort_stats('time', 'calls')
data.print_stats(30)
else:
linter.check(args)
linter.generate_reports()
if exit:
sys.exit(self.linter.msg_status)
def cb_set_rcfile(self, name, value):
"""callback for option preprocessing (i.e. before option parsing)"""
self._rcfile = value
def cb_add_plugins(self, name, value):
"""callback for option preprocessing (i.e. before option parsing)"""
self._plugins.extend(textutils.splitstrip(value))
def cb_error_mode(self, *args, **kwargs):
"""error mode:
* disable all but error messages
* disable the 'miscellaneous' checker which can be safely deactivated in
debug
* disable reports
* do not save execution information
"""
self.linter.error_mode()
def cb_generate_config(self, *args, **kwargs):
"""optik callback for sample config file generation"""
self.linter.generate_config(skipsections=('COMMANDS',))
sys.exit(0)
def cb_generate_manpage(self, *args, **kwargs):
"""optik callback for sample config file generation"""
from pylint import __pkginfo__
self.linter.generate_manpage(__pkginfo__)
sys.exit(0)
def cb_help_message(self, option, optname, value, parser):
"""optik callback for printing some help about a particular message"""
self.linter.msgs_store.help_message(textutils.splitstrip(value))
sys.exit(0)
def cb_full_documentation(self, option, optname, value, parser):
"""optik callback for printing full documentation"""
self.linter.print_full_documentation()
sys.exit(0)
def cb_list_messages(self, option, optname, value, parser): # FIXME
"""optik callback for printing available messages"""
self.linter.msgs_store.list_messages()
sys.exit(0)
def cb_python3_porting_mode(self, *args, **kwargs):
"""Activate only the python3 porting checker."""
self.linter.disable('all')
self.linter.enable('python3')
def cb_list_confidence_levels(option, optname, value, parser):
for level in interfaces.CONFIDENCE_LEVELS:
print('%-18s: %s' % level)
sys.exit(0)
def cb_init_hook(optname, value):
"""exec arbitrary code to set sys.path for instance"""
exec(value) # pylint: disable=exec-used
if __name__ == '__main__':
Run(sys.argv[1:])
| bsd-3-clause | -3,678,381,702,746,857,000 | 41.385596 | 115 | 0.540035 | false |
onceuponatimeforever/oh-mainline | vendor/packages/docutils/test/test_parsers/test_rst/test_directives/test_figures.py | 16 | 7300 | #! /usr/bin/env python
# $Id: test_figures.py 7062 2011-06-30 22:14:29Z milde $
# Author: David Goodger <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
Tests for images.py figure directives.
"""
from __init__ import DocutilsTestSupport
def suite():
s = DocutilsTestSupport.ParserTestSuite()
s.generateTests(totest)
return s
totest = {}
totest['figures'] = [
["""\
.. figure:: picture.png
""",
"""\
<document source="test data">
<figure>
<image uri="picture.png">
"""],
["""\
.. figure:: picture.png
A picture with a caption.
""",
"""\
<document source="test data">
<figure>
<image uri="picture.png">
<caption>
A picture with a caption.
"""],
["""\
.. figure:: picture.png
- A picture with an invalid caption.
""",
"""\
<document source="test data">
<figure>
<image uri="picture.png">
<system_message level="3" line="1" source="test data" type="ERROR">
<paragraph>
Figure caption must be a paragraph or empty comment.
<literal_block xml:space="preserve">
.. figure:: picture.png
\n\
- A picture with an invalid caption.
"""],
["""\
.. figure:: picture.png
..
A picture with a legend but no caption.
""",
"""\
<document source="test data">
<figure>
<image uri="picture.png">
<legend>
<paragraph>
A picture with a legend but no caption.
"""],
["""\
.. Figure:: picture.png
:height: 100
:width: 200
:scale: 50
A picture with image options and a caption.
""",
"""\
<document source="test data">
<figure>
<image height="100" scale="50" uri="picture.png" width="200">
<caption>
A picture with image options and a caption.
"""],
["""\
.. Figure:: picture.png
:height: 100
:alt: alternate text
:width: 200
:scale: 50
:figwidth: 300
:figclass: class1 class2
:name: fig:pix
A picture with image options on individual lines, and this caption.
""",
"""\
<document source="test data">
<figure classes="class1 class2" width="300px">
<image alt="alternate text" height="100" ids="fig-pix" names="fig:pix" scale="50" uri="picture.png" width="200">
<caption>
A picture with image options on individual lines, and this caption.
"""],
["""\
.. figure:: picture.png
:align: center
A figure with explicit alignment.
""",
"""\
<document source="test data">
<figure align="center">
<image uri="picture.png">
<caption>
A figure with explicit alignment.
"""],
["""\
.. figure:: picture.png
:align: top
A figure with wrong alignment.
""",
"""\
<document source="test data">
<system_message level="3" line="1" source="test data" type="ERROR">
<paragraph>
Error in "figure" directive:
invalid option value: (option: "align"; value: 'top')
"top" unknown; choose from "left", "center", or "right".
<literal_block xml:space="preserve">
.. figure:: picture.png
:align: top
A figure with wrong alignment.
"""],
["""\
This figure lacks a caption. It may still have a
"Figure 1."-style caption appended in the output.
.. figure:: picture.png
""",
"""\
<document source="test data">
<paragraph>
This figure lacks a caption. It may still have a
"Figure 1."-style caption appended in the output.
<figure>
<image uri="picture.png">
"""],
["""\
.. figure:: picture.png
A picture with a caption and a legend.
+-----------------------+-----------------------+
| Symbol | Meaning |
+=======================+=======================+
| .. image:: tent.png | Campground |
+-----------------------+-----------------------+
| .. image:: waves.png | Lake |
+-----------------------+-----------------------+
| .. image:: peak.png | Mountain |
+-----------------------+-----------------------+
""",
"""\
<document source="test data">
<figure>
<image uri="picture.png">
<caption>
A picture with a caption and a legend.
<legend>
<table>
<tgroup cols="2">
<colspec colwidth="23">
<colspec colwidth="23">
<thead>
<row>
<entry>
<paragraph>
Symbol
<entry>
<paragraph>
Meaning
<tbody>
<row>
<entry>
<image uri="tent.png">
<entry>
<paragraph>
Campground
<row>
<entry>
<image uri="waves.png">
<entry>
<paragraph>
Lake
<row>
<entry>
<image uri="peak.png">
<entry>
<paragraph>
Mountain
"""],
["""\
.. figure:: picture.png
..
A picture with a legend but no caption.
(The empty comment replaces the caption, which must
be a single paragraph.)
""",
"""\
<document source="test data">
<figure>
<image uri="picture.png">
<legend>
<paragraph>
A picture with a legend but no caption.
(The empty comment replaces the caption, which must
be a single paragraph.)
"""],
["""\
Testing for line-leaks:
.. figure:: picture.png
A picture with a caption.
.. figure:: picture.png
A picture with a caption.
.. figure:: picture.png
A picture with a caption.
.. figure:: picture.png
.. figure:: picture.png
.. figure:: picture.png
.. figure:: picture.png
A picture with a caption.
.. figure:: picture.png
.. figure:: picture.png
A picture with a caption.
.. figure:: picture.png
""",
"""\
<document source="test data">
<paragraph>
Testing for line-leaks:
<figure>
<image uri="picture.png">
<caption>
A picture with a caption.
<figure>
<image uri="picture.png">
<caption>
A picture with a caption.
<figure>
<image uri="picture.png">
<caption>
A picture with a caption.
<figure>
<image uri="picture.png">
<figure>
<image uri="picture.png">
<figure>
<image uri="picture.png">
<figure>
<image uri="picture.png">
<caption>
A picture with a caption.
<figure>
<image uri="picture.png">
<figure>
<image uri="picture.png">
<caption>
A picture with a caption.
<figure>
<image uri="picture.png">
"""],
]
if __name__ == '__main__':
import unittest
unittest.main(defaultTest='suite')
| agpl-3.0 | 128,939,058,580,258,200 | 23.914676 | 120 | 0.483425 | false |
l00py/KML_Lookup | TA-KML_lookup/bin/shapely/topology.py | 18 | 2257 | """
Intermediaries supporting GEOS topological operations
These methods all take Shapely geometries and other Python objects and delegate
to GEOS functions via ctypes.
These methods return ctypes objects that should be recast by the caller.
"""
from ctypes import byref, c_double
from shapely.geos import TopologicalError, lgeos
class Validating(object):
def _validate(self, ob, stop_prepared=False):
if ob is None or ob._geom is None:
raise ValueError("Null geometry supports no operations")
if stop_prepared and not hasattr(ob, 'type'):
raise ValueError("Prepared geometries cannot be operated on")
class Delegating(Validating):
def __init__(self, name):
self.fn = lgeos.methods[name]
def _check_topology(self, err, *geoms):
"""Raise TopologicalError if geoms are invalid.
Else, raise original error.
"""
for geom in geoms:
if not geom.is_valid:
raise TopologicalError(
"The operation '%s' could not be performed. "
"Likely cause is invalidity of the geometry %s" % (
self.fn.__name__, repr(geom)))
raise err
class BinaryRealProperty(Delegating):
def __call__(self, this, other):
self._validate(this)
self._validate(other, stop_prepared=True)
d = c_double()
retval = self.fn(this._geom, other._geom, byref(d))
return d.value
class UnaryRealProperty(Delegating):
def __call__(self, this):
self._validate(this)
d = c_double()
retval = self.fn(this._geom, byref(d))
return d.value
class BinaryTopologicalOp(Delegating):
def __call__(self, this, other, *args):
self._validate(this)
self._validate(other, stop_prepared=True)
product = self.fn(this._geom, other._geom, *args)
if product is None:
err = TopologicalError(
"This operation could not be performed. Reason: unknown")
self._check_topology(err, this, other)
return product
class UnaryTopologicalOp(Delegating):
def __call__(self, this, *args):
self._validate(this)
return self.fn(this._geom, *args)
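# Illustrative sketch (hypothetical usage, not part of this module): the
# delegating wrappers above are instantiated with the name of a GEOS method
# and then called with geometry objects, as in this distance helper.
def _example_distance(geom_a, geom_b):
    distance = BinaryRealProperty('distance')
    return distance(geom_a, geom_b)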
| mit | -7,080,651,286,775,177,000 | 27.935897 | 79 | 0.618963 | false |
foreni-packages/golismero | thirdparty_libs/django/conf/locale/lt/formats.py | 104 | 1503 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'Y \m. E j \d.'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = r'Y \m. E j \d., H:i:s'
YEAR_MONTH_FORMAT = r'Y \m. F'
MONTH_DAY_FORMAT = r'E j \d.'
SHORT_DATE_FORMAT = 'Y-m-d'
SHORT_DATETIME_FORMAT = 'Y-m-d H:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%Y-%m-%d', '%d.%m.%Y', '%d.%m.%y', # '2006-10-25', '25.10.2006', '25.10.06'
)
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30'
'%H.%M.%S', # '14.30.59'
'%H.%M', # '14.30'
)
DATETIME_INPUT_FORMATS = (
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
'%d.%m.%y %H:%M:%S', # '25.10.06 14:30:59'
'%d.%m.%y %H:%M', # '25.10.06 14:30'
'%d.%m.%y %H.%M.%S', # '25.10.06 14.30.59'
'%d.%m.%y %H.%M', # '25.10.06 14.30'
'%d.%m.%y', # '25.10.06'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
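# Illustrative sketch (hypothetical helper, not used by Django itself): the
# *_INPUT_FORMATS tuples are plain strftime patterns, so a value such as
# '25.10.2006 14:30' is matched by the '%d.%m.%Y %H:%M' entry above.
def _example_parse_datetime(value):
    from datetime import datetime
    for fmt in DATETIME_INPUT_FORMATS:
        try:
            return datetime.strptime(value, fmt)
        except ValueError:
            continue
    return None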
| gpl-2.0 | -312,388,779,386,148,100 | 34.785714 | 80 | 0.526281 | false |
uclouvain/osis_louvain | base/business/education_groups/postponement.py | 1 | 7431 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2017 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django import forms
from django.db import Error
from django.utils.translation import ugettext as _
from base.business.utils.model import model_to_dict_fk, compare_objects, update_object
from base.models.academic_year import AcademicYear, current_academic_year
from base.models.education_group_year import EducationGroupYear
EDUCATION_GROUP_MAX_POSTPONE_YEARS = 6
FIELD_TO_EXCLUDE = ['id', 'external_id', 'academic_year']
class ConsistencyError(Error):
def __init__(self, last_instance_updated, differences, *args, **kwargs):
self.last_instance_updated = last_instance_updated
self.differences = differences
super().__init__(*args, **kwargs)
def _compute_end_year(education_group):
"""
    This function computes the end year that the postponement must achieve
:arg education_group: The education group that we want to postpone
"""
# Compute max postponement based on config EDUCATION_GROUP_MAX_POSTPONE_YEARS
max_postponement_end_year = current_academic_year().year + EDUCATION_GROUP_MAX_POSTPONE_YEARS
if education_group.end_year:
# Get the min [Prevent education_group.end_year > academic_year.year provided by system]
max_postponement_end_year = min(max_postponement_end_year, education_group.end_year)
# Lookup on database, get the latest existing education group year [Prevent desync end_date and data]
latest_egy = education_group.educationgroupyear_set.select_related('academic_year') \
.order_by('academic_year__year').last()
return max(max_postponement_end_year, latest_egy.academic_year.year)
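# Worked example (assuming EDUCATION_GROUP_MAX_POSTPONE_YEARS stays at 6):
# with a current academic year of 2017 and an education group ending in 2020,
# the cap is min(2017 + 6, 2020) = 2020, which is then raised to the year of
# the latest existing EducationGroupYear if that one is later.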
def _postpone_m2m(education_group_year, postponed_egy):
fields_to_exclude = []
opts = education_group_year._meta
for f in opts.many_to_many:
if f.name in fields_to_exclude:
continue
m2m_cls = f.rel.through
# Remove records of postponed_egy
m2m_cls.objects.all().filter(education_group_year=postponed_egy).delete()
# Recreate records
for m2m_obj in m2m_cls.objects.all().filter(education_group_year_id=education_group_year):
m2m_data_to_postpone = model_to_dict_fk(m2m_obj, exclude=['id', 'external_id', 'education_group_year'])
m2m_cls(education_group_year=postponed_egy, **m2m_data_to_postpone).save()
def duplicate_education_group_year(old_education_group_year, new_academic_year, dict_new_value=None,
dict_initial_egy=None):
if not dict_new_value:
dict_new_value = model_to_dict_fk(old_education_group_year, exclude=FIELD_TO_EXCLUDE)
defaults_values = {x: v for x, v in dict_new_value.items() if not isinstance(v, list)}
postponed_egy, created = EducationGroupYear.objects.get_or_create(
education_group=old_education_group_year.education_group,
academic_year=new_academic_year,
# Create object without m2m relations
defaults=defaults_values
)
# During create of new postponed object, we need to update only the m2m relations
if created:
# Postpone the m2m [languages / secondary_domains]
_postpone_m2m(old_education_group_year, postponed_egy)
# During the update, we need to check if the postponed object has been modify
else:
dict_postponed_egy = model_to_dict_fk(postponed_egy, exclude=FIELD_TO_EXCLUDE)
differences = compare_objects(dict_initial_egy, dict_postponed_egy) \
if dict_initial_egy and dict_postponed_egy else {}
if differences:
raise ConsistencyError(postponed_egy, differences)
update_object(postponed_egy, dict_new_value)
# Postpone the m2m [languages / secondary_domains]
_postpone_m2m(old_education_group_year, postponed_egy)
return postponed_egy
class PostponementEducationGroupYearMixin:
"""
    This mixin will report the modification to the future years.
    If one of the future years is already modified, it will stop the postponement and append a warning message
"""
field_to_exclude = FIELD_TO_EXCLUDE
dict_initial_egy = {}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.postpone_start_year = None
self.postpone_end_year = None
self.education_group_year_postponed = []
self.postponement_errors = {}
self.warnings = []
if not self._is_creation():
self.dict_initial_egy = model_to_dict_fk(
self.forms[forms.ModelForm].instance, exclude=self.field_to_exclude
)
def save(self):
education_group_year = super().save()
self.postpone_start_year = education_group_year.academic_year.year
self.postpone_end_year = _compute_end_year(education_group_year.education_group)
self._start_postponement(education_group_year)
return education_group_year
def _start_postponement(self, education_group_year):
dict_new_value = model_to_dict_fk(education_group_year, exclude=self.field_to_exclude)
for academic_year in AcademicYear.objects.filter(year__gt=self.postpone_start_year,
year__lte=self.postpone_end_year):
try:
postponed_egy = duplicate_education_group_year(
education_group_year, academic_year, dict_new_value, self.dict_initial_egy
)
self.education_group_year_postponed.append(postponed_egy)
except ConsistencyError as e:
self.add_postponement_errors(e)
def add_postponement_errors(self, consistency_error):
for difference in consistency_error.differences:
error = _("%(col_name)s has been already modified.") % {
"col_name": _(EducationGroupYear._meta.get_field(difference).verbose_name).title(),
}
self.warnings.append(
_("Consistency error in %(academic_year)s : %(error)s") % {
'academic_year': consistency_error.last_instance_updated.academic_year,
'error': error
}
)
| agpl-3.0 | 5,487,079,154,868,688,000 | 41.457143 | 115 | 0.656393 | false |
wogsland/QSTK | build/lib.linux-x86_64-2.7/QSTK/qstkfeat/classes.py | 8 | 1658 | '''
(c) 2011, 2012 Georgia Tech Research Corporation
This source code is released under the New BSD license. Please see
http://wiki.quantsoftware.org/index.php?title=QSTK_License
for license details.
Created on Nov 7, 2011
@author: John Cornwell
@contact: [email protected]
@summary: File containing various classification functions
'''
# 3rd Party Imports
import pandas as pand
import numpy as np
def class_fut_ret( d_data, i_lookforward=21, s_rel=None, b_use_open=False ):
'''
@summary: Calculate classification, uses future returns
@param d_data: Dictionary of data to use
@param i_lookforward: Number of days to look in the future
    @param s_rel: Stock symbol that this should be relative to, usually $SPX.
@param b_use_open: If True, stock will be purchased at T+1 open, sold at
T+i_lookforward close
@return: DataFrame containing values
'''
if b_use_open:
df_val = d_data['open'].copy()
else:
df_val = d_data['close'].copy()
na_val = df_val.values
if b_use_open:
na_val[:-(i_lookforward + 1), :] = ((na_val[i_lookforward + 1:, :] -
na_val[1:-(i_lookforward), :]) /
na_val[1:-(i_lookforward), :])
na_val[-(i_lookforward+1):, :] = np.nan
else:
na_val[:-i_lookforward, :] = ((na_val[i_lookforward:, :] -
na_val[:-i_lookforward, :]) /
na_val[:-i_lookforward, :])
na_val[-i_lookforward:, :] = np.nan
return df_val
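# Illustrative sketch (hypothetical helper, not part of the original module):
# the classification value is simply the forward return over i_lookforward
# periods, shown here for a single symbol with a one-day lookforward.
def _example_future_return():
    na_close = np.array([[10.0], [11.0], [12.1]])
    # (close[t + 1] - close[t]) / close[t] -> [[0.10], [0.10]]
    return (na_close[1:, :] - na_close[:-1, :]) / na_close[:-1, :]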
if __name__ == '__main__':
pass
| bsd-3-clause | -710,117,080,440,805,200 | 29.703704 | 78 | 0.572376 | false |
Gui13/CouchPotatoServer | libs/subliminal/core.py | 46 | 12840 | # -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <[email protected]>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
from .exceptions import DownloadFailedError
from .services import ServiceConfig
from .tasks import DownloadTask, ListTask
from .utils import get_keywords
from .videos import Episode, Movie, scan
from .language import Language
from collections import defaultdict
from itertools import groupby
import bs4
import guessit
import logging
__all__ = ['SERVICES', 'LANGUAGE_INDEX', 'SERVICE_INDEX', 'SERVICE_CONFIDENCE', 'MATCHING_CONFIDENCE',
'create_list_tasks', 'create_download_tasks', 'consume_task', 'matching_confidence',
'key_subtitles', 'group_by_video']
logger = logging.getLogger(__name__)
SERVICES = ['opensubtitles', 'bierdopje', 'subswiki', 'subtitulos', 'thesubdb', 'addic7ed', 'tvsubtitles']
LANGUAGE_INDEX, SERVICE_INDEX, SERVICE_CONFIDENCE, MATCHING_CONFIDENCE = range(4)
def create_list_tasks(paths, languages, services, force, multi, cache_dir, max_depth, scan_filter):
"""Create a list of :class:`~subliminal.tasks.ListTask` from one or more paths using the given criteria
:param paths: path(s) to video file or folder
:type paths: string or list
:param set languages: languages to search for
:param list services: services to use for the search
:param bool force: force searching for subtitles even if some are detected
:param bool multi: search multiple languages for the same video
:param string cache_dir: path to the cache directory to use
:param int max_depth: maximum depth for scanning entries
:param function scan_filter: filter function that takes a path as argument and returns a boolean indicating whether it has to be filtered out (``True``) or not (``False``)
:return: the created tasks
:rtype: list of :class:`~subliminal.tasks.ListTask`
"""
scan_result = []
for p in paths:
scan_result.extend(scan(p, max_depth, scan_filter))
logger.debug(u'Found %d videos in %r with maximum depth %d' % (len(scan_result), paths, max_depth))
tasks = []
config = ServiceConfig(multi, cache_dir)
services = filter_services(services)
for video, detected_subtitles in scan_result:
detected_languages = set(s.language for s in detected_subtitles)
wanted_languages = languages.copy()
if not force and multi:
wanted_languages -= detected_languages
if not wanted_languages:
logger.debug(u'No need to list multi subtitles %r for %r because %r detected' % (languages, video, detected_languages))
continue
if not force and not multi and Language('Undetermined') in detected_languages:
logger.debug(u'No need to list single subtitles %r for %r because one detected' % (languages, video))
continue
logger.debug(u'Listing subtitles %r for %r with services %r' % (wanted_languages, video, services))
for service_name in services:
mod = __import__('services.' + service_name, globals=globals(), locals=locals(), fromlist=['Service'], level=-1)
service = mod.Service
if not service.check_validity(video, wanted_languages):
continue
task = ListTask(video, wanted_languages & service.languages, service_name, config)
logger.debug(u'Created task %r' % task)
tasks.append(task)
return tasks
def create_download_tasks(subtitles_by_video, languages, multi):
"""Create a list of :class:`~subliminal.tasks.DownloadTask` from a list results grouped by video
:param subtitles_by_video: :class:`~subliminal.tasks.ListTask` results with ordered subtitles
:type subtitles_by_video: dict of :class:`~subliminal.videos.Video` => [:class:`~subliminal.subtitles.Subtitle`]
:param languages: languages in preferred order
:type languages: :class:`~subliminal.language.language_list`
:param bool multi: download multiple languages for the same video
:return: the created tasks
:rtype: list of :class:`~subliminal.tasks.DownloadTask`
"""
tasks = []
for video, subtitles in subtitles_by_video.iteritems():
if not subtitles:
continue
if not multi:
task = DownloadTask(video, list(subtitles))
logger.debug(u'Created task %r' % task)
tasks.append(task)
continue
for _, by_language in groupby(subtitles, lambda s: languages.index(s.language)):
task = DownloadTask(video, list(by_language))
logger.debug(u'Created task %r' % task)
tasks.append(task)
return tasks
def consume_task(task, services=None):
"""Consume a task. If the ``services`` parameter is given, the function will attempt
to get the service from it. In case the service is not in ``services``, it will be initialized
and put in ``services``
:param task: task to consume
:type task: :class:`~subliminal.tasks.ListTask` or :class:`~subliminal.tasks.DownloadTask`
:param dict services: mapping between the service name and an instance of this service
:return: the result of the task
:rtype: list of :class:`~subliminal.subtitles.ResultSubtitle`
"""
if services is None:
services = {}
logger.info(u'Consuming %r' % task)
result = None
if isinstance(task, ListTask):
service = get_service(services, task.service, config=task.config)
result = service.list(task.video, task.languages)
elif isinstance(task, DownloadTask):
for subtitle in task.subtitles:
service = get_service(services, subtitle.service)
try:
service.download(subtitle)
result = [subtitle]
break
except DownloadFailedError:
logger.warning(u'Could not download subtitle %r, trying next' % subtitle)
continue
if result is None:
logger.error(u'No subtitles could be downloaded for video %r' % task.video)
return result
def matching_confidence(video, subtitle):
"""Compute the probability (confidence) that the subtitle matches the video
:param video: video to match
:type video: :class:`~subliminal.videos.Video`
:param subtitle: subtitle to match
:type subtitle: :class:`~subliminal.subtitles.Subtitle`
:return: the matching probability
:rtype: float
"""
guess = guessit.guess_file_info(subtitle.release, 'autodetect')
video_keywords = get_keywords(video.guess)
subtitle_keywords = get_keywords(guess) | subtitle.keywords
logger.debug(u'Video keywords %r - Subtitle keywords %r' % (video_keywords, subtitle_keywords))
replacement = {'keywords': len(video_keywords & subtitle_keywords)}
if isinstance(video, Episode):
replacement.update({'series': 0, 'season': 0, 'episode': 0})
matching_format = '{series:b}{season:b}{episode:b}{keywords:03b}'
best = matching_format.format(series=1, season=1, episode=1, keywords=len(video_keywords))
if guess['type'] in ['episode', 'episodesubtitle']:
if 'series' in guess and guess['series'].lower() == video.series.lower():
replacement['series'] = 1
if 'season' in guess and guess['season'] == video.season:
replacement['season'] = 1
if 'episodeNumber' in guess and guess['episodeNumber'] == video.episode:
replacement['episode'] = 1
elif isinstance(video, Movie):
replacement.update({'title': 0, 'year': 0})
matching_format = '{title:b}{year:b}{keywords:03b}'
best = matching_format.format(title=1, year=1, keywords=len(video_keywords))
if guess['type'] in ['movie', 'moviesubtitle']:
if 'title' in guess and guess['title'].lower() == video.title.lower():
replacement['title'] = 1
if 'year' in guess and guess['year'] == video.year:
replacement['year'] = 1
else:
logger.debug(u'Not able to compute confidence for %r' % video)
return 0.0
logger.debug(u'Found %r' % replacement)
confidence = float(int(matching_format.format(**replacement), 2)) / float(int(best, 2))
logger.info(u'Computed confidence %.4f for %r and %r' % (confidence, video, subtitle))
return confidence
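# Worked example: for an episode with three video keywords where series,
# season and episode all match but only two keywords do, the score is
# int('111010', 2) / int('111011', 2), i.e. 58 / 59, roughly 0.983.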
def get_service(services, service_name, config=None):
"""Get a service from its name in the service dict with the specified config.
If the service does not exist in the service dict, it is created and added to the dict.
:param dict services: dict where to get existing services or put created ones
:param string service_name: name of the service to get
:param config: config to use for the service
:type config: :class:`~subliminal.services.ServiceConfig` or None
:return: the corresponding service
:rtype: :class:`~subliminal.services.ServiceBase`
"""
if service_name not in services:
mod = __import__('services.' + service_name, globals=globals(), locals=locals(), fromlist=['Service'], level=-1)
services[service_name] = mod.Service()
services[service_name].init()
services[service_name].config = config
return services[service_name]
def key_subtitles(subtitle, video, languages, services, order):
"""Create a key to sort subtitle using the given order
:param subtitle: subtitle to sort
:type subtitle: :class:`~subliminal.subtitles.ResultSubtitle`
:param video: video to match
:type video: :class:`~subliminal.videos.Video`
:param list languages: languages in preferred order
:param list services: services in preferred order
:param order: preferred order for subtitles sorting
    :type order: list of :data:`LANGUAGE_INDEX`, :data:`SERVICE_INDEX`, :data:`SERVICE_CONFIDENCE`, :data:`MATCHING_CONFIDENCE`
:return: a key ready to use for subtitles sorting
:rtype: int
"""
key = ''
for sort_item in order:
if sort_item == LANGUAGE_INDEX:
key += '{0:03d}'.format(len(languages) - languages.index(subtitle.language) - 1)
key += '{0:01d}'.format(subtitle.language == languages[languages.index(subtitle.language)])
elif sort_item == SERVICE_INDEX:
key += '{0:02d}'.format(len(services) - services.index(subtitle.service) - 1)
elif sort_item == SERVICE_CONFIDENCE:
key += '{0:04d}'.format(int(subtitle.confidence * 1000))
elif sort_item == MATCHING_CONFIDENCE:
confidence = 0
if subtitle.release:
confidence = matching_confidence(video, subtitle)
key += '{0:04d}'.format(int(confidence * 1000))
return int(key)
def group_by_video(list_results):
"""Group the results of :class:`ListTasks <subliminal.tasks.ListTask>` into a
dictionary of :class:`~subliminal.videos.Video` => :class:`~subliminal.subtitles.Subtitle`
:param list_results:
:type list_results: list of result of :class:`~subliminal.tasks.ListTask`
:return: subtitles grouped by videos
:rtype: dict of :class:`~subliminal.videos.Video` => [:class:`~subliminal.subtitles.Subtitle`]
"""
result = defaultdict(list)
for video, subtitles in list_results:
result[video] += subtitles or []
return result
def filter_services(services):
"""Filter out services that are not available because of a missing feature
:param list services: service names to filter
:return: a copy of the initial list of service names without unavailable ones
:rtype: list
"""
filtered_services = services[:]
for service_name in services:
mod = __import__('services.' + service_name, globals=globals(), locals=locals(), fromlist=['Service'], level=-1)
service = mod.Service
if service.required_features is not None and bs4.builder_registry.lookup(*service.required_features) is None:
logger.warning(u'Service %s not available: none of available features could be used. One of %r required' % (service_name, service.required_features))
filtered_services.remove(service_name)
return filtered_services
| gpl-3.0 | -6,847,397,580,241,625,000 | 45.690909 | 175 | 0.671651 | false |
ioram7/keystone-federado-pgid2013 | build/paste/paste/util/ip4.py | 27 | 9271 | # -*- coding: iso-8859-15 -*-
"""IP4 address range set implementation.
Implements an IPv4-range type.
Copyright (C) 2006, Heiko Wundram.
Released under the MIT-license.
"""
# Version information
# -------------------
__author__ = "Heiko Wundram <[email protected]>"
__version__ = "0.2"
__revision__ = "3"
__date__ = "2006-01-20"
# Imports
# -------
import intset
import socket
# IP4Range class
# --------------
class IP4Range(intset.IntSet):
"""IP4 address range class with efficient storage of address ranges.
Supports all set operations."""
_MINIP4 = 0
_MAXIP4 = (1<<32) - 1
_UNITYTRANS = "".join([chr(n) for n in range(256)])
_IPREMOVE = "0123456789."
def __init__(self,*args):
"""Initialize an ip4range class. The constructor accepts an unlimited
number of arguments that may either be tuples in the form (start,stop),
integers, longs or strings, where start and stop in a tuple may
also be of the form integer, long or string.
Passing an integer or long means passing an IPv4-address that's already
been converted to integer notation, whereas passing a string specifies
an address where this conversion still has to be done. A string
address may be in the following formats:
- 1.2.3.4 - a plain address, interpreted as a single address
- 1.2.3 - a set of addresses, interpreted as 1.2.3.0-1.2.3.255
- localhost - hostname to look up, interpreted as single address
- 1.2.3<->5 - a set of addresses, interpreted as 1.2.3.0-1.2.5.255
- 1.2.0.0/16 - a set of addresses, interpreted as 1.2.0.0-1.2.255.255
Only the first three notations are valid if you use a string address in
a tuple, whereby notation 2 is interpreted as 1.2.3.0 if specified as
lower bound and 1.2.3.255 if specified as upper bound, not as a range
of addresses.
Specifying a range is done with the <-> operator. This is necessary
because '-' might be present in a hostname. '<->' shouldn't be, ever.
"""
# Special case copy constructor.
if len(args) == 1 and isinstance(args[0],IP4Range):
super(IP4Range,self).__init__(args[0])
return
# Convert arguments to tuple syntax.
args = list(args)
for i in range(len(args)):
argval = args[i]
if isinstance(argval,str):
if "<->" in argval:
# Type 4 address.
args[i] = self._parseRange(*argval.split("<->",1))
continue
elif "/" in argval:
# Type 5 address.
args[i] = self._parseMask(*argval.split("/",1))
else:
# Type 1, 2 or 3.
args[i] = self._parseAddrRange(argval)
elif isinstance(argval,tuple):
                if len(argval) <> 2:
raise ValueError("Tuple is of invalid length.")
addr1, addr2 = argval
if isinstance(addr1,str):
addr1 = self._parseAddrRange(addr1)[0]
elif not isinstance(addr1,(int,long)):
raise TypeError("Invalid argument.")
if isinstance(addr2,str):
addr2 = self._parseAddrRange(addr2)[1]
elif not isinstance(addr2,(int,long)):
raise TypeError("Invalid argument.")
args[i] = (addr1,addr2)
elif not isinstance(argval,(int,long)):
raise TypeError("Invalid argument.")
# Initialize the integer set.
super(IP4Range,self).__init__(min=self._MINIP4,max=self._MAXIP4,*args)
# Parsing functions
# -----------------
def _parseRange(self,addr1,addr2):
naddr1, naddr1len = _parseAddr(addr1)
naddr2, naddr2len = _parseAddr(addr2)
if naddr2len < naddr1len:
naddr2 += naddr1&(((1<<((naddr1len-naddr2len)*8))-1)<<
(naddr2len*8))
naddr2len = naddr1len
elif naddr2len > naddr1len:
raise ValueError("Range has more dots than address.")
naddr1 <<= (4-naddr1len)*8
naddr2 <<= (4-naddr2len)*8
naddr2 += (1<<((4-naddr2len)*8))-1
return (naddr1,naddr2)
def _parseMask(self,addr,mask):
naddr, naddrlen = _parseAddr(addr)
naddr <<= (4-naddrlen)*8
try:
if not mask:
masklen = 0
else:
masklen = int(mask)
if not 0 <= masklen <= 32:
raise ValueError
except ValueError:
try:
                mask, maskparts = _parseAddr(mask,False)
                mask <<= (4-maskparts)*8
except ValueError:
raise ValueError("Mask isn't parseable.")
remaining = 0
masklen = 0
if not mask:
masklen = 0
else:
                while not (mask&1):
                    remaining += 1
                    mask >>= 1
while (mask&1):
mask >>= 1
masklen += 1
if remaining+masklen <> 32:
raise ValueError("Mask isn't a proper host mask.")
naddr1 = naddr & (((1<<masklen)-1)<<(32-masklen))
naddr2 = naddr1 + (1<<(32-masklen)) - 1
return (naddr1,naddr2)
def _parseAddrRange(self,addr):
naddr, naddrlen = _parseAddr(addr)
naddr1 = naddr<<((4-naddrlen)*8)
naddr2 = ( (naddr<<((4-naddrlen)*8)) +
(1<<((4-naddrlen)*8)) - 1 )
return (naddr1,naddr2)
# Utility functions
# -----------------
def _int2ip(self,num):
rv = []
for i in range(4):
rv.append(str(num&255))
num >>= 8
return ".".join(reversed(rv))
# Iterating
# ---------
def iteraddresses(self):
"""Returns an iterator which iterates over ips in this iprange. An
IP is returned in string form (e.g. '1.2.3.4')."""
for v in super(IP4Range,self).__iter__():
yield self._int2ip(v)
def iterranges(self):
"""Returns an iterator which iterates over ip-ip ranges which build
this iprange if combined. An ip-ip pair is returned in string form
(e.g. '1.2.3.4-2.3.4.5')."""
for r in self._ranges:
if r[1]-r[0] == 1:
yield self._int2ip(r[0])
else:
yield '%s-%s' % (self._int2ip(r[0]),self._int2ip(r[1]-1))
def itermasks(self):
"""Returns an iterator which iterates over ip/mask pairs which build
this iprange if combined. An IP/Mask pair is returned in string form
(e.g. '1.2.3.0/24')."""
for r in self._ranges:
for v in self._itermasks(r):
yield v
def _itermasks(self,r):
ranges = [r]
while ranges:
cur = ranges.pop()
curmask = 0
while True:
curmasklen = 1<<(32-curmask)
start = (cur[0]+curmasklen-1)&(((1<<curmask)-1)<<(32-curmask))
if start >= cur[0] and start+curmasklen <= cur[1]:
break
else:
curmask += 1
yield "%s/%s" % (self._int2ip(start),curmask)
if cur[0] < start:
ranges.append((cur[0],start))
if cur[1] > start+curmasklen:
ranges.append((start+curmasklen,cur[1]))
__iter__ = iteraddresses
# Printing
# --------
def __repr__(self):
"""Returns a string which can be used to reconstruct this iprange."""
rv = []
for start, stop in self._ranges:
if stop-start == 1:
rv.append("%r" % (self._int2ip(start),))
else:
rv.append("(%r,%r)" % (self._int2ip(start),
self._int2ip(stop-1)))
return "%s(%s)" % (self.__class__.__name__,",".join(rv))
def _parseAddr(addr,lookup=True):
if lookup and addr.translate(IP4Range._UNITYTRANS, IP4Range._IPREMOVE):
try:
addr = socket.gethostbyname(addr)
except socket.error:
raise ValueError("Invalid Hostname as argument.")
naddr = 0
for naddrpos, part in enumerate(addr.split(".")):
if naddrpos >= 4:
raise ValueError("Address contains more than four parts.")
try:
if not part:
part = 0
else:
part = int(part)
if not 0 <= part < 256:
raise ValueError
except ValueError:
raise ValueError("Address part out of range.")
naddr <<= 8
naddr += part
return naddr, naddrpos+1
def ip2int(addr, lookup=True):
return _parseAddr(addr, lookup=lookup)[0]
if __name__ == "__main__":
# Little test script.
x = IP4Range("172.22.162.250/24")
y = IP4Range("172.22.162.250","172.22.163.250","172.22.163.253<->255")
print x
for val in x.itermasks():
print val
for val in y.itermasks():
print val
for val in (x|y).itermasks():
print val
for val in (x^y).iterranges():
print val
for val in x:
print val
| apache-2.0 | 4,442,647,899,851,012,600 | 32.959707 | 79 | 0.520764 | false |
Lujeni/ansible | lib/ansible/inventory/manager.py | 4 | 24970 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import fnmatch
import os
import sys
import re
import itertools
import traceback
from operator import attrgetter
from random import shuffle
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
from ansible.inventory.data import InventoryData
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes, to_text
from ansible.parsing.utils.addresses import parse_address
from ansible.plugins.loader import inventory_loader
from ansible.utils.helpers import deduplicate_list
from ansible.utils.path import unfrackpath
from ansible.utils.display import Display
from ansible.utils.vars import combine_vars
from ansible.vars.plugins import get_vars_from_inventory_sources
display = Display()
IGNORED_ALWAYS = [br"^\.", b"^host_vars$", b"^group_vars$", b"^vars_plugins$"]
IGNORED_PATTERNS = [to_bytes(x) for x in C.INVENTORY_IGNORE_PATTERNS]
IGNORED_EXTS = [b'%s$' % to_bytes(re.escape(x)) for x in C.INVENTORY_IGNORE_EXTS]
IGNORED = re.compile(b'|'.join(IGNORED_ALWAYS + IGNORED_PATTERNS + IGNORED_EXTS))
PATTERN_WITH_SUBSCRIPT = re.compile(
r'''^
(.+) # A pattern expression ending with...
\[(?: # A [subscript] expression comprising:
(-?[0-9]+)| # A single positive or negative number
([0-9]+)([:-]) # Or an x:y or x: range.
([0-9]*)
)\]
$
''', re.X
)
def order_patterns(patterns):
''' takes a list of patterns and reorders them by modifier to apply them consistently '''
# FIXME: this goes away if we apply patterns incrementally or by groups
pattern_regular = []
pattern_intersection = []
pattern_exclude = []
for p in patterns:
if not p:
continue
if p[0] == "!":
pattern_exclude.append(p)
elif p[0] == "&":
pattern_intersection.append(p)
else:
pattern_regular.append(p)
# if no regular pattern was given, hence only exclude and/or intersection
# make that magically work
if pattern_regular == []:
pattern_regular = ['all']
# when applying the host selectors, run those without the "&" or "!"
# first, then the &s, then the !s.
return pattern_regular + pattern_intersection + pattern_exclude
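# Illustrative sketch (hypothetical helper, never called by Ansible itself):
# plain patterns come first, then '&' intersections, then '!' exclusions, so
# restrictions are always applied after the hosts they act on are selected.
def _example_order_patterns():
    return order_patterns(['!db*', 'web*', '&staging'])
    # -> ['web*', '&staging', '!db*']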
def split_host_pattern(pattern):
"""
Takes a string containing host patterns separated by commas (or a list
thereof) and returns a list of single patterns (which may not contain
commas). Whitespace is ignored.
Also accepts ':' as a separator for backwards compatibility, but it is
not recommended due to the conflict with IPv6 addresses and host ranges.
Example: 'a,b[1], c[2:3] , d' -> ['a', 'b[1]', 'c[2:3]', 'd']
"""
if isinstance(pattern, list):
return list(itertools.chain(*map(split_host_pattern, pattern)))
elif not isinstance(pattern, string_types):
pattern = to_text(pattern, errors='surrogate_or_strict')
# If it's got commas in it, we'll treat it as a straightforward
# comma-separated list of patterns.
if u',' in pattern:
patterns = pattern.split(u',')
# If it doesn't, it could still be a single pattern. This accounts for
# non-separator uses of colons: IPv6 addresses and [x:y] host ranges.
else:
try:
(base, port) = parse_address(pattern, allow_ranges=True)
patterns = [pattern]
except Exception:
# The only other case we accept is a ':'-separated list of patterns.
# This mishandles IPv6 addresses, and is retained only for backwards
# compatibility.
patterns = re.findall(
to_text(r'''(?: # We want to match something comprising:
[^\s:\[\]] # (anything other than whitespace or ':[]'
| # ...or...
\[[^\]]*\] # a single complete bracketed expression)
)+ # occurring once or more
'''), pattern, re.X
)
return [p.strip() for p in patterns if p.strip()]
class InventoryManager(object):
''' Creates and manages inventory '''
def __init__(self, loader, sources=None):
# base objects
self._loader = loader
self._inventory = InventoryData()
# a list of host(names) to contain current inquiries to
self._restriction = None
self._subset = None
# caches
self._hosts_patterns_cache = {} # resolved full patterns
self._pattern_cache = {} # resolved individual patterns
# the inventory dirs, files, script paths or lists of hosts
if sources is None:
self._sources = []
elif isinstance(sources, string_types):
self._sources = [sources]
else:
self._sources = sources
# get to work!
self.parse_sources(cache=True)
@property
def localhost(self):
return self._inventory.localhost
@property
def groups(self):
return self._inventory.groups
@property
def hosts(self):
return self._inventory.hosts
def add_host(self, host, group=None, port=None):
return self._inventory.add_host(host, group, port)
def add_group(self, group):
return self._inventory.add_group(group)
def get_groups_dict(self):
return self._inventory.get_groups_dict()
def reconcile_inventory(self):
self.clear_caches()
return self._inventory.reconcile_inventory()
def get_host(self, hostname):
return self._inventory.get_host(hostname)
def _fetch_inventory_plugins(self):
''' sets up loaded inventory plugins for usage '''
display.vvvv('setting up inventory plugins')
plugins = []
for name in C.INVENTORY_ENABLED:
plugin = inventory_loader.get(name)
if plugin:
plugins.append(plugin)
else:
display.warning('Failed to load inventory plugin, skipping %s' % name)
if not plugins:
raise AnsibleError("No inventory plugins available to generate inventory, make sure you have at least one whitelisted.")
return plugins
def parse_sources(self, cache=False):
''' iterate over inventory sources and parse each one to populate it'''
parsed = False
# allow for multiple inventory parsing
for source in self._sources:
if source:
if ',' not in source:
source = unfrackpath(source, follow=False)
parse = self.parse_source(source, cache=cache)
if parse and not parsed:
parsed = True
if parsed:
# do post processing
self._inventory.reconcile_inventory()
else:
if C.INVENTORY_UNPARSED_IS_FAILED:
raise AnsibleError("No inventory was parsed, please check your configuration and options.")
else:
display.warning("No inventory was parsed, only implicit localhost is available")
for group in self.groups.values():
group.vars = combine_vars(group.vars, get_vars_from_inventory_sources(self._loader, self._sources, [group], 'inventory'))
for host in self.hosts.values():
host.vars = combine_vars(host.vars, get_vars_from_inventory_sources(self._loader, self._sources, [host], 'inventory'))
def parse_source(self, source, cache=False):
''' Generate or update inventory for the source provided '''
parsed = False
display.debug(u'Examining possible inventory source: %s' % source)
# use binary for path functions
b_source = to_bytes(source)
# process directories as a collection of inventories
if os.path.isdir(b_source):
display.debug(u'Searching for inventory files in directory: %s' % source)
for i in sorted(os.listdir(b_source)):
display.debug(u'Considering %s' % i)
# Skip hidden files and stuff we explicitly ignore
if IGNORED.search(i):
continue
# recursively deal with directory entries
fullpath = to_text(os.path.join(b_source, i), errors='surrogate_or_strict')
parsed_this_one = self.parse_source(fullpath, cache=cache)
display.debug(u'parsed %s as %s' % (fullpath, parsed_this_one))
if not parsed:
parsed = parsed_this_one
else:
# left with strings or files, let plugins figure it out
# set so new hosts can use for inventory_file/dir vars
self._inventory.current_source = source
# try source with each plugin
failures = []
for plugin in self._fetch_inventory_plugins():
plugin_name = to_text(getattr(plugin, '_load_name', getattr(plugin, '_original_path', '')))
display.debug(u'Attempting to use plugin %s (%s)' % (plugin_name, plugin._original_path))
# initialize and figure out if plugin wants to attempt parsing this file
try:
plugin_wants = bool(plugin.verify_file(source))
except Exception:
plugin_wants = False
if plugin_wants:
try:
# FIXME in case plugin fails 1/2 way we have partial inventory
plugin.parse(self._inventory, self._loader, source, cache=cache)
try:
plugin.update_cache_if_changed()
except AttributeError:
# some plugins might not implement caching
pass
parsed = True
display.vvv('Parsed %s inventory source with %s plugin' % (source, plugin_name))
break
except AnsibleParserError as e:
display.debug('%s was not parsable by %s' % (source, plugin_name))
tb = ''.join(traceback.format_tb(sys.exc_info()[2]))
failures.append({'src': source, 'plugin': plugin_name, 'exc': e, 'tb': tb})
except Exception as e:
display.debug('%s failed while attempting to parse %s' % (plugin_name, source))
tb = ''.join(traceback.format_tb(sys.exc_info()[2]))
failures.append({'src': source, 'plugin': plugin_name, 'exc': AnsibleError(e), 'tb': tb})
else:
display.vvv("%s declined parsing %s as it did not pass its verify_file() method" % (plugin_name, source))
else:
if not parsed and failures:
# only if no plugin processed files should we show errors.
for fail in failures:
display.warning(u'\n* Failed to parse %s with %s plugin: %s' % (to_text(fail['src']), fail['plugin'], to_text(fail['exc'])))
if 'tb' in fail:
display.vvv(to_text(fail['tb']))
if C.INVENTORY_ANY_UNPARSED_IS_FAILED:
raise AnsibleError(u'Completely failed to parse inventory source %s' % (source))
if not parsed:
if source != '/etc/ansible/hosts' or os.path.exists(source):
# only warn if NOT using the default and if using it, only if the file is present
display.warning("Unable to parse %s as an inventory source" % source)
# clear up, jic
self._inventory.current_source = None
return parsed
def clear_caches(self):
''' clear all caches '''
self._hosts_patterns_cache = {}
self._pattern_cache = {}
# FIXME: flush inventory cache
def refresh_inventory(self):
''' recalculate inventory '''
self.clear_caches()
self._inventory = InventoryData()
self.parse_sources(cache=False)
def _match_list(self, items, pattern_str):
# compile patterns
try:
if not pattern_str[0] == '~':
pattern = re.compile(fnmatch.translate(pattern_str))
else:
pattern = re.compile(pattern_str[1:])
except Exception:
raise AnsibleError('Invalid host list pattern: %s' % pattern_str)
# apply patterns
results = []
for item in items:
if pattern.match(item):
results.append(item)
return results
def get_hosts(self, pattern="all", ignore_limits=False, ignore_restrictions=False, order=None):
"""
Takes a pattern or list of patterns and returns a list of matching
inventory host names, taking into account any active restrictions
or applied subsets
"""
hosts = []
# Check if pattern already computed
if isinstance(pattern, list):
pattern_list = pattern[:]
else:
pattern_list = [pattern]
if pattern_list:
if not ignore_limits and self._subset:
pattern_list.extend(self._subset)
if not ignore_restrictions and self._restriction:
pattern_list.extend(self._restriction)
# This is only used as a hash key in the self._hosts_patterns_cache dict
# a tuple is faster than stringifying
pattern_hash = tuple(pattern_list)
if pattern_hash not in self._hosts_patterns_cache:
patterns = split_host_pattern(pattern)
hosts[:] = self._evaluate_patterns(patterns)
# mainly useful for hostvars[host] access
if not ignore_limits and self._subset:
# exclude hosts not in a subset, if defined
subset_uuids = set(s._uuid for s in self._evaluate_patterns(self._subset))
hosts[:] = [h for h in hosts if h._uuid in subset_uuids]
if not ignore_restrictions and self._restriction:
# exclude hosts mentioned in any restriction (ex: failed hosts)
hosts[:] = [h for h in hosts if h.name in self._restriction]
self._hosts_patterns_cache[pattern_hash] = deduplicate_list(hosts)
# sort hosts list if needed (should only happen when called from strategy)
if order in ['sorted', 'reverse_sorted']:
hosts[:] = sorted(self._hosts_patterns_cache[pattern_hash][:], key=attrgetter('name'), reverse=(order == 'reverse_sorted'))
elif order == 'reverse_inventory':
hosts[:] = self._hosts_patterns_cache[pattern_hash][::-1]
else:
hosts[:] = self._hosts_patterns_cache[pattern_hash][:]
if order == 'shuffle':
shuffle(hosts)
elif order not in [None, 'inventory']:
raise AnsibleOptionsError("Invalid 'order' specified for inventory hosts: %s" % order)
return hosts
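    # Usage sketch (added comment, not part of the original source): a hypothetical
    # caller holding an InventoryManager instance `im` could resolve a pattern,
    # honouring any --limit subset and active restriction, like this:
    #
    #   hosts = im.get_hosts(pattern='webservers:&staging', order='sorted')
    #   names = [h.name for h in hosts]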
def _evaluate_patterns(self, patterns):
"""
Takes a list of patterns and returns a list of matching host names,
taking into account any negative and intersection patterns.
"""
patterns = order_patterns(patterns)
hosts = []
for p in patterns:
# avoid resolving a pattern that is a plain host
if p in self._inventory.hosts:
hosts.append(self._inventory.get_host(p))
else:
that = self._match_one_pattern(p)
if p[0] == "!":
that = set(that)
hosts = [h for h in hosts if h not in that]
elif p[0] == "&":
that = set(that)
hosts = [h for h in hosts if h in that]
else:
existing_hosts = set(y.name for y in hosts)
hosts.extend([h for h in that if h.name not in existing_hosts])
return hosts
def _match_one_pattern(self, pattern):
"""
Takes a single pattern and returns a list of matching host names.
Ignores intersection (&) and exclusion (!) specifiers.
The pattern may be:
1. A regex starting with ~, e.g. '~[abc]*'
2. A shell glob pattern with ?/*/[chars]/[!chars], e.g. 'foo*'
3. An ordinary word that matches itself only, e.g. 'foo'
The pattern is matched using the following rules:
1. If it's 'all', it matches all hosts in all groups.
2. Otherwise, for each known group name:
(a) if it matches the group name, the results include all hosts
in the group or any of its children.
(b) otherwise, if it matches any hosts in the group, the results
include the matching hosts.
This means that 'foo*' may match one or more groups (thus including all
hosts therein) but also hosts in other groups.
The built-in groups 'all' and 'ungrouped' are special. No pattern can
match these group names (though 'all' behaves as though it matches, as
described above). The word 'ungrouped' can match a host of that name,
and patterns like 'ungr*' and 'al*' can match either hosts or groups
other than all and ungrouped.
If the pattern matches one or more group names according to these rules,
it may have an optional range suffix to select a subset of the results.
This is allowed only if the pattern is not a regex, i.e. '~foo[1]' does
not work (the [1] is interpreted as part of the regex), but 'foo*[1]'
would work if 'foo*' matched the name of one or more groups.
Duplicate matches are always eliminated from the results.
"""
if pattern[0] in ("&", "!"):
pattern = pattern[1:]
if pattern not in self._pattern_cache:
(expr, slice) = self._split_subscript(pattern)
hosts = self._enumerate_matches(expr)
try:
hosts = self._apply_subscript(hosts, slice)
except IndexError:
raise AnsibleError("No hosts matched the subscripted pattern '%s'" % pattern)
self._pattern_cache[pattern] = hosts
return self._pattern_cache[pattern]
def _split_subscript(self, pattern):
"""
Takes a pattern, checks if it has a subscript, and returns the pattern
without the subscript and a (start,end) tuple representing the given
subscript (or None if there is no subscript).
Validates that the subscript is in the right syntax, but doesn't make
sure the actual indices make sense in context.
"""
# Do not parse regexes for enumeration info
if pattern[0] == '~':
return (pattern, None)
# We want a pattern followed by an integer or range subscript.
# (We can't be more restrictive about the expression because the
# fnmatch semantics permit [\[:\]] to occur.)
subscript = None
m = PATTERN_WITH_SUBSCRIPT.match(pattern)
if m:
(pattern, idx, start, sep, end) = m.groups()
if idx:
subscript = (int(idx), None)
else:
if not end:
end = -1
subscript = (int(start), int(end))
if sep == '-':
display.warning("Use [x:y] inclusive subscripts instead of [x-y] which has been removed")
return (pattern, subscript)
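    # Illustrative sketch (added comment, not part of the original source): expected
    # splits for some hypothetical patterns.
    #
    #   _split_subscript('webservers[0]')   -> ('webservers', (0, None))
    #   _split_subscript('webservers[1:3]') -> ('webservers', (1, 3))
    #   _split_subscript('webservers[1:]')  -> ('webservers', (1, -1))
    #   _split_subscript('~web\d+')         -> ('~web\d+', None)    # regexes are not parsed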
def _apply_subscript(self, hosts, subscript):
"""
Takes a list of hosts and a (start,end) tuple and returns the subset of
hosts based on the subscript (which may be None to return all hosts).
"""
if not hosts or not subscript:
return hosts
(start, end) = subscript
if end:
if end == -1:
end = len(hosts) - 1
return hosts[start:end + 1]
else:
return [hosts[start]]
def _enumerate_matches(self, pattern):
"""
Returns a list of host names matching the given pattern according to the
rules explained above in _match_one_pattern.
"""
results = []
# check if pattern matches group
matching_groups = self._match_list(self._inventory.groups, pattern)
if matching_groups:
for groupname in matching_groups:
results.extend(self._inventory.groups[groupname].get_hosts())
# check hosts if no groups matched or it is a regex/glob pattern
if not matching_groups or pattern[0] == '~' or any(special in pattern for special in ('.', '?', '*', '[')):
# pattern might match host
matching_hosts = self._match_list(self._inventory.hosts, pattern)
if matching_hosts:
for hostname in matching_hosts:
results.append(self._inventory.hosts[hostname])
if not results and pattern in C.LOCALHOST:
# get_host autocreates implicit when needed
implicit = self._inventory.get_host(pattern)
if implicit:
results.append(implicit)
# Display warning if specified host pattern did not match any groups or hosts
if not results and not matching_groups and pattern != 'all':
msg = "Could not match supplied host pattern, ignoring: %s" % pattern
display.debug(msg)
if C.HOST_PATTERN_MISMATCH == 'warning':
display.warning(msg)
elif C.HOST_PATTERN_MISMATCH == 'error':
raise AnsibleError(msg)
# no need to write 'ignore' state
return results
def list_hosts(self, pattern="all"):
""" return a list of hostnames for a pattern """
# FIXME: cache?
result = [h for h in self.get_hosts(pattern)]
# allow implicit localhost if pattern matches and no other results
if len(result) == 0 and pattern in C.LOCALHOST:
result = [pattern]
return result
def list_groups(self):
# FIXME: cache?
return sorted(self._inventory.groups.keys(), key=lambda x: x)
def restrict_to_hosts(self, restriction):
"""
Restrict list operations to the hosts given in restriction. This is used
        to batch serial operations in main playbook code; don't use this for
        other purposes.
"""
if restriction is None:
return
elif not isinstance(restriction, list):
restriction = [restriction]
self._restriction = set(to_text(h.name) for h in restriction)
def subset(self, subset_pattern):
"""
Limits inventory results to a subset of inventory that matches a given
        pattern, such as to select a given geographic or numeric slice amongst
        a previous 'hosts' selection that only selects roles, or vice versa.
Corresponds to --limit parameter to ansible-playbook
"""
if subset_pattern is None:
self._subset = None
else:
subset_patterns = split_host_pattern(subset_pattern)
results = []
# allow Unix style @filename data
for x in subset_patterns:
if x[0] == "@":
fd = open(x[1:])
results.extend([to_text(l.strip()) for l in fd.read().split("\n")])
fd.close()
else:
results.append(to_text(x))
self._subset = results
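    # Illustrative sketch (added comment, not part of the original source): values a
    # hypothetical --limit could carry, including the '@filename' form handled above.
    #
    #   im.subset('webservers[0:2]:&staging')   # slice of a group, intersected with another
    #   im.subset('@/tmp/retry_hosts.txt')      # one host pattern per line of the file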
def remove_restriction(self):
""" Do not restrict list operations """
self._restriction = None
def clear_pattern_cache(self):
self._pattern_cache = {}
| gpl-3.0 | -5,601,324,338,152,402,000 | 38.384858 | 148 | 0.580096 | false |
evansd/django | django/conf/locale/cs/formats.py | 65 | 1635 | # This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. E Y'
TIME_FORMAT = 'G:i'
DATETIME_FORMAT = 'j. E Y G:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y G:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d.%m.%Y', '%d.%m.%y', # '05.01.2006', '05.01.06'
'%d. %m. %Y', '%d. %m. %y', # '5. 1. 2006', '5. 1. 06'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
]
# Kept ISO formats as one is in first position
TIME_INPUT_FORMATS = [
'%H:%M:%S', # '04:30:59'
'%H.%M', # '04.30'
'%H:%M', # '04:30'
]
DATETIME_INPUT_FORMATS = [
'%d.%m.%Y %H:%M:%S', # '05.01.2006 04:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '05.01.2006 04:30:59.000200'
'%d.%m.%Y %H.%M', # '05.01.2006 04.30'
'%d.%m.%Y %H:%M', # '05.01.2006 04:30'
'%d.%m.%Y', # '05.01.2006'
'%d. %m. %Y %H:%M:%S', # '05. 01. 2006 04:30:59'
'%d. %m. %Y %H:%M:%S.%f', # '05. 01. 2006 04:30:59.000200'
'%d. %m. %Y %H.%M', # '05. 01. 2006 04.30'
'%d. %m. %Y %H:%M', # '05. 01. 2006 04:30'
'%d. %m. %Y', # '05. 01. 2006'
'%Y-%m-%d %H.%M', # '2006-01-05 04.30'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
| bsd-3-clause | -6,390,870,274,618,545,000 | 37.928571 | 77 | 0.523547 | false |
AICP/external_chromium_org | tools/win/split_link/install_split_link.py | 146 | 3104 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import _winreg
import os
import shutil
import subprocess
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
def IsExe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
def FindInPath(program):
fpath, _ = os.path.split(program)
if fpath:
if IsExe(program):
return program
else:
for path in os.environ['PATH'].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if not path or not os.path.isabs(path):
continue
if IsExe(exe_file):
return exe_file
return None
def EscapeForCommandLineAndCString(path):
"""Quoted sufficiently to be passed on the compile command line as a define
to be turned into a string in the target C program."""
path = '"' + path + '"'
return path.replace('\\', '\\\\').replace('"', '\\"')
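# Illustrative sketch (added comment, not part of the original source): a
# hypothetical input and the doubly-escaped characters handed to cl.exe below.
#
#   EscapeForCommandLineAndCString(r'C:\tools\split_link.py')
#   returns the characters:  \"C:\\tools\\split_link.py\"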
def main():
# Switch to our own dir.
os.chdir(BASE_DIR)
link = FindInPath('link.exe')
mt = FindInPath('mt.exe')
if not link or not mt:
print("Couldn't find link.exe or mt.exe in PATH. "
"Must run from Administrator Visual Studio Command Prompt.")
return 1
link_backup = os.path.join(os.path.split(link)[0], 'link.exe.split_link.exe')
# Don't re-backup link.exe, so only copy link.exe to backup if it's
# not there already.
if not os.path.exists(link_backup):
try:
print 'Saving original link.exe...'
shutil.copyfile(link, link_backup)
except IOError:
print(("Wasn't able to back up %s to %s. "
"Not running with Administrator privileges?")
% (link, link_backup))
return 1
# Build our linker shim.
print 'Building split_link.exe...'
split_link_py = os.path.abspath('split_link.py')
script_path = EscapeForCommandLineAndCString(split_link_py)
python = EscapeForCommandLineAndCString(sys.executable)
subprocess.check_call('cl.exe /nologo /Ox /Zi /W4 /WX /D_UNICODE /DUNICODE'
' /D_CRT_SECURE_NO_WARNINGS /EHsc split_link.cc'
' /DPYTHON_PATH="%s"'
' /DSPLIT_LINK_SCRIPT_PATH="%s"'
' /link shell32.lib shlwapi.lib /out:split_link.exe' % (
python, script_path))
# Copy shim into place.
print 'Copying split_link.exe over link.exe...'
try:
shutil.copyfile('split_link.exe', link)
_winreg.SetValue(_winreg.HKEY_CURRENT_USER,
'Software\\Chromium\\split_link_installed',
_winreg.REG_SZ,
link_backup)
_winreg.SetValue(_winreg.HKEY_CURRENT_USER,
'Software\\Chromium\\split_link_mt_path',
_winreg.REG_SZ,
mt)
except IOError:
print("Wasn't able to copy split_link.exe over %s. "
"Not running with Administrator privileges?" % link)
return 1
return 0
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | 6,032,807,087,503,230,000 | 30.04 | 80 | 0.611147 | false |
imsparsh/python-for-android | python-build/python-libs/gdata/src/gdata/tlslite/errors.py | 333 | 5795 | """Exception classes.
@sort: TLSError, TLSAbruptCloseError, TLSAlert, TLSLocalAlert, TLSRemoteAlert,
TLSAuthenticationError, TLSNoAuthenticationError, TLSAuthenticationTypeError,
TLSFingerprintError, TLSAuthorizationError, TLSValidationError, TLSFaultError
"""
from constants import AlertDescription, AlertLevel
class TLSError(Exception):
"""Base class for all TLS Lite exceptions."""
pass
class TLSAbruptCloseError(TLSError):
"""The socket was closed without a proper TLS shutdown.
The TLS specification mandates that an alert of some sort
must be sent before the underlying socket is closed. If the socket
is closed without this, it could signify that an attacker is trying
to truncate the connection. It could also signify a misbehaving
TLS implementation, or a random network failure.
"""
pass
class TLSAlert(TLSError):
"""A TLS alert has been signalled."""
pass
_descriptionStr = {\
AlertDescription.close_notify: "close_notify",\
AlertDescription.unexpected_message: "unexpected_message",\
AlertDescription.bad_record_mac: "bad_record_mac",\
AlertDescription.decryption_failed: "decryption_failed",\
AlertDescription.record_overflow: "record_overflow",\
AlertDescription.decompression_failure: "decompression_failure",\
AlertDescription.handshake_failure: "handshake_failure",\
AlertDescription.no_certificate: "no certificate",\
AlertDescription.bad_certificate: "bad_certificate",\
AlertDescription.unsupported_certificate: "unsupported_certificate",\
AlertDescription.certificate_revoked: "certificate_revoked",\
AlertDescription.certificate_expired: "certificate_expired",\
AlertDescription.certificate_unknown: "certificate_unknown",\
AlertDescription.illegal_parameter: "illegal_parameter",\
AlertDescription.unknown_ca: "unknown_ca",\
AlertDescription.access_denied: "access_denied",\
AlertDescription.decode_error: "decode_error",\
AlertDescription.decrypt_error: "decrypt_error",\
AlertDescription.export_restriction: "export_restriction",\
AlertDescription.protocol_version: "protocol_version",\
AlertDescription.insufficient_security: "insufficient_security",\
AlertDescription.internal_error: "internal_error",\
AlertDescription.user_canceled: "user_canceled",\
AlertDescription.no_renegotiation: "no_renegotiation",\
AlertDescription.unknown_srp_username: "unknown_srp_username",\
AlertDescription.missing_srp_username: "missing_srp_username"}
class TLSLocalAlert(TLSAlert):
"""A TLS alert has been signalled by the local implementation.
@type description: int
@ivar description: Set to one of the constants in
L{tlslite.constants.AlertDescription}
@type level: int
@ivar level: Set to one of the constants in
L{tlslite.constants.AlertLevel}
@type message: str
@ivar message: Description of what went wrong.
"""
def __init__(self, alert, message=None):
self.description = alert.description
self.level = alert.level
self.message = message
def __str__(self):
alertStr = TLSAlert._descriptionStr.get(self.description)
if alertStr == None:
alertStr = str(self.description)
if self.message:
return alertStr + ": " + self.message
else:
return alertStr
class TLSRemoteAlert(TLSAlert):
"""A TLS alert has been signalled by the remote implementation.
@type description: int
@ivar description: Set to one of the constants in
L{tlslite.constants.AlertDescription}
@type level: int
@ivar level: Set to one of the constants in
L{tlslite.constants.AlertLevel}
"""
def __init__(self, alert):
self.description = alert.description
self.level = alert.level
def __str__(self):
alertStr = TLSAlert._descriptionStr.get(self.description)
if alertStr == None:
alertStr = str(self.description)
return alertStr
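# Usage sketch (added comment, not part of the original source): a hypothetical
# caller distinguishing alerts raised locally from alerts signalled by the peer;
# `connection` and `log` are assumed to exist.
#
#   try:
#       connection.handshakeClientCert()
#   except TLSRemoteAlert as alert:
#       log("peer sent alert: %s" % alert)
#   except TLSLocalAlert as alert:
#       log("we sent alert: %s" % alert)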
class TLSAuthenticationError(TLSError):
"""The handshake succeeded, but the other party's authentication
was inadequate.
This exception will only be raised when a
L{tlslite.Checker.Checker} has been passed to a handshake function.
The Checker will be invoked once the handshake completes, and if
the Checker objects to how the other party authenticated, a
subclass of this exception will be raised.
"""
pass
class TLSNoAuthenticationError(TLSAuthenticationError):
"""The Checker was expecting the other party to authenticate with a
certificate chain, but this did not occur."""
pass
class TLSAuthenticationTypeError(TLSAuthenticationError):
"""The Checker was expecting the other party to authenticate with a
different type of certificate chain."""
pass
class TLSFingerprintError(TLSAuthenticationError):
"""The Checker was expecting the other party to authenticate with a
certificate chain that matches a different fingerprint."""
pass
class TLSAuthorizationError(TLSAuthenticationError):
"""The Checker was expecting the other party to authenticate with a
certificate chain that has a different authorization."""
pass
class TLSValidationError(TLSAuthenticationError):
"""The Checker has determined that the other party's certificate
chain is invalid."""
pass
class TLSFaultError(TLSError):
"""The other party responded incorrectly to an induced fault.
This exception will only occur during fault testing, when a
TLSConnection's fault variable is set to induce some sort of
faulty behavior, and the other party doesn't respond appropriately.
"""
pass
| apache-2.0 | -6,756,249,802,120,865,000 | 37.892617 | 78 | 0.716652 | false |
meduz/scikit-learn | examples/linear_model/plot_ransac.py | 73 | 1859 | """
===========================================
Robust linear model estimation using RANSAC
===========================================
In this example we see how to robustly fit a linear model to faulty data using
the RANSAC algorithm.
"""
import numpy as np
from matplotlib import pyplot as plt
from sklearn import linear_model, datasets
n_samples = 1000
n_outliers = 50
X, y, coef = datasets.make_regression(n_samples=n_samples, n_features=1,
n_informative=1, noise=10,
coef=True, random_state=0)
# Add outlier data
np.random.seed(0)
X[:n_outliers] = 3 + 0.5 * np.random.normal(size=(n_outliers, 1))
y[:n_outliers] = -3 + 10 * np.random.normal(size=n_outliers)
# Fit line using all data
model = linear_model.LinearRegression()
model.fit(X, y)
# Robustly fit linear model with RANSAC algorithm
model_ransac = linear_model.RANSACRegressor(linear_model.LinearRegression())
model_ransac.fit(X, y)
inlier_mask = model_ransac.inlier_mask_
outlier_mask = np.logical_not(inlier_mask)
# Predict data of estimated models
line_X = np.arange(-5, 5)
line_y = model.predict(line_X[:, np.newaxis])
line_y_ransac = model_ransac.predict(line_X[:, np.newaxis])
# Compare estimated coefficients
print("Estimated coefficients (true, normal, RANSAC):")
print(coef, model.coef_, model_ransac.estimator_.coef_)
lw = 2
plt.scatter(X[inlier_mask], y[inlier_mask], color='yellowgreen', marker='.',
label='Inliers')
plt.scatter(X[outlier_mask], y[outlier_mask], color='gold', marker='.',
label='Outliers')
plt.plot(line_X, line_y, color='navy', linestyle='-', linewidth=lw,
label='Linear regressor')
plt.plot(line_X, line_y_ransac, color='cornflowerblue', linestyle='-',
linewidth=lw, label='RANSAC regressor')
plt.legend(loc='lower right')
plt.show()
| bsd-3-clause | -2,685,503,581,709,986,300 | 31.051724 | 78 | 0.65035 | false |
sandeepraju/git-talk | cli.py | 1 | 1686 | import argparse
import os
from gittalk import GitTalk
from gittalk.utils import which, make_sure_path_exists
def run():
"""
`run` drives the command line interface for Git Talk.
It exposes a command line interface through which users
can interact with Git Talk to configure or invoke various
functionalities.
"""
# do explict dependency checks
try:
import Tkinter
except Exception as e:
print 'Make sure your Python has Tkinter installed before using GitTalk!'
if not which('ffmpeg'):
print 'Please make sure FFmpeg is installed before using GitTalk!'
# create a folder to be used by GitTalk
make_sure_path_exists(os.path.join(os.environ['HOME'], '.gittalk'))
parser = argparse.ArgumentParser(description='Audio & Video annotations to your code via Git')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('-e', '--enable', action='store_true', required=False,
help='Enable Git Talk in the current Git repository.',
dest='enable')
group.add_argument('-d', '--disable', action='store_true', required=False,
help='Disable Git Talk in the current Git repository.',
dest='disable')
group.add_argument('-t', '--trigger', action='store_true', required=False,
help='Trigger Git Talk.',
dest='trigger')
args = parser.parse_args()
gt = GitTalk()
if args.enable:
gt.enable()
elif args.disable:
gt.disable()
elif args.trigger:
gt.trigger()
if __name__ == '__main__':
run()
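# Usage sketch (added comment, not part of the original source): hypothetical shell
# invocations of this CLI, assuming it is installed under the name `gittalk`.
#
#   gittalk --enable    # enable Git Talk in the current Git repository
#   gittalk --trigger   # trigger Git Talk
#   gittalk --disable   # disable Git Talk in the current Git repository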
| bsd-3-clause | 1,389,969,434,073,118,700 | 30.811321 | 98 | 0.616251 | false |
jimi-c/ansible | lib/ansible/plugins/action/add_host.py | 49 | 3384 | # (c) 2012-2014, Michael DeHaan <[email protected]>
# Copyright 2012, Seth Vidal <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError
from ansible.module_utils.six import string_types
from ansible.plugins.action import ActionBase
from ansible.parsing.utils.addresses import parse_address
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionModule(ActionBase):
''' Create inventory hosts and groups in the memory inventory'''
# We need to be able to modify the inventory
BYPASS_HOST_LOOP = True
TRANSFERS_FILES = False
def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True
result = super(ActionModule, self).run(tmp, task_vars)
del tmp # tmp no longer has any effect
# Parse out any hostname:port patterns
new_name = self._task.args.get('name', self._task.args.get('hostname', self._task.args.get('host', None)))
if new_name is None:
result['failed'] = True
result['msg'] = 'name or hostname arg needs to be provided'
return result
display.vv("creating host via 'add_host': hostname=%s" % new_name)
try:
name, port = parse_address(new_name, allow_ranges=False)
except:
# not a parsable hostname, but might still be usable
name = new_name
port = None
if port:
self._task.args['ansible_ssh_port'] = port
groups = self._task.args.get('groupname', self._task.args.get('groups', self._task.args.get('group', '')))
# add it to the group if that was specified
new_groups = []
if groups:
if isinstance(groups, list):
group_list = groups
elif isinstance(groups, string_types):
group_list = groups.split(",")
else:
raise AnsibleError("Groups must be specified as a list.", obj=self._task)
for group_name in group_list:
if group_name not in new_groups:
new_groups.append(group_name.strip())
# Add any variables to the new_host
host_vars = dict()
special_args = frozenset(('name', 'hostname', 'groupname', 'groups'))
for k in self._task.args.keys():
if k not in special_args:
host_vars[k] = self._task.args[k]
result['changed'] = True
result['add_host'] = dict(host_name=name, groups=new_groups, host_vars=host_vars)
return result
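# Illustrative sketch (added comment, not part of the original source): a playbook
# task this action plugin would handle; the host name, groups and variable below
# are hypothetical.
#
#   - add_host:
#       name: web1.example.com:2222    # the port is split off by parse_address()
#       groups: webservers,staging     # a list or a comma-separated string
#       ansible_user: deploy           # any non-special argument becomes a host var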
| gpl-3.0 | -4,637,411,742,304,762,000 | 35.387097 | 114 | 0.641253 | false |
eliangidoni/rethinkdb | test/regression/issue_4383.py | 12 | 4815 | #!/usr/bin/env python
# Copyright 2015-2016 RethinkDB, all rights reserved.
'''Test that a backfill will resume after restarting a cluster'''
import os, pprint, sys, time
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, 'common')))
import driver, scenario_common, utils, vcoptparse
op = vcoptparse.OptParser()
op["num_rows"] = vcoptparse.IntFlag("--num-rows", 50000)
scenario_common.prepare_option_parser_mode_flags(op)
opts = op.parse(sys.argv)
_, command_prefix, server_options = scenario_common.parse_mode_flags(opts)
r = utils.import_python_driver()
dbName, tableName = utils.get_test_db_table()
num_shards = 16
utils.print_with_time("Starting cluster of three servers")
with driver.Cluster(initial_servers=['source1', 'source2', 'target'], output_folder='.', console_output=True, command_prefix=command_prefix, extra_options=server_options) as cluster:
source_a = cluster['source1']
source_b = cluster['source2']
target = cluster['target']
conn = r.connect(host=source_a.host, port=source_a.driver_port)
utils.print_with_time("Creating a table")
if dbName not in r.db_list().run(conn):
r.db_create(dbName).run(conn)
if tableName in r.db(dbName).table_list().run(conn):
        r.db(dbName).table_drop(tableName).run(conn)
r.db("rethinkdb").table("table_config").insert({
"name":tableName, "db": dbName,
"shards": [{"primary_replica":"source1", "replicas":["source1", "source2"]}] * num_shards
}).run(conn)
tbl = r.db(dbName).table(tableName)
tbl.wait(wait_for="all_replicas_ready").run(conn)
utils.print_with_time("Inserting %d documents" % opts["num_rows"])
chunkSize = 2000
for startId in range(0, opts["num_rows"], chunkSize):
endId = min(startId + chunkSize, opts["num_rows"])
res = tbl.insert(r.range(startId, endId).map({
"value": r.row,
"padding": "x" * 100
}), durability="soft").run(conn)
assert res["inserted"] == endId - startId
utils.print_with_time(" Progress: %d/%d" % (endId, opts["num_rows"]))
tbl.sync().run(conn)
utils.print_with_time("Beginning replication to second server")
tbl.config().update({
"shards": [{"primary_replica": "source1", "replicas": ["source1", "source2", "target"]}] * num_shards
}).run(conn)
utils.print_with_time("Waiting a few seconds for backfill to get going")
deadline = time.time() + 2
while True:
status = tbl.status().run(conn)
try:
assert status["status"]["ready_for_writes"] == True, 'Table is not ready for writes:\n' + pprint.pformat(status)
assert status["status"]["all_replicas_ready"] == False, 'All replicas incorrectly reporting ready:\n' + pprint.pformat(status)
break
except AssertionError:
if time.time() > deadline:
raise
else:
time.sleep(.05)
utils.print_with_time("Shutting down servers")
cluster.check_and_stop()
utils.print_with_time("Restarting servers")
source_a.start()
source_b.start()
target.start()
conn = r.connect(host=source_a.host, port=source_a.driver_port)
conn_target = r.connect(host=target.host, port=target.driver_port)
utils.print_with_time("Checking that table is available for writes")
try:
tbl.wait(wait_for="ready_for_writes", timeout=30).run(conn)
except r.ReqlRuntimeError, e:
status = r.db("rethinkdb").table("_debug_table_status").nth(0).run(conn)
pprint.pprint(status)
raise
try:
tbl.wait(wait_for="ready_for_writes", timeout=3).run(conn_target)
except r.ReqlRuntimeError, e:
pprint.pprint(r.db("rethinkdb").table("_debug_table_status").nth(0).run(conn_target))
raise
utils.print_with_time("Making sure the backfill didn't end")
status = tbl.status().run(conn)
for shard in status['shards']:
for server in shard['replicas']:
if server['server'] == 'target' and server['state'] == 'backfilling':
break # this will cause a double break, bypassing the outer else
else:
continue
break
else:
raise AssertionError('There were no shards listed as backfilling:\n' + pprint.pformat(status))
for job in r.db('rethinkdb').table('jobs').filter({'type':'backfill'}).run(conn):
if job['info']['db'] == dbName and job['info']['table'] == tableName:
break
else:
raise AssertionError('Did not find any job backfilling this table')
assert not status["status"]["all_replicas_ready"], 'All replicas incorrectly reporting ready:\n' + pprint.pformat(status)
utils.print_with_time("Cleaning up")
utils.print_with_time("Done.")
| agpl-3.0 | -6,407,156,258,282,020,000 | 41.610619 | 182 | 0.637591 | false |
prculley/gramps | gramps/gen/filters/rules/person/_matchidof.py | 4 | 1777 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .. import Rule
#-------------------------------------------------------------------------
#
# HasIdOf
#
#-------------------------------------------------------------------------
class MatchIdOf(Rule):
"""Rule that checks for a person with a specific Gramps ID"""
labels = [ _('ID:') ]
name = _('Person with <Id>')
description = _("Matches person with a specified Gramps ID")
category = _('General filters')
def apply(self,db,person):
return person.gramps_id.find(self.list[0]) !=-1
| gpl-2.0 | 4,904,196,178,665,654,000 | 34.54 | 79 | 0.527856 | false |
optima-ict/odoo | openerp/workflow/service.py | 50 | 4084 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from helpers import Session
from helpers import Record
from openerp.workflow.instance import WorkflowInstance
# import instance
class WorkflowService(object):
CACHE = {}
@classmethod
def clear_cache(cls, dbname):
cls.CACHE[dbname] = {}
@classmethod
def new(cls, cr, uid, model_name, record_id):
return cls(Session(cr, uid), Record(model_name, record_id))
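    # Usage sketch (added comment, not part of the original source): a hypothetical
    # caller driving the workflow of one record; the model name, id and signal are
    # made up.
    #
    #   service = WorkflowService.new(cr, uid, 'sale.order', 42)
    #   service.create()                    # instantiate workflows with on_create=True
    #   service.validate('order_confirm')   # fire a signal on the active instances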
def __init__(self, session, record):
assert isinstance(session, Session)
assert isinstance(record, Record)
self.session = session
self.record = record
self.cr = self.session.cr
def write(self):
self.cr.execute('select id from wkf_instance where res_id=%s and res_type=%s and state=%s',
(self.record.id or None, self.record.model or None, 'active')
)
for (instance_id,) in self.cr.fetchall():
WorkflowInstance(self.session, self.record, {'id': instance_id}).update()
def trigger(self):
self.cr.execute('select instance_id from wkf_triggers where res_id=%s and model=%s', (self.record.id, self.record.model))
res = self.cr.fetchall()
for (instance_id,) in res:
self.cr.execute('select %s,res_type,res_id from wkf_instance where id=%s', (self.session.uid, instance_id,))
current_uid, current_model_name, current_record_id = self.cr.fetchone()
current_session = Session(self.session.cr, current_uid)
current_record = Record(current_model_name, current_record_id)
WorkflowInstance(current_session, current_record, {'id': instance_id}).update()
def delete(self):
WorkflowInstance(self.session, self.record, {}).delete()
def create(self):
WorkflowService.CACHE.setdefault(self.cr.dbname, {})
wkf_ids = WorkflowService.CACHE[self.cr.dbname].get(self.record.model, None)
if not wkf_ids:
self.cr.execute('select id from wkf where osv=%s and on_create=True', (self.record.model,))
wkf_ids = self.cr.fetchall()
WorkflowService.CACHE[self.cr.dbname][self.record.model] = wkf_ids
for (wkf_id, ) in wkf_ids:
WorkflowInstance.create(self.session, self.record, wkf_id)
def validate(self, signal):
result = False
# ids of all active workflow instances for a corresponding resource (id, model_nam)
self.cr.execute('select id from wkf_instance where res_id=%s and res_type=%s and state=%s', (self.record.id, self.record.model, 'active'))
# TODO: Refactor the workflow instance object
for (instance_id,) in self.cr.fetchall():
wi = WorkflowInstance(self.session, self.record, {'id': instance_id})
res2 = wi.validate(signal)
result = result or res2
return result
def redirect(self, new_rid):
# get ids of wkf instances for the old resource (res_id)
# CHECKME: shouldn't we get only active instances?
self.cr.execute('select id, wkf_id from wkf_instance where res_id=%s and res_type=%s', (self.record.id, self.record.model))
for old_inst_id, workflow_id in self.cr.fetchall():
# first active instance for new resource (new_rid), using same wkf
self.cr.execute(
'SELECT id '\
'FROM wkf_instance '\
'WHERE res_id=%s AND res_type=%s AND wkf_id=%s AND state=%s',
(new_rid, self.record.model, workflow_id, 'active'))
new_id = self.cr.fetchone()
if new_id:
# select all workitems which "wait" for the old instance
self.cr.execute('select id from wkf_workitem where subflow_id=%s', (old_inst_id,))
for (item_id,) in self.cr.fetchall():
# redirect all those workitems to the wkf instance of the new resource
self.cr.execute('update wkf_workitem set subflow_id=%s where id=%s', (new_id[0], item_id))
| agpl-3.0 | -3,959,377,991,663,151,000 | 41.541667 | 146 | 0.620715 | false |
lukas-krecan/tensorflow | tensorflow/python/summary/event_accumulator_test.py | 2 | 21287 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow.python.platform
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
from tensorflow.core.framework import graph_pb2
from tensorflow.core.util.event_pb2 import SessionLog
from tensorflow.python.platform import gfile
from tensorflow.python.platform import googletest
from tensorflow.python.platform import logging
from tensorflow.python.summary import event_accumulator as ea
class _EventGenerator(object):
def __init__(self):
self.items = []
def Load(self):
while self.items:
yield self.items.pop(0)
def AddScalar(self, tag, wall_time=0, step=0, value=0):
event = tf.Event(
wall_time=wall_time, step=step,
summary=tf.Summary(
value=[tf.Summary.Value(tag=tag, simple_value=value)]
)
)
self.AddEvent(event)
def AddHistogram(self, tag, wall_time=0, step=0, hmin=1, hmax=2, hnum=3,
hsum=4, hsum_squares=5, hbucket_limit=None, hbucket=None):
histo = tf.HistogramProto(min=hmin, max=hmax, num=hnum, sum=hsum,
sum_squares=hsum_squares,
bucket_limit=hbucket_limit,
bucket=hbucket)
event = tf.Event(
wall_time=wall_time,
step=step,
summary=tf.Summary(value=[tf.Summary.Value(tag=tag, histo=histo)]))
self.AddEvent(event)
def AddImage(self, tag, wall_time=0, step=0, encoded_image_string=b'imgstr',
width=150, height=100):
image = tf.Summary.Image(encoded_image_string=encoded_image_string,
width=width, height=height)
event = tf.Event(
wall_time=wall_time,
step=step,
summary=tf.Summary(
value=[tf.Summary.Value(tag=tag, image=image)]))
self.AddEvent(event)
def AddEvent(self, event):
self.items.append(event)
class EventAccumulatorTest(tf.test.TestCase):
def assertTagsEqual(self, tags1, tags2):
# Make sure the two dictionaries have the same keys.
self.assertItemsEqual(tags1, tags2)
# Additionally, make sure each key in the dictionary maps to the same value.
for key in tags1:
if isinstance(tags1[key], list):
# We don't care about the order of the values in lists, thus asserting
# only if the items are equal.
self.assertItemsEqual(tags1[key], tags2[key])
else:
# Make sure the values are equal.
self.assertEqual(tags1[key], tags2[key])
class MockingEventAccumulatorTest(EventAccumulatorTest):
def setUp(self):
super(MockingEventAccumulatorTest, self).setUp()
self.stubs = googletest.StubOutForTesting()
self.empty = {ea.IMAGES: [],
ea.SCALARS: [],
ea.HISTOGRAMS: [],
ea.COMPRESSED_HISTOGRAMS: [],
ea.GRAPH: False}
self._real_constructor = ea.EventAccumulator
self._real_generator = ea._GeneratorFromPath
def _FakeAccumulatorConstructor(generator, *args, **kwargs):
ea._GeneratorFromPath = lambda x: generator
return self._real_constructor(generator, *args, **kwargs)
ea.EventAccumulator = _FakeAccumulatorConstructor
def tearDown(self):
self.stubs.CleanUp()
ea.EventAccumulator = self._real_constructor
ea._GeneratorFromPath = self._real_generator
def testEmptyAccumulator(self):
gen = _EventGenerator()
x = ea.EventAccumulator(gen)
x.Reload()
self.assertEqual(x.Tags(), self.empty)
def testTags(self):
gen = _EventGenerator()
gen.AddScalar('s1')
gen.AddScalar('s2')
gen.AddHistogram('hst1')
gen.AddHistogram('hst2')
gen.AddImage('im1')
gen.AddImage('im2')
acc = ea.EventAccumulator(gen)
acc.Reload()
self.assertTagsEqual(
acc.Tags(), {
ea.IMAGES: ['im1', 'im2'],
ea.SCALARS: ['s1', 's2'],
ea.HISTOGRAMS: ['hst1', 'hst2'],
ea.COMPRESSED_HISTOGRAMS: ['hst1', 'hst2'],
ea.GRAPH: False})
def testReload(self):
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
acc.Reload()
self.assertEqual(acc.Tags(), self.empty)
gen.AddScalar('s1')
gen.AddScalar('s2')
gen.AddHistogram('hst1')
gen.AddHistogram('hst2')
gen.AddImage('im1')
gen.AddImage('im2')
self.assertEqual(acc.Tags(), self.empty)
acc.Reload()
self.assertTagsEqual(acc.Tags(), {
ea.IMAGES: ['im1', 'im2'],
ea.SCALARS: ['s1', 's2'],
ea.HISTOGRAMS: ['hst1', 'hst2'],
ea.COMPRESSED_HISTOGRAMS: ['hst1', 'hst2'],
ea.GRAPH: False})
def testScalars(self):
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
s1 = ea.ScalarEvent(wall_time=1, step=10, value=32)
s2 = ea.ScalarEvent(wall_time=2, step=12, value=64)
gen.AddScalar('s1', wall_time=1, step=10, value=32)
gen.AddScalar('s2', wall_time=2, step=12, value=64)
acc.Reload()
self.assertEqual(acc.Scalars('s1'), [s1])
self.assertEqual(acc.Scalars('s2'), [s2])
def testHistograms(self):
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
val1 = ea.HistogramValue(min=1, max=2, num=3, sum=4, sum_squares=5,
bucket_limit=[1, 2, 3], bucket=[0, 3, 0])
val2 = ea.HistogramValue(min=-2, max=3, num=4, sum=5, sum_squares=6,
bucket_limit=[2, 3, 4], bucket=[1, 3, 0])
hst1 = ea.HistogramEvent(wall_time=1, step=10, histogram_value=val1)
hst2 = ea.HistogramEvent(wall_time=2, step=12, histogram_value=val2)
gen.AddHistogram('hst1', wall_time=1, step=10, hmin=1, hmax=2, hnum=3,
hsum=4, hsum_squares=5, hbucket_limit=[1, 2, 3],
hbucket=[0, 3, 0])
gen.AddHistogram('hst2', wall_time=2, step=12, hmin=-2, hmax=3, hnum=4,
hsum=5, hsum_squares=6, hbucket_limit=[2, 3, 4],
hbucket=[1, 3, 0])
acc.Reload()
self.assertEqual(acc.Histograms('hst1'), [hst1])
self.assertEqual(acc.Histograms('hst2'), [hst2])
def testCompressedHistograms(self):
gen = _EventGenerator()
acc = ea.EventAccumulator(gen, compression_bps=(0, 2500, 5000, 7500, 10000))
gen.AddHistogram('hst1', wall_time=1, step=10, hmin=1, hmax=2, hnum=3,
hsum=4, hsum_squares=5, hbucket_limit=[1, 2, 3],
hbucket=[0, 3, 0])
gen.AddHistogram('hst2', wall_time=2, step=12, hmin=-2, hmax=3, hnum=4,
hsum=5, hsum_squares=6, hbucket_limit=[2, 3, 4],
hbucket=[1, 3, 0])
acc.Reload()
# Create the expected values after compressing hst1
expected_vals1 = [ea.CompressedHistogramValue(bp, val) for bp, val in [(
0, 1.0), (2500, 1.25), (5000, 1.5), (7500, 1.75), (10000, 2.0)]]
expected_cmphst1 = ea.CompressedHistogramEvent(
wall_time=1,
step=10,
compressed_histogram_values=expected_vals1)
self.assertEqual(acc.CompressedHistograms('hst1'), [expected_cmphst1])
# Create the expected values after compressing hst2
expected_vals2 = [
ea.CompressedHistogramValue(bp, val)
for bp, val in [(0, -2), (2500, 2), (5000, 2 + 1 / 3), (7500, 2 + 2 / 3
), (10000, 3)]
]
expected_cmphst2 = ea.CompressedHistogramEvent(
wall_time=2,
step=12,
compressed_histogram_values=expected_vals2)
self.assertEqual(acc.CompressedHistograms('hst2'), [expected_cmphst2])
def testPercentile(self):
def AssertExpectedForBps(bps, expected):
output = acc._Percentile(
bps, bucket_limit, cumsum_weights, histo_min, histo_max, histo_num)
self.assertAlmostEqual(expected, output)
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
bucket_limit = [1, 2, 3, 4]
histo_num = 100
## All weights in the first bucket
cumsum_weights = [10000, 10000, 10000, 10000]
histo_min = -1
histo_max = .9
AssertExpectedForBps(0, histo_min)
AssertExpectedForBps(2500, ea._Remap(2500, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(5000, ea._Remap(5000, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(7500, ea._Remap(7500, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(10000, histo_max)
## All weights in second bucket
cumsum_weights = [0, 10000, 10000, 10000]
histo_min = 1.1
histo_max = 1.8
AssertExpectedForBps(0, histo_min)
AssertExpectedForBps(2500, ea._Remap(2500, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(5000, ea._Remap(5000, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(7500, ea._Remap(7500, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(10000, histo_max)
## All weights in the last bucket
cumsum_weights = [0, 0, 0, 10000]
histo_min = 3.1
histo_max = 3.6
AssertExpectedForBps(0, histo_min)
AssertExpectedForBps(2500, ea._Remap(2500, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(5000, ea._Remap(5000, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(7500, ea._Remap(7500, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(10000, histo_max)
## Weights distributed between two buckets
cumsum_weights = [0, 4000, 10000, 10000]
histo_min = 1.1
histo_max = 2.9
AssertExpectedForBps(0, histo_min)
AssertExpectedForBps(2500, ea._Remap(2500, 0, 4000, histo_min,
bucket_limit[1]))
AssertExpectedForBps(5000, ea._Remap(5000, 4000, 10000, bucket_limit[1],
histo_max))
AssertExpectedForBps(7500, ea._Remap(7500, 4000, 10000, bucket_limit[1],
histo_max))
AssertExpectedForBps(10000, histo_max)
## Weights distributed between all buckets
cumsum_weights = [1000, 4000, 8000, 10000]
histo_min = -1
histo_max = 3.9
AssertExpectedForBps(0, histo_min)
AssertExpectedForBps(2500, ea._Remap(2500, 1000, 4000, bucket_limit[0],
bucket_limit[1]))
AssertExpectedForBps(5000, ea._Remap(5000, 4000, 8000, bucket_limit[1],
bucket_limit[2]))
AssertExpectedForBps(7500, ea._Remap(7500, 4000, 8000, bucket_limit[1],
bucket_limit[2]))
AssertExpectedForBps(9000, ea._Remap(9000, 8000, 10000, bucket_limit[2],
histo_max))
AssertExpectedForBps(10000, histo_max)
## Most weight in first bucket
cumsum_weights = [9000, 10000, 10000, 10000]
histo_min = -1
histo_max = 1.1
AssertExpectedForBps(0, histo_min)
AssertExpectedForBps(2500, ea._Remap(2500, 0, 9000, histo_min,
bucket_limit[0]))
AssertExpectedForBps(5000, ea._Remap(5000, 0, 9000, histo_min,
bucket_limit[0]))
AssertExpectedForBps(7500, ea._Remap(7500, 0, 9000, histo_min,
bucket_limit[0]))
AssertExpectedForBps(9500, ea._Remap(9500, 9000, 10000, bucket_limit[0],
histo_max))
AssertExpectedForBps(10000, histo_max)
def testImages(self):
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
im1 = ea.ImageEvent(wall_time=1, step=10, encoded_image_string=b'big',
width=400, height=300)
im2 = ea.ImageEvent(wall_time=2, step=12, encoded_image_string=b'small',
width=40, height=30)
gen.AddImage('im1', wall_time=1, step=10, encoded_image_string=b'big',
width=400, height=300)
gen.AddImage('im2', wall_time=2, step=12, encoded_image_string=b'small',
width=40, height=30)
acc.Reload()
self.assertEqual(acc.Images('im1'), [im1])
self.assertEqual(acc.Images('im2'), [im2])
def testActivation(self):
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
self.assertFalse(acc._activated)
with self.assertRaises(RuntimeError):
acc.Tags()
with self.assertRaises(RuntimeError):
acc.Scalars('s1')
acc.Reload()
self.assertTrue(acc._activated)
acc._activated = False
def testKeyError(self):
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
acc.Reload()
with self.assertRaises(KeyError):
acc.Scalars('s1')
with self.assertRaises(KeyError):
acc.Scalars('hst1')
with self.assertRaises(KeyError):
acc.Scalars('im1')
with self.assertRaises(KeyError):
acc.Histograms('s1')
with self.assertRaises(KeyError):
acc.Histograms('im1')
with self.assertRaises(KeyError):
acc.Images('s1')
with self.assertRaises(KeyError):
acc.Images('hst1')
def testNonValueEvents(self):
"""Tests that non-value events in the generator don't cause early exits."""
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
gen.AddScalar('s1', wall_time=1, step=10, value=20)
gen.AddEvent(tf.Event(
wall_time=2, step=20, file_version='nots2'))
gen.AddScalar('s3', wall_time=3, step=100, value=1)
gen.AddHistogram('hst1')
gen.AddImage('im1')
acc.Reload()
self.assertTagsEqual(acc.Tags(), {
ea.IMAGES: ['im1'],
ea.SCALARS: ['s1', 's3'],
ea.HISTOGRAMS: ['hst1'],
ea.COMPRESSED_HISTOGRAMS: ['hst1'],
ea.GRAPH: False})
def testExpiredDataDiscardedAfterRestartForFileVersionLessThan2(self):
"""Tests that events are discarded after a restart is detected.
If a step value is observed to be lower than what was previously seen,
this should force a discard of all previous items with the same tag
that are outdated.
Only file versions < 2 use this out-of-order discard logic. Later versions
discard events based on the step value of SessionLog.START.
"""
warnings = []
self.stubs.Set(logging, 'warn', warnings.append)
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
gen.AddEvent(tf.Event(wall_time=0, step=0, file_version='brain.Event:1'))
gen.AddScalar('s1', wall_time=1, step=100, value=20)
gen.AddScalar('s1', wall_time=1, step=200, value=20)
gen.AddScalar('s1', wall_time=1, step=300, value=20)
acc.Reload()
## Check that number of items are what they should be
self.assertEqual([x.step for x in acc.Scalars('s1')], [100, 200, 300])
gen.AddScalar('s1', wall_time=1, step=101, value=20)
gen.AddScalar('s1', wall_time=1, step=201, value=20)
gen.AddScalar('s1', wall_time=1, step=301, value=20)
acc.Reload()
## Check that we have discarded 200 and 300 from s1
self.assertEqual([x.step for x in acc.Scalars('s1')], [100, 101, 201, 301])
def testEventsDiscardedPerTagAfterRestartForFileVersionLessThan2(self):
"""Tests that event discards after restart, only affect the misordered tag.
If a step value is observed to be lower than what was previously seen,
this should force a discard of all previous items that are outdated, but
only for the out of order tag. Other tags should remain unaffected.
Only file versions < 2 use this out-of-order discard logic. Later versions
discard events based on the step value of SessionLog.START.
"""
warnings = []
self.stubs.Set(logging, 'warn', warnings.append)
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
gen.AddEvent(tf.Event(wall_time=0, step=0, file_version='brain.Event:1'))
gen.AddScalar('s1', wall_time=1, step=100, value=20)
gen.AddScalar('s1', wall_time=1, step=200, value=20)
gen.AddScalar('s1', wall_time=1, step=300, value=20)
gen.AddScalar('s1', wall_time=1, step=101, value=20)
gen.AddScalar('s1', wall_time=1, step=201, value=20)
gen.AddScalar('s1', wall_time=1, step=301, value=20)
gen.AddScalar('s2', wall_time=1, step=101, value=20)
gen.AddScalar('s2', wall_time=1, step=201, value=20)
gen.AddScalar('s2', wall_time=1, step=301, value=20)
acc.Reload()
## Check that we have discarded 200 and 300
self.assertEqual([x.step for x in acc.Scalars('s1')], [100, 101, 201, 301])
## Check that s1 discards do not affect s2
## i.e. check that only events from the out of order tag are discarded
self.assertEqual([x.step for x in acc.Scalars('s2')], [101, 201, 301])
def testOnlySummaryEventsTriggerDiscards(self):
"""Test that file version event does not trigger data purge."""
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
gen.AddScalar('s1', wall_time=1, step=100, value=20)
ev1 = tf.Event(wall_time=2, step=0, file_version='brain.Event:1')
graph_bytes = graph_pb2.GraphDef().SerializeToString()
ev2 = tf.Event(wall_time=3, step=0, graph_def=graph_bytes)
gen.AddEvent(ev1)
gen.AddEvent(ev2)
acc.Reload()
self.assertEqual([x.step for x in acc.Scalars('s1')], [100])
def testSessionLogStartMessageDiscardsExpiredEvents(self):
"""Test that SessionLog.START message discards expired events.
This discard logic is preferred over the out-of-order step discard logic,
but this logic can only be used for event protos which have the SessionLog
enum, which was introduced to event.proto for file_version >= brain.Event:2.
"""
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
gen.AddEvent(tf.Event(wall_time=0, step=1, file_version='brain.Event:2'))
gen.AddScalar('s1', wall_time=1, step=100, value=20)
gen.AddScalar('s1', wall_time=1, step=200, value=20)
gen.AddScalar('s1', wall_time=1, step=300, value=20)
gen.AddScalar('s1', wall_time=1, step=400, value=20)
gen.AddScalar('s2', wall_time=1, step=202, value=20)
gen.AddScalar('s2', wall_time=1, step=203, value=20)
slog = SessionLog(status=SessionLog.START)
gen.AddEvent(tf.Event(wall_time=2, step=201, session_log=slog))
acc.Reload()
self.assertEqual([x.step for x in acc.Scalars('s1')], [100, 200])
self.assertEqual([x.step for x in acc.Scalars('s2')], [])
class RealisticEventAccumulatorTest(EventAccumulatorTest):
def setUp(self):
super(RealisticEventAccumulatorTest, self).setUp()
def testScalarsRealistically(self):
"""Test accumulator by writing values and then reading them."""
def FakeScalarSummary(tag, value):
value = tf.Summary.Value(tag=tag, simple_value=value)
summary = tf.Summary(value=[value])
return summary
directory = os.path.join(self.get_temp_dir(), 'values_dir')
if gfile.IsDirectory(directory):
gfile.DeleteRecursively(directory)
gfile.MkDir(directory)
writer = tf.train.SummaryWriter(directory, max_queue=100)
graph_def = tf.GraphDef(node=[tf.NodeDef(name='A', op='Mul')])
# Add a graph to the summary writer.
writer.add_graph(graph_def)
# Write a bunch of events using the writer
for i in xrange(30):
summ_id = FakeScalarSummary('id', i)
summ_sq = FakeScalarSummary('sq', i*i)
writer.add_summary(summ_id, i*5)
writer.add_summary(summ_sq, i*5)
writer.flush()
# Verify that we can load those events properly
acc = ea.EventAccumulator(directory)
acc.Reload()
self.assertTagsEqual(acc.Tags(), {
ea.IMAGES: [],
ea.SCALARS: ['id', 'sq'],
ea.HISTOGRAMS: [],
ea.COMPRESSED_HISTOGRAMS: [],
ea.GRAPH: True})
id_events = acc.Scalars('id')
sq_events = acc.Scalars('sq')
self.assertEqual(30, len(id_events))
self.assertEqual(30, len(sq_events))
for i in xrange(30):
self.assertEqual(i*5, id_events[i].step)
self.assertEqual(i*5, sq_events[i].step)
self.assertEqual(i, id_events[i].value)
self.assertEqual(i*i, sq_events[i].value)
# Write a few more events to test incremental reloading
for i in xrange(30, 40):
summ_id = FakeScalarSummary('id', i)
summ_sq = FakeScalarSummary('sq', i*i)
writer.add_summary(summ_id, i*5)
writer.add_summary(summ_sq, i*5)
writer.flush()
# Verify we can now see all of the data
acc.Reload()
self.assertEqual(40, len(id_events))
self.assertEqual(40, len(sq_events))
for i in xrange(40):
self.assertEqual(i*5, id_events[i].step)
self.assertEqual(i*5, sq_events[i].step)
self.assertEqual(i, id_events[i].value)
self.assertEqual(i*i, sq_events[i].value)
self.assertProtoEquals(graph_def, acc.Graph())
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | 969,499,024,860,666,100 | 37.424188 | 80 | 0.634425 | false |
nenel83/fuzzy-avenger | src/ibmiotf/device.py | 2 | 6759 | # *****************************************************************************
# Copyright (c) 2014 IBM Corporation and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
#
# Contributors:
# David Parker - Initial Contribution
# *****************************************************************************
import json
import re
import pytz
from datetime import datetime
from ibmiotf import AbstractClient, Message, InvalidEventException, UnsupportedAuthenticationMethod, ConfigurationException, ConnectionException, MissingMessageEncoderException, MissingMessageDecoderException
from ibmiotf.codecs import jsonCodec, jsonIotfCodec
# Support Python 2.7 and 3.4 versions of configparser
try:
import configparser
except ImportError:
import ConfigParser as configparser
COMMAND_RE = re.compile("iot-2/cmd/(.+)/fmt/(.+)")
class Command:
def __init__(self, pahoMessage, messageEncoderModules):
result = COMMAND_RE.match(pahoMessage.topic)
if result:
self.command = result.group(1)
self.format = result.group(2)
if self.format in messageEncoderModules:
message = messageEncoderModules[self.format].decode(pahoMessage)
self.timestamp = message.timestamp
self.data = message.data
else:
raise MissingMessageDecoderException(self.format)
else:
raise InvalidEventException("Received command on invalid topic: %s" % (pahoMessage.topic))
class Client(AbstractClient):
def __init__(self, options):
self.__options = options
if self.__options['org'] == None:
raise ConfigurationException("Missing required property: org")
if self.__options['type'] == None:
raise ConfigurationException("Missing required property: type")
if self.__options['id'] == None:
raise ConfigurationException("Missing required property: id")
if self.__options['org'] != "quickstart":
if self.__options['auth-method'] == None:
raise ConfigurationException("Missing required property: auth-method")
if (self.__options['auth-method'] == "token"):
if self.__options['auth-token'] == None:
raise ConfigurationException("Missing required property for token based authentication: auth-token")
else:
raise UnsupportedAuthenticationMethod(options['authMethod'])
AbstractClient.__init__(
self,
organization = options['org'],
clientId = "d:" + options['org'] + ":" + options['type'] + ":" + options['id'],
username = "use-token-auth" if (options['auth-method'] == "token") else None,
password = options['auth-token']
)
# Add handler for commands if not connected to QuickStart
if self.__options['org'] != "quickstart":
self.client.message_callback_add("iot-2/cmd/+/fmt/+", self.__onCommand)
# Initialize user supplied callback
self.commandCallback = None
self.client.on_connect = self.on_connect
self.setMessageEncoderModule('json', jsonCodec)
self.setMessageEncoderModule('json-iotf', jsonIotfCodec)
'''
This is called after the client has received a CONNACK message from the broker in response to calling connect().
The parameter rc is an integer giving the return code:
0: Success
1: Refused - unacceptable protocol version
2: Refused - identifier rejected
3: Refused - server unavailable
4: Refused - bad user name or password
5: Refused - not authorised
'''
def on_connect(self, client, userdata, flags, rc):
if rc == 0:
self.connectEvent.set()
self.logger.info("Connected successfully: %s" % self.clientId)
if self.__options['org'] != "quickstart":
self.__subscribeToCommands()
elif rc == 5:
            self.logAndRaiseException(ConnectionException("Not authorized: (%s, %s, %s)" % (self.clientId, self.username, self.password)))
else:
self.logAndRaiseException(ConnectionException("Connection failed: RC= %s" % (rc)))
def publishEvent(self, event, msgFormat, data, qos=0):
if not self.connectEvent.wait():
self.logger.warning("Unable to send event %s because device is not currently connected")
return False
else:
self.logger.debug("Sending event %s with data %s" % (event, json.dumps(data)))
topic = 'iot-2/evt/'+event+'/fmt/' + msgFormat
if msgFormat in self.messageEncoderModules:
payload = self.messageEncoderModules[msgFormat].encode(data, datetime.now(pytz.timezone('UTC')))
self.client.publish(topic, payload=payload, qos=qos, retain=False)
return True
else:
raise MissingMessageEncoderException(msgFormat)
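    # Usage sketch (added comment, not part of the original source): a hypothetical
    # device publishing a JSON event; the organisation, credentials and payload are
    # made up, and connect() is assumed to be provided by AbstractClient.
    #
    #   client = Client({'org': 'myorg', 'type': 'sensor', 'id': 'dev001',
    #                    'auth-method': 'token', 'auth-token': 'secret'})
    #   client.connect()
    #   client.publishEvent('status', 'json', {'d': {'temperature': 21.5}}, qos=1)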
def __subscribeToCommands(self):
if self.__options['org'] == "quickstart":
self.logger.warning("QuickStart applications do not support commands")
return False
if not self.connectEvent.wait():
self.logger.warning("Unable to subscribe to commands because device is not currently connected")
return False
else:
topic = 'iot-2/cmd/+/fmt/json'
self.client.subscribe(topic, qos=2)
return True
'''
Internal callback for device command messages, parses source device from topic string and
passes the information on to the registerd device command callback
'''
def __onCommand(self, client, userdata, pahoMessage):
self.recv = self.recv + 1
try:
command = Command(pahoMessage, self.messageEncoderModules)
self.logger.debug("Received command '%s'" % (command.command))
if self.commandCallback: self.commandCallback(command)
except InvalidEventException as e:
self.logger.critical(str(e))
def ParseConfigFile(configFilePath):
parms = configparser.ConfigParser()
sectionHeader = "device"
try:
with open(configFilePath) as f:
try:
parms.read_file(f)
organization = parms.get(sectionHeader, "org", fallback=None)
deviceType = parms.get(sectionHeader, "type", fallback=None)
deviceId = parms.get(sectionHeader, "id", fallback=None)
authMethod = parms.get(sectionHeader, "auth-method", fallback=None)
authToken = parms.get(sectionHeader, "auth-token", fallback=None)
except AttributeError:
# Python 2.7 support
# https://docs.python.org/3/library/configparser.html#configparser.ConfigParser.read_file
parms.readfp(f)
organization = parms.get(sectionHeader, "org", None)
deviceType = parms.get(sectionHeader, "type", None)
deviceId = parms.get(sectionHeader, "id", None)
authMethod = parms.get(sectionHeader, "auth-method", None)
authToken = parms.get(sectionHeader, "auth-token", None)
except IOError as e:
reason = "Error reading device configuration file '%s' (%s)" % (configFilePath,e[1])
raise ConfigurationException(reason)
return {'org': organization, 'type': deviceType, 'id': deviceId, 'auth-method': authMethod, 'auth-token': authToken}
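# --- Illustrative usage sketch (editor's addition, not part of the original module) ---
# How the pieces above fit together: load credentials with ParseConfigFile and
# feed them to the device client class defined above.  The class is passed in
# as client_cls because its name is not shown in this excerpt, and connect()
# is assumed to be provided by AbstractClient.
def _example_device_usage(client_cls, config_path, on_command=None):
    options = ParseConfigFile(config_path)
    client = client_cls(options)
    client.connect()
    client.commandCallback = on_command      # called with a Command instance
    client.publishEvent("status", "json", {"temperature": 21.5}, qos=1)
    return client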
| epl-1.0 | 18,999,335,720,013,590 | 35.733696 | 176 | 0.70617 | false |
sunzhxjs/JobGIS | lib/python2.7/site-packages/jinja2/nodes.py | 342 | 28954 | # -*- coding: utf-8 -*-
"""
jinja2.nodes
~~~~~~~~~~~~
This module implements additional nodes derived from the ast base node.
It also provides some node tree helper functions like `in_lineno` and
`get_nodes` used by the parser and translator in order to normalize
python and jinja nodes.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import types
import operator
from collections import deque
from jinja2.utils import Markup
from jinja2._compat import izip, with_metaclass, text_type
#: the types we support for context functions
_context_function_types = (types.FunctionType, types.MethodType)
_binop_to_func = {
'*': operator.mul,
'/': operator.truediv,
'//': operator.floordiv,
'**': operator.pow,
'%': operator.mod,
'+': operator.add,
'-': operator.sub
}
_uaop_to_func = {
'not': operator.not_,
'+': operator.pos,
'-': operator.neg
}
_cmpop_to_func = {
'eq': operator.eq,
'ne': operator.ne,
'gt': operator.gt,
'gteq': operator.ge,
'lt': operator.lt,
'lteq': operator.le,
'in': lambda a, b: a in b,
'notin': lambda a, b: a not in b
}
class Impossible(Exception):
"""Raised if the node could not perform a requested action."""
class NodeType(type):
"""A metaclass for nodes that handles the field and attribute
inheritance. fields and attributes from the parent class are
automatically forwarded to the child."""
def __new__(cls, name, bases, d):
for attr in 'fields', 'attributes':
storage = []
storage.extend(getattr(bases[0], attr, ()))
storage.extend(d.get(attr, ()))
assert len(bases) == 1, 'multiple inheritance not allowed'
assert len(storage) == len(set(storage)), 'layout conflict'
d[attr] = tuple(storage)
d.setdefault('abstract', False)
return type.__new__(cls, name, bases, d)
class EvalContext(object):
"""Holds evaluation time information. Custom attributes can be attached
to it in extensions.
"""
def __init__(self, environment, template_name=None):
self.environment = environment
if callable(environment.autoescape):
self.autoescape = environment.autoescape(template_name)
else:
self.autoescape = environment.autoescape
self.volatile = False
def save(self):
return self.__dict__.copy()
def revert(self, old):
self.__dict__.clear()
self.__dict__.update(old)
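# Sketch (editor's addition): extensions can attach custom attributes to an
# EvalContext; save() and revert() give a snapshot/rollback of that state.
def _example_eval_context_roundtrip(environment):
    ctx = EvalContext(environment)
    snapshot = ctx.save()            # plain dict copy of the current attributes
    ctx.autoescape = True            # temporarily force autoescaping
    ctx.my_extension_flag = True     # custom attribute, as an extension might add
    ctx.revert(snapshot)             # both changes are rolled back
    return ctx.autoescape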
def get_eval_context(node, ctx):
if ctx is None:
if node.environment is None:
raise RuntimeError('if no eval context is passed, the '
'node must have an attached '
'environment.')
return EvalContext(node.environment)
return ctx
class Node(with_metaclass(NodeType, object)):
"""Baseclass for all Jinja2 nodes. There are a number of nodes available
of different types. There are four major types:
- :class:`Stmt`: statements
- :class:`Expr`: expressions
- :class:`Helper`: helper nodes
- :class:`Template`: the outermost wrapper node
All nodes have fields and attributes. Fields may be other nodes, lists,
or arbitrary values. Fields are passed to the constructor as regular
positional arguments, attributes as keyword arguments. Each node has
two attributes: `lineno` (the line number of the node) and `environment`.
The `environment` attribute is set at the end of the parsing process for
all nodes automatically.
"""
fields = ()
attributes = ('lineno', 'environment')
abstract = True
def __init__(self, *fields, **attributes):
if self.abstract:
            raise TypeError('abstract nodes are not instantiable')
if fields:
if len(fields) != len(self.fields):
if not self.fields:
raise TypeError('%r takes 0 arguments' %
self.__class__.__name__)
raise TypeError('%r takes 0 or %d argument%s' % (
self.__class__.__name__,
len(self.fields),
len(self.fields) != 1 and 's' or ''
))
for name, arg in izip(self.fields, fields):
setattr(self, name, arg)
for attr in self.attributes:
setattr(self, attr, attributes.pop(attr, None))
if attributes:
raise TypeError('unknown attribute %r' %
next(iter(attributes)))
def iter_fields(self, exclude=None, only=None):
"""This method iterates over all fields that are defined and yields
``(key, value)`` tuples. Per default all fields are returned, but
it's possible to limit that to some fields by providing the `only`
parameter or to exclude some using the `exclude` parameter. Both
should be sets or tuples of field names.
"""
for name in self.fields:
if (exclude is only is None) or \
(exclude is not None and name not in exclude) or \
(only is not None and name in only):
try:
yield name, getattr(self, name)
except AttributeError:
pass
def iter_child_nodes(self, exclude=None, only=None):
"""Iterates over all direct child nodes of the node. This iterates
        over all fields and yields the values if they are nodes. If the value
of a field is a list all the nodes in that list are returned.
"""
for field, item in self.iter_fields(exclude, only):
if isinstance(item, list):
for n in item:
if isinstance(n, Node):
yield n
elif isinstance(item, Node):
yield item
def find(self, node_type):
"""Find the first node of a given type. If no such node exists the
return value is `None`.
"""
for result in self.find_all(node_type):
return result
def find_all(self, node_type):
"""Find all the nodes of a given type. If the type is a tuple,
the check is performed for any of the tuple items.
"""
for child in self.iter_child_nodes():
if isinstance(child, node_type):
yield child
for result in child.find_all(node_type):
yield result
def set_ctx(self, ctx):
"""Reset the context of a node and all child nodes. Per default the
        parser will generate all nodes with a 'load' context as it's the
most common one. This method is used in the parser to set assignment
targets and other nodes to a store context.
"""
todo = deque([self])
while todo:
node = todo.popleft()
if 'ctx' in node.fields:
node.ctx = ctx
todo.extend(node.iter_child_nodes())
return self
def set_lineno(self, lineno, override=False):
"""Set the line numbers of the node and children."""
todo = deque([self])
while todo:
node = todo.popleft()
if 'lineno' in node.attributes:
if node.lineno is None or override:
node.lineno = lineno
todo.extend(node.iter_child_nodes())
return self
def set_environment(self, environment):
"""Set the environment for all nodes."""
todo = deque([self])
while todo:
node = todo.popleft()
node.environment = environment
todo.extend(node.iter_child_nodes())
return self
def __eq__(self, other):
return type(self) is type(other) and \
tuple(self.iter_fields()) == tuple(other.iter_fields())
def __ne__(self, other):
return not self.__eq__(other)
# Restore Python 2 hashing behavior on Python 3
__hash__ = object.__hash__
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__,
', '.join('%s=%r' % (arg, getattr(self, arg, None)) for
arg in self.fields)
)
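# Sketch (editor's addition): fields are passed positionally, attributes as
# keyword arguments, and the tree can be walked with iter_fields()/find_all().
# Add and Const are defined further down in this module, so they are resolved
# only when this helper is called.
def _example_node_tree():
    expr = Add(Const(1), Const(2), lineno=1)     # the expression 1 + 2
    field_map = dict(expr.iter_fields())         # {'left': Const(value=1), 'right': Const(value=2)}
    constants = list(expr.find_all(Const))       # both Const children
    return field_map, constants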
class Stmt(Node):
"""Base node for all statements."""
abstract = True
class Helper(Node):
"""Nodes that exist in a specific context only."""
abstract = True
class Template(Node):
"""Node that represents a template. This must be the outermost node that
is passed to the compiler.
"""
fields = ('body',)
class Output(Stmt):
"""A node that holds multiple expressions which are then printed out.
This is used both for the `print` statement and the regular template data.
"""
fields = ('nodes',)
class Extends(Stmt):
"""Represents an extends statement."""
fields = ('template',)
class For(Stmt):
"""The for loop. `target` is the target for the iteration (usually a
:class:`Name` or :class:`Tuple`), `iter` the iterable. `body` is a list
of nodes that are used as loop-body, and `else_` a list of nodes for the
`else` block. If no else node exists it has to be an empty list.
For filtered nodes an expression can be stored as `test`, otherwise `None`.
"""
fields = ('target', 'iter', 'body', 'else_', 'test', 'recursive')
class If(Stmt):
"""If `test` is true, `body` is rendered, else `else_`."""
fields = ('test', 'body', 'else_')
class Macro(Stmt):
"""A macro definition. `name` is the name of the macro, `args` a list of
arguments and `defaults` a list of defaults if there are any. `body` is
a list of nodes for the macro body.
"""
fields = ('name', 'args', 'defaults', 'body')
class CallBlock(Stmt):
"""Like a macro without a name but a call instead. `call` is called with
the unnamed macro as `caller` argument this node holds.
"""
fields = ('call', 'args', 'defaults', 'body')
class FilterBlock(Stmt):
"""Node for filter sections."""
fields = ('body', 'filter')
class Block(Stmt):
"""A node that represents a block."""
fields = ('name', 'body', 'scoped')
class Include(Stmt):
"""A node that represents the include tag."""
fields = ('template', 'with_context', 'ignore_missing')
class Import(Stmt):
"""A node that represents the import tag."""
fields = ('template', 'target', 'with_context')
class FromImport(Stmt):
"""A node that represents the from import tag. It's important to not
pass unsafe names to the name attribute. The compiler translates the
attribute lookups directly into getattr calls and does *not* use the
subscript callback of the interface. As exported variables may not
start with double underscores (which the parser asserts) this is not a
problem for regular Jinja code, but if this node is used in an extension
extra care must be taken.
The list of names may contain tuples if aliases are wanted.
"""
fields = ('template', 'names', 'with_context')
class ExprStmt(Stmt):
"""A statement that evaluates an expression and discards the result."""
fields = ('node',)
class Assign(Stmt):
"""Assigns an expression to a target."""
fields = ('target', 'node')
class AssignBlock(Stmt):
"""Assigns a block to a target."""
fields = ('target', 'body')
class Expr(Node):
"""Baseclass for all expressions."""
abstract = True
def as_const(self, eval_ctx=None):
"""Return the value of the expression as constant or raise
:exc:`Impossible` if this was not possible.
An :class:`EvalContext` can be provided, if none is given
a default context is created which requires the nodes to have
an attached environment.
.. versionchanged:: 2.4
the `eval_ctx` parameter was added.
"""
raise Impossible()
def can_assign(self):
"""Check if it's possible to assign something to this node."""
return False
class BinExpr(Expr):
"""Baseclass for all binary expressions."""
fields = ('left', 'right')
operator = None
abstract = True
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
# intercepted operators cannot be folded at compile time
if self.environment.sandboxed and \
self.operator in self.environment.intercepted_binops:
raise Impossible()
f = _binop_to_func[self.operator]
try:
return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
except Exception:
raise Impossible()
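# Sketch (editor's addition): constant folding via as_const().  With a plain,
# non-sandboxed Environment attached to the tree, Add over two Const nodes
# folds to 9; non-constant operands raise Impossible() instead.
def _example_binexpr_folding(environment):
    node = Add(Const(4), Const(5), lineno=1)
    node.set_environment(environment)    # as_const() needs an env or an EvalContext
    return node.as_const()               # -> 9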
class UnaryExpr(Expr):
"""Baseclass for all unary expressions."""
fields = ('node',)
operator = None
abstract = True
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
# intercepted operators cannot be folded at compile time
if self.environment.sandboxed and \
self.operator in self.environment.intercepted_unops:
raise Impossible()
f = _uaop_to_func[self.operator]
try:
return f(self.node.as_const(eval_ctx))
except Exception:
raise Impossible()
class Name(Expr):
"""Looks up a name or stores a value in a name.
The `ctx` of the node can be one of the following values:
- `store`: store a value in the name
- `load`: load that name
- `param`: like `store` but if the name was defined as function parameter.
"""
fields = ('name', 'ctx')
def can_assign(self):
return self.name not in ('true', 'false', 'none',
'True', 'False', 'None')
class Literal(Expr):
"""Baseclass for literals."""
abstract = True
class Const(Literal):
"""All constant values. The parser will return this node for simple
constants such as ``42`` or ``"foo"`` but it can be used to store more
complex values such as lists too. Only constants with a safe
    representation (objects where ``eval(repr(x)) == x`` is true) are allowed.
"""
fields = ('value',)
def as_const(self, eval_ctx=None):
return self.value
@classmethod
def from_untrusted(cls, value, lineno=None, environment=None):
"""Return a const object if the value is representable as
constant value in the generated code, otherwise it will raise
an `Impossible` exception.
"""
from .compiler import has_safe_repr
if not has_safe_repr(value):
raise Impossible()
return cls(value, lineno=lineno, environment=environment)
class TemplateData(Literal):
"""A constant template string."""
fields = ('data',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
if eval_ctx.autoescape:
return Markup(self.data)
return self.data
class Tuple(Literal):
"""For loop unpacking and some other things like multiple arguments
for subscripts. Like for :class:`Name` `ctx` specifies if the tuple
is used for loading the names or storing.
"""
fields = ('items', 'ctx')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return tuple(x.as_const(eval_ctx) for x in self.items)
def can_assign(self):
for item in self.items:
if not item.can_assign():
return False
return True
class List(Literal):
"""Any list literal such as ``[1, 2, 3]``"""
fields = ('items',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return [x.as_const(eval_ctx) for x in self.items]
class Dict(Literal):
"""Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of
:class:`Pair` nodes.
"""
fields = ('items',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return dict(x.as_const(eval_ctx) for x in self.items)
class Pair(Helper):
"""A key, value pair for dicts."""
fields = ('key', 'value')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
class Keyword(Helper):
"""A key, value pair for keyword arguments where key is a string."""
fields = ('key', 'value')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.key, self.value.as_const(eval_ctx)
class CondExpr(Expr):
"""A conditional expression (inline if expression). (``{{
foo if bar else baz }}``)
"""
fields = ('test', 'expr1', 'expr2')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if self.test.as_const(eval_ctx):
return self.expr1.as_const(eval_ctx)
# if we evaluate to an undefined object, we better do that at runtime
if self.expr2 is None:
raise Impossible()
return self.expr2.as_const(eval_ctx)
class Filter(Expr):
"""This node applies a filter on an expression. `name` is the name of
the filter, the rest of the fields are the same as for :class:`Call`.
If the `node` of a filter is `None` the contents of the last buffer are
filtered. Buffers are created by macros and filter blocks.
"""
fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile or self.node is None:
raise Impossible()
# we have to be careful here because we call filter_ below.
# if this variable would be called filter, 2to3 would wrap the
        # call in a list because it is assuming we are talking about the
# builtin filter function here which no longer returns a list in
# python 3. because of that, do not rename filter_ to filter!
filter_ = self.environment.filters.get(self.name)
if filter_ is None or getattr(filter_, 'contextfilter', False):
raise Impossible()
obj = self.node.as_const(eval_ctx)
args = [x.as_const(eval_ctx) for x in self.args]
if getattr(filter_, 'evalcontextfilter', False):
args.insert(0, eval_ctx)
elif getattr(filter_, 'environmentfilter', False):
args.insert(0, self.environment)
kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
if self.dyn_args is not None:
try:
args.extend(self.dyn_args.as_const(eval_ctx))
except Exception:
raise Impossible()
if self.dyn_kwargs is not None:
try:
kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
except Exception:
raise Impossible()
try:
return filter_(obj, *args, **kwargs)
except Exception:
raise Impossible()
class Test(Expr):
"""Applies a test on an expression. `name` is the name of the test, the
rest of the fields are the same as for :class:`Call`.
"""
fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
class Call(Expr):
"""Calls an expression. `args` is a list of arguments, `kwargs` a list
of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
and `dyn_kwargs` has to be either `None` or a node that is used as
node for dynamic positional (``*args``) or keyword (``**kwargs``)
arguments.
"""
fields = ('node', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
obj = self.node.as_const(eval_ctx)
# don't evaluate context functions
args = [x.as_const(eval_ctx) for x in self.args]
if isinstance(obj, _context_function_types):
if getattr(obj, 'contextfunction', False):
raise Impossible()
elif getattr(obj, 'evalcontextfunction', False):
args.insert(0, eval_ctx)
elif getattr(obj, 'environmentfunction', False):
args.insert(0, self.environment)
kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
if self.dyn_args is not None:
try:
args.extend(self.dyn_args.as_const(eval_ctx))
except Exception:
raise Impossible()
if self.dyn_kwargs is not None:
try:
kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
except Exception:
raise Impossible()
try:
return obj(*args, **kwargs)
except Exception:
raise Impossible()
class Getitem(Expr):
"""Get an attribute or item from an expression and prefer the item."""
fields = ('node', 'arg', 'ctx')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if self.ctx != 'load':
raise Impossible()
try:
return self.environment.getitem(self.node.as_const(eval_ctx),
self.arg.as_const(eval_ctx))
except Exception:
raise Impossible()
def can_assign(self):
return False
class Getattr(Expr):
"""Get an attribute or item from an expression that is a ascii-only
bytestring and prefer the attribute.
"""
fields = ('node', 'attr', 'ctx')
def as_const(self, eval_ctx=None):
if self.ctx != 'load':
raise Impossible()
try:
eval_ctx = get_eval_context(self, eval_ctx)
return self.environment.getattr(self.node.as_const(eval_ctx),
self.attr)
except Exception:
raise Impossible()
def can_assign(self):
return False
class Slice(Expr):
"""Represents a slice object. This must only be used as argument for
:class:`Subscript`.
"""
fields = ('start', 'stop', 'step')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
def const(obj):
if obj is None:
return None
return obj.as_const(eval_ctx)
return slice(const(self.start), const(self.stop), const(self.step))
class Concat(Expr):
"""Concatenates the list of expressions provided after converting them to
unicode.
"""
fields = ('nodes',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return ''.join(text_type(x.as_const(eval_ctx)) for x in self.nodes)
class Compare(Expr):
"""Compares an expression with some other expressions. `ops` must be a
list of :class:`Operand`\s.
"""
fields = ('expr', 'ops')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
result = value = self.expr.as_const(eval_ctx)
try:
for op in self.ops:
new_value = op.expr.as_const(eval_ctx)
result = _cmpop_to_func[op.op](value, new_value)
value = new_value
except Exception:
raise Impossible()
return result
class Operand(Helper):
"""Holds an operator and an expression."""
fields = ('op', 'expr')
if __debug__:
Operand.__doc__ += '\nThe following operators are available: ' + \
', '.join(sorted('``%s``' % x for x in set(_binop_to_func) |
set(_uaop_to_func) | set(_cmpop_to_func)))
class Mul(BinExpr):
"""Multiplies the left with the right node."""
operator = '*'
class Div(BinExpr):
"""Divides the left by the right node."""
operator = '/'
class FloorDiv(BinExpr):
"""Divides the left by the right node and truncates conver the
result into an integer by truncating.
"""
operator = '//'
class Add(BinExpr):
"""Add the left to the right node."""
operator = '+'
class Sub(BinExpr):
"""Subtract the right from the left node."""
operator = '-'
class Mod(BinExpr):
"""Left modulo right."""
operator = '%'
class Pow(BinExpr):
"""Left to the power of right."""
operator = '**'
class And(BinExpr):
"""Short circuited AND."""
operator = 'and'
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)
class Or(BinExpr):
"""Short circuited OR."""
operator = 'or'
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)
class Not(UnaryExpr):
"""Negate the expression."""
operator = 'not'
class Neg(UnaryExpr):
"""Make the expression negative."""
operator = '-'
class Pos(UnaryExpr):
"""Make the expression positive (noop for most expressions)"""
operator = '+'
# Helpers for extensions
class EnvironmentAttribute(Expr):
"""Loads an attribute from the environment object. This is useful for
extensions that want to call a callback stored on the environment.
"""
fields = ('name',)
class ExtensionAttribute(Expr):
"""Returns the attribute of an extension bound to the environment.
The identifier is the identifier of the :class:`Extension`.
This node is usually constructed by calling the
:meth:`~jinja2.ext.Extension.attr` method on an extension.
"""
fields = ('identifier', 'name')
class ImportedName(Expr):
"""If created with an import name the import name is returned on node
access. For example ``ImportedName('cgi.escape')`` returns the `escape`
function from the cgi module on evaluation. Imports are optimized by the
compiler so there is no need to assign them to local variables.
"""
fields = ('importname',)
class InternalName(Expr):
"""An internal name in the compiler. You cannot create these nodes
yourself but the parser provides a
:meth:`~jinja2.parser.Parser.free_identifier` method that creates
a new identifier for you. This identifier is not available from the
    template and is not treated specially by the compiler.
"""
fields = ('name',)
def __init__(self):
raise TypeError('Can\'t create internal names. Use the '
'`free_identifier` method on a parser.')
class MarkSafe(Expr):
"""Mark the wrapped expression as safe (wrap it as `Markup`)."""
fields = ('expr',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return Markup(self.expr.as_const(eval_ctx))
class MarkSafeIfAutoescape(Expr):
"""Mark the wrapped expression as safe (wrap it as `Markup`) but
only if autoescaping is active.
.. versionadded:: 2.5
"""
fields = ('expr',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
expr = self.expr.as_const(eval_ctx)
if eval_ctx.autoescape:
return Markup(expr)
return expr
class ContextReference(Expr):
"""Returns the current template context. It can be used like a
:class:`Name` node, with a ``'load'`` ctx and will return the
current :class:`~jinja2.runtime.Context` object.
Here an example that assigns the current template name to a
variable named `foo`::
Assign(Name('foo', ctx='store'),
Getattr(ContextReference(), 'name'))
"""
class Continue(Stmt):
"""Continue a loop."""
class Break(Stmt):
"""Break a loop."""
class Scope(Stmt):
"""An artificial scope."""
fields = ('body',)
class EvalContextModifier(Stmt):
"""Modifies the eval context. For each option that should be modified,
a :class:`Keyword` has to be added to the :attr:`options` list.
Example to change the `autoescape` setting::
EvalContextModifier(options=[Keyword('autoescape', Const(True))])
"""
fields = ('options',)
class ScopedEvalContextModifier(EvalContextModifier):
"""Modifies the eval context and reverts it later. Works exactly like
:class:`EvalContextModifier` but will only modify the
:class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
"""
fields = ('body',)
# make sure nobody creates custom nodes
def _failing_new(*args, **kwargs):
raise TypeError('can\'t create custom node types')
NodeType.__new__ = staticmethod(_failing_new); del _failing_new
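# Note (editor's addition): from this point on, defining a new Node subclass
# anywhere raises TypeError, e.g.
#
#   class MyNode(Stmt):           # TypeError: can't create custom node types
#       fields = ('payload',)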
| mit | -570,928,503,058,687,740 | 30.505985 | 81 | 0.600539 | false |
alephu5/Soundbyte | environment/lib/python3.3/site-packages/IPython/utils/tests/test_traitlets.py | 2 | 29996 | # encoding: utf-8
"""
Tests for IPython.utils.traitlets.
Authors:
* Brian Granger
* Enthought, Inc. Some of the code in this file comes from enthought.traits
and is licensed under the BSD license. Also, many of the ideas also come
from enthought.traits even though our implementation is very different.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import re
import sys
from unittest import TestCase
import nose.tools as nt
from nose import SkipTest
from IPython.utils.traitlets import (
HasTraits, MetaHasTraits, TraitType, Any, CBytes, Dict,
Int, Long, Integer, Float, Complex, Bytes, Unicode, TraitError,
Undefined, Type, This, Instance, TCPAddress, List, Tuple,
ObjectName, DottedObjectName, CRegExp, link
)
from IPython.utils import py3compat
from IPython.testing.decorators import skipif
#-----------------------------------------------------------------------------
# Helper classes for testing
#-----------------------------------------------------------------------------
class HasTraitsStub(HasTraits):
def _notify_trait(self, name, old, new):
self._notify_name = name
self._notify_old = old
self._notify_new = new
#-----------------------------------------------------------------------------
# Test classes
#-----------------------------------------------------------------------------
class TestTraitType(TestCase):
def test_get_undefined(self):
class A(HasTraits):
a = TraitType
a = A()
self.assertEqual(a.a, Undefined)
def test_set(self):
class A(HasTraitsStub):
a = TraitType
a = A()
a.a = 10
self.assertEqual(a.a, 10)
self.assertEqual(a._notify_name, 'a')
self.assertEqual(a._notify_old, Undefined)
self.assertEqual(a._notify_new, 10)
def test_validate(self):
class MyTT(TraitType):
def validate(self, inst, value):
return -1
class A(HasTraitsStub):
tt = MyTT
a = A()
a.tt = 10
self.assertEqual(a.tt, -1)
def test_default_validate(self):
class MyIntTT(TraitType):
def validate(self, obj, value):
if isinstance(value, int):
return value
self.error(obj, value)
class A(HasTraits):
tt = MyIntTT(10)
a = A()
self.assertEqual(a.tt, 10)
# Defaults are validated when the HasTraits is instantiated
class B(HasTraits):
tt = MyIntTT('bad default')
self.assertRaises(TraitError, B)
def test_is_valid_for(self):
class MyTT(TraitType):
def is_valid_for(self, value):
return True
class A(HasTraits):
tt = MyTT
a = A()
a.tt = 10
self.assertEqual(a.tt, 10)
def test_value_for(self):
class MyTT(TraitType):
def value_for(self, value):
return 20
class A(HasTraits):
tt = MyTT
a = A()
a.tt = 10
self.assertEqual(a.tt, 20)
def test_info(self):
class A(HasTraits):
tt = TraitType
a = A()
self.assertEqual(A.tt.info(), 'any value')
def test_error(self):
class A(HasTraits):
tt = TraitType
a = A()
self.assertRaises(TraitError, A.tt.error, a, 10)
def test_dynamic_initializer(self):
class A(HasTraits):
x = Int(10)
def _x_default(self):
return 11
class B(A):
x = Int(20)
class C(A):
def _x_default(self):
return 21
a = A()
self.assertEqual(a._trait_values, {})
self.assertEqual(list(a._trait_dyn_inits.keys()), ['x'])
self.assertEqual(a.x, 11)
self.assertEqual(a._trait_values, {'x': 11})
b = B()
self.assertEqual(b._trait_values, {'x': 20})
self.assertEqual(list(a._trait_dyn_inits.keys()), ['x'])
self.assertEqual(b.x, 20)
c = C()
self.assertEqual(c._trait_values, {})
self.assertEqual(list(a._trait_dyn_inits.keys()), ['x'])
self.assertEqual(c.x, 21)
self.assertEqual(c._trait_values, {'x': 21})
# Ensure that the base class remains unmolested when the _default
# initializer gets overridden in a subclass.
a = A()
c = C()
self.assertEqual(a._trait_values, {})
self.assertEqual(list(a._trait_dyn_inits.keys()), ['x'])
self.assertEqual(a.x, 11)
self.assertEqual(a._trait_values, {'x': 11})
class TestHasTraitsMeta(TestCase):
def test_metaclass(self):
self.assertEqual(type(HasTraits), MetaHasTraits)
class A(HasTraits):
a = Int
a = A()
self.assertEqual(type(a.__class__), MetaHasTraits)
self.assertEqual(a.a,0)
a.a = 10
self.assertEqual(a.a,10)
class B(HasTraits):
b = Int()
b = B()
self.assertEqual(b.b,0)
b.b = 10
self.assertEqual(b.b,10)
class C(HasTraits):
c = Int(30)
c = C()
self.assertEqual(c.c,30)
c.c = 10
self.assertEqual(c.c,10)
def test_this_class(self):
class A(HasTraits):
t = This()
tt = This()
class B(A):
tt = This()
ttt = This()
self.assertEqual(A.t.this_class, A)
self.assertEqual(B.t.this_class, A)
self.assertEqual(B.tt.this_class, B)
self.assertEqual(B.ttt.this_class, B)
class TestHasTraitsNotify(TestCase):
def setUp(self):
self._notify1 = []
self._notify2 = []
def notify1(self, name, old, new):
self._notify1.append((name, old, new))
def notify2(self, name, old, new):
self._notify2.append((name, old, new))
def test_notify_all(self):
class A(HasTraits):
a = Int
b = Float
a = A()
a.on_trait_change(self.notify1)
a.a = 0
self.assertEqual(len(self._notify1),0)
a.b = 0.0
self.assertEqual(len(self._notify1),0)
a.a = 10
self.assertTrue(('a',0,10) in self._notify1)
a.b = 10.0
self.assertTrue(('b',0.0,10.0) in self._notify1)
self.assertRaises(TraitError,setattr,a,'a','bad string')
self.assertRaises(TraitError,setattr,a,'b','bad string')
self._notify1 = []
a.on_trait_change(self.notify1,remove=True)
a.a = 20
a.b = 20.0
self.assertEqual(len(self._notify1),0)
def test_notify_one(self):
class A(HasTraits):
a = Int
b = Float
a = A()
a.on_trait_change(self.notify1, 'a')
a.a = 0
self.assertEqual(len(self._notify1),0)
a.a = 10
self.assertTrue(('a',0,10) in self._notify1)
self.assertRaises(TraitError,setattr,a,'a','bad string')
def test_subclass(self):
class A(HasTraits):
a = Int
class B(A):
b = Float
b = B()
self.assertEqual(b.a,0)
self.assertEqual(b.b,0.0)
b.a = 100
b.b = 100.0
self.assertEqual(b.a,100)
self.assertEqual(b.b,100.0)
def test_notify_subclass(self):
class A(HasTraits):
a = Int
class B(A):
b = Float
b = B()
b.on_trait_change(self.notify1, 'a')
b.on_trait_change(self.notify2, 'b')
b.a = 0
b.b = 0.0
self.assertEqual(len(self._notify1),0)
self.assertEqual(len(self._notify2),0)
b.a = 10
b.b = 10.0
self.assertTrue(('a',0,10) in self._notify1)
self.assertTrue(('b',0.0,10.0) in self._notify2)
def test_static_notify(self):
class A(HasTraits):
a = Int
_notify1 = []
def _a_changed(self, name, old, new):
self._notify1.append((name, old, new))
a = A()
a.a = 0
# This is broken!!!
self.assertEqual(len(a._notify1),0)
a.a = 10
self.assertTrue(('a',0,10) in a._notify1)
class B(A):
b = Float
_notify2 = []
def _b_changed(self, name, old, new):
self._notify2.append((name, old, new))
b = B()
b.a = 10
b.b = 10.0
self.assertTrue(('a',0,10) in b._notify1)
self.assertTrue(('b',0.0,10.0) in b._notify2)
def test_notify_args(self):
def callback0():
self.cb = ()
def callback1(name):
self.cb = (name,)
def callback2(name, new):
self.cb = (name, new)
def callback3(name, old, new):
self.cb = (name, old, new)
class A(HasTraits):
a = Int
a = A()
a.on_trait_change(callback0, 'a')
a.a = 10
self.assertEqual(self.cb,())
a.on_trait_change(callback0, 'a', remove=True)
a.on_trait_change(callback1, 'a')
a.a = 100
self.assertEqual(self.cb,('a',))
a.on_trait_change(callback1, 'a', remove=True)
a.on_trait_change(callback2, 'a')
a.a = 1000
self.assertEqual(self.cb,('a',1000))
a.on_trait_change(callback2, 'a', remove=True)
a.on_trait_change(callback3, 'a')
a.a = 10000
self.assertEqual(self.cb,('a',1000,10000))
a.on_trait_change(callback3, 'a', remove=True)
self.assertEqual(len(a._trait_notifiers['a']),0)
def test_notify_only_once(self):
class A(HasTraits):
listen_to = ['a']
a = Int(0)
b = 0
def __init__(self, **kwargs):
super(A, self).__init__(**kwargs)
self.on_trait_change(self.listener1, ['a'])
def listener1(self, name, old, new):
self.b += 1
class B(A):
c = 0
d = 0
def __init__(self, **kwargs):
super(B, self).__init__(**kwargs)
self.on_trait_change(self.listener2)
def listener2(self, name, old, new):
self.c += 1
def _a_changed(self, name, old, new):
self.d += 1
b = B()
b.a += 1
self.assertEqual(b.b, b.c)
self.assertEqual(b.b, b.d)
b.a += 1
self.assertEqual(b.b, b.c)
self.assertEqual(b.b, b.d)
class TestHasTraits(TestCase):
def test_trait_names(self):
class A(HasTraits):
i = Int
f = Float
a = A()
self.assertEqual(sorted(a.trait_names()),['f','i'])
self.assertEqual(sorted(A.class_trait_names()),['f','i'])
def test_trait_metadata(self):
class A(HasTraits):
i = Int(config_key='MY_VALUE')
a = A()
self.assertEqual(a.trait_metadata('i','config_key'), 'MY_VALUE')
def test_traits(self):
class A(HasTraits):
i = Int
f = Float
a = A()
self.assertEqual(a.traits(), dict(i=A.i, f=A.f))
self.assertEqual(A.class_traits(), dict(i=A.i, f=A.f))
def test_traits_metadata(self):
class A(HasTraits):
i = Int(config_key='VALUE1', other_thing='VALUE2')
f = Float(config_key='VALUE3', other_thing='VALUE2')
j = Int(0)
a = A()
self.assertEqual(a.traits(), dict(i=A.i, f=A.f, j=A.j))
traits = a.traits(config_key='VALUE1', other_thing='VALUE2')
self.assertEqual(traits, dict(i=A.i))
# This passes, but it shouldn't because I am replicating a bug in
# traits.
traits = a.traits(config_key=lambda v: True)
self.assertEqual(traits, dict(i=A.i, f=A.f, j=A.j))
def test_init(self):
class A(HasTraits):
i = Int()
x = Float()
a = A(i=1, x=10.0)
self.assertEqual(a.i, 1)
self.assertEqual(a.x, 10.0)
def test_positional_args(self):
class A(HasTraits):
i = Int(0)
def __init__(self, i):
super(A, self).__init__()
self.i = i
a = A(5)
self.assertEqual(a.i, 5)
# should raise TypeError if no positional arg given
self.assertRaises(TypeError, A)
#-----------------------------------------------------------------------------
# Tests for specific trait types
#-----------------------------------------------------------------------------
class TestType(TestCase):
def test_default(self):
class B(object): pass
class A(HasTraits):
klass = Type
a = A()
self.assertEqual(a.klass, None)
a.klass = B
self.assertEqual(a.klass, B)
self.assertRaises(TraitError, setattr, a, 'klass', 10)
def test_value(self):
class B(object): pass
class C(object): pass
class A(HasTraits):
klass = Type(B)
a = A()
self.assertEqual(a.klass, B)
self.assertRaises(TraitError, setattr, a, 'klass', C)
self.assertRaises(TraitError, setattr, a, 'klass', object)
a.klass = B
def test_allow_none(self):
class B(object): pass
class C(B): pass
class A(HasTraits):
klass = Type(B, allow_none=False)
a = A()
self.assertEqual(a.klass, B)
self.assertRaises(TraitError, setattr, a, 'klass', None)
a.klass = C
self.assertEqual(a.klass, C)
def test_validate_klass(self):
class A(HasTraits):
klass = Type('no strings allowed')
self.assertRaises(ImportError, A)
class A(HasTraits):
klass = Type('rub.adub.Duck')
self.assertRaises(ImportError, A)
def test_validate_default(self):
class B(object): pass
class A(HasTraits):
klass = Type('bad default', B)
self.assertRaises(ImportError, A)
class C(HasTraits):
klass = Type(None, B, allow_none=False)
self.assertRaises(TraitError, C)
def test_str_klass(self):
class A(HasTraits):
klass = Type('IPython.utils.ipstruct.Struct')
from IPython.utils.ipstruct import Struct
a = A()
a.klass = Struct
self.assertEqual(a.klass, Struct)
self.assertRaises(TraitError, setattr, a, 'klass', 10)
class TestInstance(TestCase):
def test_basic(self):
class Foo(object): pass
class Bar(Foo): pass
class Bah(object): pass
class A(HasTraits):
inst = Instance(Foo)
a = A()
self.assertTrue(a.inst is None)
a.inst = Foo()
self.assertTrue(isinstance(a.inst, Foo))
a.inst = Bar()
self.assertTrue(isinstance(a.inst, Foo))
self.assertRaises(TraitError, setattr, a, 'inst', Foo)
self.assertRaises(TraitError, setattr, a, 'inst', Bar)
self.assertRaises(TraitError, setattr, a, 'inst', Bah())
def test_unique_default_value(self):
class Foo(object): pass
class A(HasTraits):
inst = Instance(Foo,(),{})
a = A()
b = A()
self.assertTrue(a.inst is not b.inst)
def test_args_kw(self):
class Foo(object):
def __init__(self, c): self.c = c
class Bar(object): pass
class Bah(object):
def __init__(self, c, d):
self.c = c; self.d = d
class A(HasTraits):
inst = Instance(Foo, (10,))
a = A()
self.assertEqual(a.inst.c, 10)
class B(HasTraits):
inst = Instance(Bah, args=(10,), kw=dict(d=20))
b = B()
self.assertEqual(b.inst.c, 10)
self.assertEqual(b.inst.d, 20)
class C(HasTraits):
inst = Instance(Foo)
c = C()
self.assertTrue(c.inst is None)
def test_bad_default(self):
class Foo(object): pass
class A(HasTraits):
inst = Instance(Foo, allow_none=False)
self.assertRaises(TraitError, A)
def test_instance(self):
class Foo(object): pass
def inner():
class A(HasTraits):
inst = Instance(Foo())
self.assertRaises(TraitError, inner)
class TestThis(TestCase):
def test_this_class(self):
class Foo(HasTraits):
this = This
f = Foo()
self.assertEqual(f.this, None)
g = Foo()
f.this = g
self.assertEqual(f.this, g)
self.assertRaises(TraitError, setattr, f, 'this', 10)
def test_this_inst(self):
class Foo(HasTraits):
this = This()
f = Foo()
f.this = Foo()
self.assertTrue(isinstance(f.this, Foo))
def test_subclass(self):
class Foo(HasTraits):
t = This()
class Bar(Foo):
pass
f = Foo()
b = Bar()
f.t = b
b.t = f
self.assertEqual(f.t, b)
self.assertEqual(b.t, f)
def test_subclass_override(self):
class Foo(HasTraits):
t = This()
class Bar(Foo):
t = This()
f = Foo()
b = Bar()
f.t = b
self.assertEqual(f.t, b)
self.assertRaises(TraitError, setattr, b, 't', f)
class TraitTestBase(TestCase):
"""A best testing class for basic trait types."""
def assign(self, value):
self.obj.value = value
def coerce(self, value):
return value
def test_good_values(self):
if hasattr(self, '_good_values'):
for value in self._good_values:
self.assign(value)
self.assertEqual(self.obj.value, self.coerce(value))
def test_bad_values(self):
if hasattr(self, '_bad_values'):
for value in self._bad_values:
try:
self.assertRaises(TraitError, self.assign, value)
except AssertionError:
assert False, value
def test_default_value(self):
if hasattr(self, '_default_value'):
self.assertEqual(self._default_value, self.obj.value)
def tearDown(self):
# restore default value after tests, if set
if hasattr(self, '_default_value'):
self.obj.value = self._default_value
class AnyTrait(HasTraits):
value = Any
class AnyTraitTest(TraitTestBase):
obj = AnyTrait()
_default_value = None
_good_values = [10.0, 'ten', u'ten', [10], {'ten': 10},(10,), None, 1j]
_bad_values = []
class IntTrait(HasTraits):
value = Int(99)
class TestInt(TraitTestBase):
obj = IntTrait()
_default_value = 99
_good_values = [10, -10]
_bad_values = ['ten', u'ten', [10], {'ten': 10},(10,), None, 1j,
10.1, -10.1, '10L', '-10L', '10.1', '-10.1', u'10L',
u'-10L', u'10.1', u'-10.1', '10', '-10', u'10', u'-10']
if not py3compat.PY3:
_bad_values.extend([long(10), long(-10), 10*sys.maxint, -10*sys.maxint])
class LongTrait(HasTraits):
value = Long(99 if py3compat.PY3 else long(99))
class TestLong(TraitTestBase):
obj = LongTrait()
_default_value = 99 if py3compat.PY3 else long(99)
_good_values = [10, -10]
_bad_values = ['ten', u'ten', [10], {'ten': 10},(10,),
None, 1j, 10.1, -10.1, '10', '-10', '10L', '-10L', '10.1',
'-10.1', u'10', u'-10', u'10L', u'-10L', u'10.1',
u'-10.1']
if not py3compat.PY3:
# maxint undefined on py3, because int == long
_good_values.extend([long(10), long(-10), 10*sys.maxint, -10*sys.maxint])
_bad_values.extend([[long(10)], (long(10),)])
@skipif(py3compat.PY3, "not relevant on py3")
def test_cast_small(self):
"""Long casts ints to long"""
self.obj.value = 10
self.assertEqual(type(self.obj.value), long)
class IntegerTrait(HasTraits):
value = Integer(1)
class TestInteger(TestLong):
obj = IntegerTrait()
_default_value = 1
def coerce(self, n):
return int(n)
@skipif(py3compat.PY3, "not relevant on py3")
def test_cast_small(self):
"""Integer casts small longs to int"""
if py3compat.PY3:
raise SkipTest("not relevant on py3")
self.obj.value = long(100)
self.assertEqual(type(self.obj.value), int)
class FloatTrait(HasTraits):
value = Float(99.0)
class TestFloat(TraitTestBase):
obj = FloatTrait()
_default_value = 99.0
_good_values = [10, -10, 10.1, -10.1]
_bad_values = ['ten', u'ten', [10], {'ten': 10},(10,), None,
1j, '10', '-10', '10L', '-10L', '10.1', '-10.1', u'10',
u'-10', u'10L', u'-10L', u'10.1', u'-10.1']
if not py3compat.PY3:
_bad_values.extend([long(10), long(-10)])
class ComplexTrait(HasTraits):
value = Complex(99.0-99.0j)
class TestComplex(TraitTestBase):
obj = ComplexTrait()
_default_value = 99.0-99.0j
_good_values = [10, -10, 10.1, -10.1, 10j, 10+10j, 10-10j,
10.1j, 10.1+10.1j, 10.1-10.1j]
_bad_values = [u'10L', u'-10L', 'ten', [10], {'ten': 10},(10,), None]
if not py3compat.PY3:
_bad_values.extend([long(10), long(-10)])
class BytesTrait(HasTraits):
value = Bytes(b'string')
class TestBytes(TraitTestBase):
obj = BytesTrait()
_default_value = b'string'
_good_values = [b'10', b'-10', b'10L',
b'-10L', b'10.1', b'-10.1', b'string']
_bad_values = [10, -10, 10.1, -10.1, 1j, [10],
['ten'],{'ten': 10},(10,), None, u'string']
if not py3compat.PY3:
_bad_values.extend([long(10), long(-10)])
class UnicodeTrait(HasTraits):
value = Unicode(u'unicode')
class TestUnicode(TraitTestBase):
obj = UnicodeTrait()
_default_value = u'unicode'
_good_values = ['10', '-10', '10L', '-10L', '10.1',
'-10.1', '', u'', 'string', u'string', u"€"]
_bad_values = [10, -10, 10.1, -10.1, 1j,
[10], ['ten'], [u'ten'], {'ten': 10},(10,), None]
if not py3compat.PY3:
_bad_values.extend([long(10), long(-10)])
class ObjectNameTrait(HasTraits):
value = ObjectName("abc")
class TestObjectName(TraitTestBase):
obj = ObjectNameTrait()
_default_value = "abc"
_good_values = ["a", "gh", "g9", "g_", "_G", u"a345_"]
_bad_values = [1, "", u"€", "9g", "!", "#abc", "aj@", "a.b", "a()", "a[0]",
object(), object]
if sys.version_info[0] < 3:
_bad_values.append(u"þ")
else:
_good_values.append(u"þ") # þ=1 is valid in Python 3 (PEP 3131).
class DottedObjectNameTrait(HasTraits):
value = DottedObjectName("a.b")
class TestDottedObjectName(TraitTestBase):
obj = DottedObjectNameTrait()
_default_value = "a.b"
_good_values = ["A", "y.t", "y765.__repr__", "os.path.join", u"os.path.join"]
_bad_values = [1, u"abc.€", "_.@", ".", ".abc", "abc.", ".abc."]
if sys.version_info[0] < 3:
_bad_values.append(u"t.þ")
else:
_good_values.append(u"t.þ")
class TCPAddressTrait(HasTraits):
value = TCPAddress()
class TestTCPAddress(TraitTestBase):
obj = TCPAddressTrait()
_default_value = ('127.0.0.1',0)
_good_values = [('localhost',0),('192.168.0.1',1000),('www.google.com',80)]
_bad_values = [(0,0),('localhost',10.0),('localhost',-1)]
class ListTrait(HasTraits):
value = List(Int)
class TestList(TraitTestBase):
obj = ListTrait()
_default_value = []
_good_values = [[], [1], list(range(10)), (1,2)]
_bad_values = [10, [1,'a'], 'a']
def coerce(self, value):
if value is not None:
value = list(value)
return value
class LenListTrait(HasTraits):
value = List(Int, [0], minlen=1, maxlen=2)
class TestLenList(TraitTestBase):
obj = LenListTrait()
_default_value = [0]
_good_values = [[1], [1,2], (1,2)]
_bad_values = [10, [1,'a'], 'a', [], list(range(3))]
def coerce(self, value):
if value is not None:
value = list(value)
return value
class TupleTrait(HasTraits):
value = Tuple(Int)
class TestTupleTrait(TraitTestBase):
obj = TupleTrait()
_default_value = None
_good_values = [(1,), None, (0,), [1]]
_bad_values = [10, (1,2), ('a'), ()]
def coerce(self, value):
if value is not None:
value = tuple(value)
return value
def test_invalid_args(self):
self.assertRaises(TypeError, Tuple, 5)
self.assertRaises(TypeError, Tuple, default_value='hello')
t = Tuple(Int, CBytes, default_value=(1,5))
class LooseTupleTrait(HasTraits):
value = Tuple((1,2,3))
class TestLooseTupleTrait(TraitTestBase):
obj = LooseTupleTrait()
_default_value = (1,2,3)
_good_values = [(1,), None, [1], (0,), tuple(range(5)), tuple('hello'), ('a',5), ()]
_bad_values = [10, 'hello', {}]
def coerce(self, value):
if value is not None:
value = tuple(value)
return value
def test_invalid_args(self):
self.assertRaises(TypeError, Tuple, 5)
self.assertRaises(TypeError, Tuple, default_value='hello')
t = Tuple(Int, CBytes, default_value=(1,5))
class MultiTupleTrait(HasTraits):
value = Tuple(Int, Bytes, default_value=[99,b'bottles'])
class TestMultiTuple(TraitTestBase):
obj = MultiTupleTrait()
_default_value = (99,b'bottles')
_good_values = [(1,b'a'), (2,b'b')]
_bad_values = ((),10, b'a', (1,b'a',3), (b'a',1), (1, u'a'))
class CRegExpTrait(HasTraits):
value = CRegExp(r'')
class TestCRegExp(TraitTestBase):
def coerce(self, value):
return re.compile(value)
obj = CRegExpTrait()
_default_value = re.compile(r'')
_good_values = [r'\d+', re.compile(r'\d+')]
_bad_values = [r'(', None, ()]
class DictTrait(HasTraits):
value = Dict()
def test_dict_assignment():
d = dict()
c = DictTrait()
c.value = d
d['a'] = 5
nt.assert_equal(d, c.value)
nt.assert_true(c.value is d)
class TestLink(TestCase):
def test_connect_same(self):
"""Verify two traitlets of the same type can be linked together using link."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
a = A(value=9)
b = A(value=8)
        # Connect the two classes.
c = link((a, 'value'), (b, 'value'))
# Make sure the values are the same at the point of linking.
self.assertEqual(a.value, b.value)
# Change one of the values to make sure they stay in sync.
a.value = 5
self.assertEqual(a.value, b.value)
b.value = 6
self.assertEqual(a.value, b.value)
def test_link_different(self):
"""Verify two traitlets of different types can be linked together using link."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
class B(HasTraits):
count = Int()
a = A(value=9)
b = B(count=8)
        # Connect the two classes.
c = link((a, 'value'), (b, 'count'))
# Make sure the values are the same at the point of linking.
self.assertEqual(a.value, b.count)
# Change one of the values to make sure they stay in sync.
a.value = 5
self.assertEqual(a.value, b.count)
b.count = 4
self.assertEqual(a.value, b.count)
def test_unlink(self):
"""Verify two linked traitlets can be unlinked."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
a = A(value=9)
b = A(value=8)
# Connect the two classes.
c = link((a, 'value'), (b, 'value'))
a.value = 4
c.unlink()
# Change one of the values to make sure they don't stay in sync.
a.value = 5
self.assertNotEqual(a.value, b.value)
def test_callbacks(self):
"""Verify two linked traitlets have their callbacks called once."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
class B(HasTraits):
count = Int()
a = A(value=9)
b = B(count=8)
# Register callbacks that count.
callback_count = []
def a_callback(name, old, new):
callback_count.append('a')
a.on_trait_change(a_callback, 'value')
def b_callback(name, old, new):
callback_count.append('b')
b.on_trait_change(b_callback, 'count')
# Connect the two classes.
c = link((a, 'value'), (b, 'count'))
# Make sure b's count was set to a's value once.
self.assertEqual(''.join(callback_count), 'b')
del callback_count[:]
# Make sure a's value was set to b's count once.
b.count = 5
self.assertEqual(''.join(callback_count), 'ba')
del callback_count[:]
# Make sure b's count was set to a's value once.
a.value = 4
self.assertEqual(''.join(callback_count), 'ab')
del callback_count[:]
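# Sketch (editor's addition): the same link() helper exercised by TestLink,
# used outside the unittest harness.
def _example_link_usage():
    class Model(HasTraits):
        value = Int()
    a, b = Model(value=1), Model(value=2)
    c = link((a, 'value'), (b, 'value'))   # b.value is synced to a.value here
    a.value = 7                            # propagates, so b.value == 7
    c.unlink()                             # further changes no longer propagate
    return a.value, b.value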
| gpl-3.0 | -5,250,593,033,654,834,000 | 26.383562 | 88 | 0.520193 | false |
pakal/django-recurly | django_recurly/handlers.py | 1 | 3931 | """
Push notifications are not meant to be actionable and should not be used for
critical account functions like provisioning accounts. Use the receipt of a
push notification to trigger an API query, validating both the push
notification action and the details of the action.
http://docs.recurly.com/push-notifications
"""
from django_recurly import signals
# Push notification signal handlers
def new(sender, **kwargs):
"""Create the account and the subscription
We do these at the same time (rather than using
the new_account signal) to avoid concurrency problems.
"""
from django_recurly import models
models.Account.handle_notification(**kwargs)
def update(sender, **kwargs):
"""Update a subscription and account"""
from django_recurly import models
models.Account.handle_notification(**kwargs)
def payment(sender, **kwargs):
"""Update a payment and account"""
from django_recurly import models
models.Payment.handle_notification(**kwargs)
# Connect push notification signals
#signals.new_account_notification.connect(new)
signals.new_subscription_notification.connect(new)
signals.updated_subscription_notification.connect(update)
signals.expired_subscription_notification.connect(update)
signals.canceled_subscription_notification.connect(update)
signals.renewed_subscription_notification.connect(update)
signals.reactivated_account_notification.connect(update)
signals.canceled_account_notification.connect(update)
signals.billing_info_updated_notification.connect(update)
signals.successful_payment_notification.connect(payment)
signals.failed_payment_notification.connect(payment)
signals.successful_refund_notification.connect(payment)
signals.void_payment_notification.connect(payment)
## Model signal handlers ##
def account_post_save(sender, instance, created, **kwargs):
if created:
signals.account_created.send(sender=sender, account=instance)
else:
signals.account_updated.send(sender=sender, account=instance)
was_active = not created and instance._previous_state['state'] == 'active'
now_active = instance.is_active()
# Send account closed/opened signals
if was_active and not now_active:
signals.account_closed.send(sender=sender, account=instance)
elif not was_active and now_active:
signals.account_opened.send(sender=sender, account=instance)
def billing_info_post_save(sender, instance, created, **kwargs):
if created:
signals.billing_info_created.send(sender=sender, billing_info=instance)
else:
signals.billing_info_updated.send(sender=sender, billing_info=instance)
def subscription_post_save(sender, instance, created, **kwargs):
if created:
signals.subscription_created.send(sender=sender, subscription=instance)
else:
signals.subscription_updated.send(sender=sender, subscription=instance)
was_current = not created and instance._previous_state['state'] != 'expired'
now_current = instance.state != 'expired'
# Send subscription current/expired signals
if was_current and not now_current:
signals.subscription_expired.send(sender=sender, subscription=instance)
elif not was_current and now_current:
signals.subscription_current.send(sender=sender, subscription=instance)
def payment_post_save(sender, instance, created, **kwargs):
if created:
signals.payment_created.send(sender=sender, payment=instance)
else:
signals.payment_updated.send(sender=sender, payment=instance)
def token_post_save(sender, instance, created, **kwargs):
    # Dispatch on the token instance's type (assumed to be a field on the token model)
    if instance.type == 'subscription':
        signals.subscription_token_created.send(sender=sender, token=instance)
    elif instance.type == 'billing_info':
        signals.billing_info_token_created.send(sender=sender, payment=instance)
    elif instance.type == 'invoice':
        signals.invoice_token_created.send(sender=sender, payment=instance)
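# Sketch (editor's addition): the model-level handlers above are defined but not
# connected in this file.  Typical wiring, assuming django_recurly.models defines
# Account, BillingInfo, Subscription, Payment and Token models, would be:
#
#   from django.db.models.signals import post_save
#   from django_recurly import models
#
#   post_save.connect(account_post_save, sender=models.Account)
#   post_save.connect(billing_info_post_save, sender=models.BillingInfo)
#   post_save.connect(subscription_post_save, sender=models.Subscription)
#   post_save.connect(payment_post_save, sender=models.Payment)
#   post_save.connect(token_post_save, sender=models.Token)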
| bsd-3-clause | -1,967,348,816,661,633,800 | 35.738318 | 80 | 0.752226 | false |
kustodian/ansible-modules-core | windows/win_msi.py | 68 | 1736 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Matt Martz <[email protected]>, and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_msi
version_added: "1.7"
short_description: Installs and uninstalls Windows MSI files
description:
- Installs or uninstalls a Windows MSI file that is already located on the
target server
options:
path:
description:
- File system path to the MSI file to install
required: true
state:
description:
- Whether the MSI file should be installed or uninstalled
choices:
- present
- absent
default: present
creates:
description:
      - Path to a file created by installing the MSI, used to prevent Ansible
        from reinstalling the package on every run
author: Matt Martz
'''
EXAMPLES = '''
# Install an MSI file
- win_msi: path=C:\\\\7z920-x64.msi
# Uninstall an MSI file
- win_msi: path=C:\\\\7z920-x64.msi state=absent
'''
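# Editor's note (addition): the 'creates' option documented above makes the task
# idempotent; an illustrative task (the target path is an assumed example):
#
#   - win_msi: path=C:\\7z920-x64.msi creates='C:\\Program Files\\7-Zip\\7z.exe'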
| gpl-3.0 | -2,217,112,990,770,587,000 | 28.931034 | 78 | 0.682604 | false |
unaizalakain/django | tests/invalid_models_tests/test_backend_specific.py | 191 | 1024 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
from django.core.checks import Error
from django.db import connections, models
from django.test import mock
from .base import IsolatedModelsTestCase
def dummy_allow_migrate(db, app_label, **hints):
# Prevent checks from being run on the 'other' database, which doesn't have
# its check_field() method mocked in the test.
return db == 'default'
class BackendSpecificChecksTests(IsolatedModelsTestCase):
@mock.patch('django.db.models.fields.router.allow_migrate', new=dummy_allow_migrate)
def test_check_field(self):
""" Test if backend specific checks are performed. """
error = Error('an error', hint=None)
class Model(models.Model):
field = models.IntegerField()
field = Model._meta.get_field('field')
with mock.patch.object(connections['default'].validation, 'check_field', return_value=[error]):
errors = field.check()
self.assertEqual(errors, [error])
| bsd-3-clause | -7,799,513,931,742,926,000 | 32.032258 | 103 | 0.685547 | false |
ahmadia/bokeh | bokeh/server/views/backbone.py | 29 | 11075 | from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
from flask import request, jsonify
from bokeh import protocol
from .bbauth import (
handle_auth_error
)
from ..app import bokeh_app
from ..crossdomain import crossdomain
from ..serverbb import get_temporary_docid, BokehServerTransaction
from ..views import make_json
from ..models import docs
def init_bokeh(clientdoc):
request.bokeh_server_document = clientdoc
clientdoc.autostore = False
clientdoc.autoadd = False
@bokeh_app.route("/bokeh/bb/<docid>/gc", methods=['POST'])
@handle_auth_error
def gc(docid):
# client = request.headers.get('client', 'python') # todo: not used?
doc = docs.Doc.load(bokeh_app.servermodel_storage, docid)
bokehuser = bokeh_app.current_user()
temporary_docid = get_temporary_docid(request, docid)
t = BokehServerTransaction(
bokehuser, doc, 'rw', temporary_docid=temporary_docid
)
t.load(gc=True)
t.save()
return jsonify(status='success')
# bulk upsert
@bokeh_app.route("/bokeh/bb/<docid>/bulkupsert", methods=['POST'])
@handle_auth_error
def bulk_upsert(docid):
''' Update or insert new objects for a given :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:status 200: when user is authorized
:status 401: when user is not authorized
'''
# endpoint is only used by python, therefore we don't process
# callbacks here
client = request.headers.get('client', 'python')
doc = docs.Doc.load(bokeh_app.servermodel_storage, docid)
bokehuser = bokeh_app.current_user()
temporary_docid = get_temporary_docid(request, docid)
t = BokehServerTransaction(
bokehuser, doc, 'rw', temporary_docid=temporary_docid
)
t.load()
clientdoc = t.clientdoc
data = protocol.deserialize_json(request.data.decode('utf-8'))
if client == 'python':
clientdoc.load(*data, events='none', dirty=True)
else:
clientdoc.load(*data, events='existing', dirty=True)
t.save()
msg = ws_update(clientdoc, t.write_docid, t.changed)
return make_json(msg)
def ws_update(clientdoc, docid, models):
log.debug("sending wsupdate to %s", docid)
attrs = clientdoc.dump(*models)
msg = protocol.serialize_json({'msgtype' : 'modelpush',
'modelspecs' : attrs
})
bokeh_app.publisher.send("bokehplot:" + docid, msg)
return msg
def ws_delete(clientdoc, docid, models):
attrs = clientdoc.dump(*models)
msg = {
'msgtype' : 'modeldel',
'modelspecs' : attrs,
}
msg = protocol.serialize_json(msg)
bokeh_app.wsmanager.send("bokehplot:" + docid, msg)
return msg
# backbone functionality
@bokeh_app.route("/bokeh/bb/<docid>/<typename>/", methods=['POST'])
@handle_auth_error
def create(docid, typename):
''' Update or insert new objects for a given :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:status 200: when user is authorized
:status 401: when user is not authorized
'''
doc = docs.Doc.load(bokeh_app.servermodel_storage, docid)
bokehuser = bokeh_app.current_user()
temporary_docid = get_temporary_docid(request, docid)
t = BokehServerTransaction(
bokehuser, doc, 'rw', temporary_docid=temporary_docid
)
t.load()
modeldata = protocol.deserialize_json(request.data.decode('utf-8'))
modeldata = [{'type' : typename,
'attributes' : modeldata}]
t.clientdoc.load(*modeldata, dirty=True)
t.save()
ws_update(t.clientdoc, t.write_docid, modeldata)
return protocol.serialize_json(modeldata[0]['attributes'])
@handle_auth_error
def _bulkget(docid, typename=None):
doc = docs.Doc.load(bokeh_app.servermodel_storage, docid)
bokehuser = bokeh_app.current_user()
temporary_docid = get_temporary_docid(request, docid)
t = BokehServerTransaction(
bokehuser, doc, 'r', temporary_docid=temporary_docid
)
t.load()
clientdoc = t.clientdoc
all_models = clientdoc._models.values()
if typename is not None:
attrs = clientdoc.dump(*[x for x in all_models \
if x.__view_model__==typename])
attrs = [x['attributes'] for x in attrs]
return make_json(protocol.serialize_json(attrs))
else:
attrs = clientdoc.dump(*all_models)
return make_json(protocol.serialize_json(attrs))
@bokeh_app.route("/bokeh/bb/<docid>/", methods=['GET'])
def bulkget_without_typename(docid):
''' Retrieve all objects for a given :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:status 200: when user is authorized
:status 401: when user is not authorized
'''
return _bulkget(docid)
@bokeh_app.route("/bokeh/bb/<docid>/<typename>/", methods=['GET'])
def bulkget_with_typename(docid, typename):
''' Retrieve all objects of a specified typename for a
given :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:param typename: the type of objects to find and return
:status 200: when user is authorized
:status 401: when user is not authorized
'''
return _bulkget(docid, typename)
@crossdomain(origin="*", methods=['PATCH', 'GET', 'PUT'], headers=None)
def _handle_specific_model(docid, typename, id, method):
if method == 'PUT':
return update(docid, typename, id)
elif method == 'PATCH':
return update(docid, typename, id)
elif method == 'GET':
return getbyid(docid, typename, id)
elif method == 'DELETE':
return delete(docid, typename, id)
# route for working with individual models
@bokeh_app.route("/bokeh/bb/<docid>/<typename>/<id>/", methods=['GET', 'OPTIONS'])
def _handle_specific_model_get(docid, typename, id):
''' Retrieve a specific model with a given id and typename for a
given :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:param typename: the type of objects to find and return
:param id: unique id of the object to retrieve
:status 200: when user is authorized
:status 401: when user is not authorized
'''
return _handle_specific_model(docid, typename, id, request.method)
@bokeh_app.route("/bokeh/bb/<docid>/<typename>/<id>/", methods=['PUT'])
def _handle_specific_model_put(docid, typename, id):
''' Update a specific model with a given id and typename for a
given :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:param typename: the type of objects to find and return
:param id: unique id of the object to retrieve
:status 200: when user is authorized
:status 401: when user is not authorized
'''
return _handle_specific_model(docid, typename, id, request.method)
@bokeh_app.route("/bokeh/bb/<docid>/<typename>/<id>/", methods=['PATCH'])
def _handle_specific_model_patch(docid, typename, id):
''' Update a specific model with a given id and typename for a
given :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:param typename: the type of objects to find and return
:param id: unique id of the object to retrieve
:status 200: when user is authorized
:status 401: when user is not authorized
'''
return _handle_specific_model(docid, typename, id, request.method)
@bokeh_app.route("/bokeh/bb/<docid>/<typename>/<id>/", methods=['DELETE'])
def _handle_specific_model_delete(docid, typename, id):
''' Delete a specific model with a given id and typename for a
given :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:param typename: the type of objects to find and return
:param id: unique id of the object to retrieve
:status 200: when user is authorized
:status 401: when user is not authorized
'''
return _handle_specific_model(docid, typename, id, request.method)
# individual model methods
@handle_auth_error
def getbyid(docid, typename, id):
doc = docs.Doc.load(bokeh_app.servermodel_storage, docid)
bokehuser = bokeh_app.current_user()
temporary_docid = get_temporary_docid(request, docid)
t = BokehServerTransaction(
bokehuser, doc, 'r', temporary_docid=temporary_docid
)
t.load()
clientdoc = t.clientdoc
attr = clientdoc.dump(clientdoc._models[id])[0]['attributes']
return make_json(protocol.serialize_json(attr))
@handle_auth_error
def update(docid, typename, id):
"""we need to distinguish between writing and patching models
namely in writing, we shouldn't remove unspecified attrs
(we currently don't handle this correctly)
"""
doc = docs.Doc.load(bokeh_app.servermodel_storage, docid)
bokehuser = bokeh_app.current_user()
temporary_docid = get_temporary_docid(request, docid)
t = BokehServerTransaction(
bokehuser, doc, 'rw', temporary_docid=temporary_docid
)
t.load()
modeldata = protocol.deserialize_json(request.data.decode('utf-8'))
### horrible hack, we need to pop off the noop object if it exists
modeldata.pop('noop', None)
clientdoc = t.clientdoc
log.info("loading done %s", len(clientdoc._models.values()))
# patch id is not passed...
modeldata['id'] = id
modeldata = {'type' : typename,
'attributes' : modeldata}
clientdoc.load(modeldata, events='existing', dirty=True)
t.save()
ws_update(clientdoc, t.write_docid, t.changed)
# backbone expects us to send back attrs of this model, but it doesn't
# make sense to do so because we modify other models, and we want this to
# all go out over the websocket channel
return make_json(protocol.serialize_json({'noop' : True}))
@handle_auth_error
def delete(docid, typename, id):
    # Upstream noted this handler did not work: `obj` was an undefined
    # placeholder string. The version below is a sketch assuming del_obj
    # expects the model being deleted and that the transaction is loaded
    # first, mirroring the other handlers in this module.
    doc = docs.Doc.load(bokeh_app.servermodel_storage, docid)
    bokehuser = bokeh_app.current_user()
    temporary_docid = get_temporary_docid(request, docid)
    t = BokehServerTransaction(
        bokehuser, doc, 'rw', temporary_docid=temporary_docid
    )
    t.load()
    clientdoc = t.clientdoc
    model = clientdoc._models[id]
    bokeh_app.backbone_storage.del_obj(t.write_docid, model)
t.save()
ws_delete(clientdoc, t.write_docid, [model])
return make_json(protocol.serialize_json(clientdoc.dump(model)[0]['attributes']))
| bsd-3-clause | 8,666,370,358,181,657,000 | 35.672185 | 93 | 0.674853 | false |
google/mysql-protobuf | storage/ndb/mcc/tst/unittest2/runner.py | 164 | 6757 | """Running tests"""
import sys
import time
import unittest
from unittest2 import result
try:
from unittest2.signals import registerResult
except ImportError:
def registerResult(_):
pass
__unittest = True
class _WritelnDecorator(object):
"""Used to decorate file-like objects with a handy 'writeln' method"""
def __init__(self,stream):
self.stream = stream
def __getattr__(self, attr):
if attr in ('stream', '__getstate__'):
raise AttributeError(attr)
return getattr(self.stream,attr)
def writeln(self, arg=None):
if arg:
self.write(arg)
self.write('\n') # text-mode streams translate to \r\n if needed
class TextTestResult(result.TestResult):
"""A test result class that can print formatted text results to a stream.
Used by TextTestRunner.
"""
separator1 = '=' * 70
separator2 = '-' * 70
def __init__(self, stream, descriptions, verbosity):
super(TextTestResult, self).__init__()
self.stream = stream
self.showAll = verbosity > 1
self.dots = verbosity == 1
self.descriptions = descriptions
def getDescription(self, test):
doc_first_line = test.shortDescription()
if self.descriptions and doc_first_line:
return '\n'.join((str(test), doc_first_line))
else:
return str(test)
def startTest(self, test):
super(TextTestResult, self).startTest(test)
if self.showAll:
self.stream.write(self.getDescription(test))
self.stream.write(" ... ")
self.stream.flush()
def addSuccess(self, test):
super(TextTestResult, self).addSuccess(test)
if self.showAll:
self.stream.writeln("ok")
elif self.dots:
self.stream.write('.')
self.stream.flush()
def addError(self, test, err):
super(TextTestResult, self).addError(test, err)
if self.showAll:
self.stream.writeln("ERROR")
elif self.dots:
self.stream.write('E')
self.stream.flush()
def addFailure(self, test, err):
super(TextTestResult, self).addFailure(test, err)
if self.showAll:
self.stream.writeln("FAIL")
elif self.dots:
self.stream.write('F')
self.stream.flush()
def addSkip(self, test, reason):
super(TextTestResult, self).addSkip(test, reason)
if self.showAll:
self.stream.writeln("skipped %r" % (reason,))
elif self.dots:
self.stream.write("s")
self.stream.flush()
def addExpectedFailure(self, test, err):
super(TextTestResult, self).addExpectedFailure(test, err)
if self.showAll:
self.stream.writeln("expected failure")
elif self.dots:
self.stream.write("x")
self.stream.flush()
def addUnexpectedSuccess(self, test):
super(TextTestResult, self).addUnexpectedSuccess(test)
if self.showAll:
self.stream.writeln("unexpected success")
elif self.dots:
self.stream.write("u")
self.stream.flush()
def printErrors(self):
if self.dots or self.showAll:
self.stream.writeln()
self.printErrorList('ERROR', self.errors)
self.printErrorList('FAIL', self.failures)
def printErrorList(self, flavour, errors):
for test, err in errors:
self.stream.writeln(self.separator1)
self.stream.writeln("%s: %s" % (flavour, self.getDescription(test)))
self.stream.writeln(self.separator2)
self.stream.writeln("%s" % err)
def stopTestRun(self):
super(TextTestResult, self).stopTestRun()
self.printErrors()
class TextTestRunner(unittest.TextTestRunner):
"""A test runner class that displays results in textual form.
It prints out the names of tests as they are run, errors as they
occur, and a summary of the results at the end of the test run.
"""
resultclass = TextTestResult
def __init__(self, stream=sys.stderr, descriptions=True, verbosity=1,
failfast=False, buffer=False, resultclass=None):
self.stream = _WritelnDecorator(stream)
self.descriptions = descriptions
self.verbosity = verbosity
self.failfast = failfast
self.buffer = buffer
if resultclass is not None:
self.resultclass = resultclass
def _makeResult(self):
return self.resultclass(self.stream, self.descriptions, self.verbosity)
def run(self, test):
"Run the given test case or test suite."
result = self._makeResult()
result.failfast = self.failfast
result.buffer = self.buffer
registerResult(result)
startTime = time.time()
startTestRun = getattr(result, 'startTestRun', None)
if startTestRun is not None:
startTestRun()
try:
test(result)
finally:
stopTestRun = getattr(result, 'stopTestRun', None)
if stopTestRun is not None:
stopTestRun()
else:
result.printErrors()
stopTime = time.time()
timeTaken = stopTime - startTime
if hasattr(result, 'separator2'):
self.stream.writeln(result.separator2)
run = result.testsRun
self.stream.writeln("Ran %d test%s in %.3fs" %
(run, run != 1 and "s" or "", timeTaken))
self.stream.writeln()
expectedFails = unexpectedSuccesses = skipped = 0
try:
results = map(len, (result.expectedFailures,
result.unexpectedSuccesses,
result.skipped))
expectedFails, unexpectedSuccesses, skipped = results
except AttributeError:
pass
infos = []
if not result.wasSuccessful():
self.stream.write("FAILED")
failed, errored = map(len, (result.failures, result.errors))
if failed:
infos.append("failures=%d" % failed)
if errored:
infos.append("errors=%d" % errored)
else:
self.stream.write("OK")
if skipped:
infos.append("skipped=%d" % skipped)
if expectedFails:
infos.append("expected failures=%d" % expectedFails)
if unexpectedSuccesses:
infos.append("unexpected successes=%d" % unexpectedSuccesses)
if infos:
self.stream.writeln(" (%s)" % (", ".join(infos),))
else:
self.stream.write("\n")
return result
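if __name__ == '__main__':
    # Illustrative usage sketch (not part of the original module); the demo
    # test case below is a hypothetical placeholder used only to exercise
    # TextTestRunner directly.
    import unittest2

    class _DemoTest(unittest2.TestCase):
        def test_truth(self):
            self.assertTrue(True)

    _suite = unittest2.TestLoader().loadTestsFromTestCase(_DemoTest)
    TextTestRunner(verbosity=2).run(_suite)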
| gpl-2.0 | 3,919,053,085,546,482,000 | 31.800971 | 80 | 0.584283 | false |
dstiert/Wox | PythonHome/Lib/site-packages/pip/_vendor/requests/__init__.py | 327 | 1856 | # -*- coding: utf-8 -*-
# __
# /__) _ _ _ _ _/ _
# / ( (- (/ (/ (- _) / _)
# /
"""
requests HTTP library
~~~~~~~~~~~~~~~~~~~~~
Requests is an HTTP library, written in Python, for human beings. Basic GET
usage:
>>> import requests
>>> r = requests.get('http://python.org')
>>> r.status_code
200
>>> 'Python is a programming language' in r.content
True
... or POST:
>>> payload = dict(key1='value1', key2='value2')
>>> r = requests.post("http://httpbin.org/post", data=payload)
>>> print(r.text)
{
...
"form": {
"key2": "value2",
"key1": "value1"
},
...
}
The other HTTP methods are supported - see `requests.api`. Full documentation
is at <http://python-requests.org>.
:copyright: (c) 2014 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details.
"""
__title__ = 'requests'
__version__ = '2.3.0'
__build__ = 0x020300
__author__ = 'Kenneth Reitz'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2014 Kenneth Reitz'
# Attempt to enable urllib3's SNI support, if possible
try:
from .packages.urllib3.contrib import pyopenssl
pyopenssl.inject_into_urllib3()
except ImportError:
pass
from . import utils
from .models import Request, Response, PreparedRequest
from .api import request, get, head, post, patch, put, delete, options
from .sessions import session, Session
from .status_codes import codes
from .exceptions import (
RequestException, Timeout, URLRequired,
TooManyRedirects, HTTPError, ConnectionError
)
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
| mit | -4,502,144,626,769,838,600 | 23.103896 | 77 | 0.631466 | false |
felipenaselva/repo.felipe | plugin.video.salts/scrapers/couchtunerv1_scraper.py | 1 | 4132 | """
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urlparse
import log_utils
import kodi
import dom_parser
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import QUALITIES
from salts_lib.constants import VIDEO_TYPES
import scraper
BASE_URL = 'http://www.couchtuner.ch'
BASE_URL2 = 'http://couchtuner.city'
class Scraper(scraper.Scraper):
base_url = BASE_URL
def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
self.timeout = timeout
self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
@classmethod
def provides(cls):
return frozenset([VIDEO_TYPES.TVSHOW, VIDEO_TYPES.EPISODE])
@classmethod
def get_name(cls):
return 'CouchTunerV1'
def get_sources(self, video):
source_url = self.get_url(video)
hosters = []
if source_url and source_url != FORCE_NO_MATCH:
url = urlparse.urljoin(self.base_url, source_url)
entry = ''
while True:
html = self._http_get(url, cache_limit=.5)
if not html:
url = urlparse.urljoin(BASE_URL2, source_url)
html = self._http_get(url, cache_limit=.5)
entry = dom_parser.parse_dom(html, 'div', {'class': 'entry'})
if entry:
entry = entry[0]
match = re.search('Watch it here\s*:.*?href="([^"]+)', entry, re.I)
if match:
url = match.group(1)
else:
break
else:
entry = ''
break
for tab in dom_parser.parse_dom(entry, 'div', {'class': '''[^'"]*postTabs_divs[^'"]*'''}):
match = re.search('<iframe[^>]*src="([^"]+)', tab, re.I | re.DOTALL)
if match:
link = match.group(1)
host = urlparse.urlparse(link).hostname
hoster = {'multi-part': False, 'host': host, 'class': self, 'quality': scraper_utils.get_quality(video, host, QUALITIES.HIGH), 'views': None, 'rating': None, 'url': link, 'direct': False}
hosters.append(hoster)
return hosters
def _get_episode_url(self, show_url, video):
episode_pattern = 'href="([^"]+[sS](?:eason-)?%s-[eE](?:pisode-)?%s-[^"]+)' % (video.season, video.episode)
title_pattern = 'href="(?P<url>[^"]+season-\d+-episode-\d+-[^"]+).*?8211;\s*(?P<title>[^<]+)'
return self._default_get_episode_url(show_url, video, episode_pattern, title_pattern)
def search(self, video_type, title, year, season=''):
show_list_url = urlparse.urljoin(self.base_url, '/tv-lists/')
html = self._http_get(show_list_url, cache_limit=8)
results = []
norm_title = scraper_utils.normalize_title(title)
for item in dom_parser.parse_dom(html, 'li'):
match = re.search('href="([^"]+)">(.*?)</a>', item)
if match:
url, match_title = match.groups()
match_title = re.sub('</?strong[^>]*>', '', match_title)
if norm_title in scraper_utils.normalize_title(match_title):
result = {'url': scraper_utils.pathify_url(url), 'title': scraper_utils.cleanse_title(match_title), 'year': ''}
results.append(result)
return results
| gpl-2.0 | 7,472,876,776,667,992,000 | 40.737374 | 207 | 0.572604 | false |
sethkontny/blaze | blaze/data/tests/test_usability.py | 1 | 2230 | from unittest import TestCase
import os
from tempfile import mktemp
import gzip
from blaze.utils import filetext, filetexts, tmpfile
from blaze.data import *
from blaze.py2help import skip
class TestResource(TestCase):
def setUp(self):
self.filename = mktemp()
def tearDown(self):
if os.path.exists(self.filename):
os.remove(self.filename)
def test_resource_csv(self):
with filetext('1,1\n2,2', extension='.csv') as fn:
dd = resource(fn, schema='2 * int')
assert isinstance(dd, CSV)
self.assertEqual(list(dd), [[1, 1], [2, 2]])
def test_resource_json(self):
with filetext('[[1,1], [2,2]]', extension='.json') as fn:
dd = resource(fn, schema='2 * int')
assert isinstance(dd, JSON)
self.assertEqual(list(dd), [[1, 1], [2, 2]])
def test_resource_gz(self):
with filetext('1,1\n2,2', extension='.csv.gz', open=gzip.open) as fn:
dd = resource(fn, schema='2 * int')
assert isinstance(dd, CSV)
self.assertEqual(dd.open, gzip.open)
self.assertEqual(list(dd), [[1, 1], [2, 2]])
def test_filesystem(self):
d = {'a.csv': '1,1\n2,2', 'b.csv': '1,1\n2,2'}
with filetexts(d) as filenames:
dd = resource('*.csv', schema='2 * int')
assert isinstance(dd, Files)
def test_sql(self):
assert isinstance(resource('sqlite:///:memory:::tablename',
schema='{x: int, y: int}'),
SQL)
@skip("This runs fine in isolation, segfaults in full test")
def test_hdf5(self):
with tmpfile('.hdf5') as filename:
assert isinstance(resource(filename + '::/path/to/data/',
mode='w', schema='2 * int'),
HDF5)
class TestCopy(TestCase):
def test_copy(self):
with filetext('1,1\n2,2', extension='.csv') as a:
with tmpfile(extension='.csv') as b:
A = resource(a, schema='2 * int')
B = resource(b, schema='2 * int', mode='a')
copy(A, B)
assert list(B) == [[1, 1], [2, 2]]
| bsd-3-clause | -749,344,270,761,291,500 | 34.967742 | 77 | 0.529148 | false |
chainer/chainer | chainer/training/extension.py | 8 | 6662 | from chainer.utils import argument
PRIORITY_WRITER = 300
PRIORITY_EDITOR = 200
PRIORITY_READER = 100
class Extension(object):
"""Base class of trainer extensions.
Extension of :class:`Trainer` is a callable object that takes the trainer
object as the argument. It also provides some default configurations as its
attributes, e.g. the default trigger and the default priority. This class
provides a set of typical default values for these attributes.
There are three ways to define users' own extensions: inheriting this
class, decorating closures by :func:`make_extension`, or using any callable
including lambda functions as extensions. Decorator can slightly reduce the
overhead and is much easier to use, while this class provides more
flexibility (for example, it can have methods to configure the behavior).
Using a lambda function allows one-line coding for simple purposes, but
users have to specify the configurations as arguments to
:meth:`Trainer.extend`. For a callable not inheriting this class, the
default configurations of this class are used unless the user explicitly
specifies them in :meth:`Trainer.extend` method.
Attributes:
trigger: Default value of trigger for this extension. It is set to
``(1, 'iteration')`` by default.
priority: Default priority of the extension. It is set to
``PRIORITY_READER`` by default.
~Extension.name: Name of the extension. It is set to
``None`` by default. This value will be overwritten when
registering an extension to a trainer. See
:meth:`chainer.training.Trainer.extend` for details.
"""
trigger = 1, 'iteration'
priority = PRIORITY_READER
name = None
@property
def default_name(self):
"""Default name of the extension.
It is the name of the class by default. Implementation can override
this property, or provide a class attribute to hide it.
"""
return type(self).__name__
def __call__(self, trainer):
"""Invokes the extension.
Implementations should override this operator. This method is called
at iterations which the corresponding trigger accepts.
Args:
trainer (Trainer): Trainer object that calls this operator.
"""
raise NotImplementedError(
'Extension implementation must override __call__.')
def __getattr__(self, name):
if name == 'invoke_before_training':
raise AttributeError(
'invoke_before_training has been removed since Chainer '
'v2.0.0. Use Extension.initialize instead.')
raise AttributeError('{} object has no attribute {}'.format(
type(self).__name__, name))
def finalize(self):
"""Finalizes the extension.
This method is called at the end of the training loop.
"""
pass
def initialize(self, trainer):
"""Initializes up the trainer state.
This method is called before entering the training loop. An extension
that modifies the state of :class:`~chainer.training.Trainer` can
override this method to initialize it.
When the trainer has been restored from a snapshot, this method has to
recover an appropriate part of the state of the trainer.
For example, :class:`~chainer.training.extensions.ExponentialShift`
extension changes the optimizer's hyperparameter at each invocation.
Note that the hyperparameter is not saved to the snapshot; it is the
responsibility of the extension to recover the hyperparameter.
The :class:`~chainer.training.extensions.ExponentialShift` extension
recovers it in its ``initialize`` method if it has been loaded from a
snapshot, or just setting the initial value otherwise.
Args:
trainer (Trainer): Trainer object that runs the training loop.
"""
pass
def on_error(self, trainer, exc, tb):
"""Handles the error raised during training before finalization.
This method is called when an exception is thrown during the
training loop, before finalize. An extension that needs
different error handling from finalize, can override this
method to handle errors.
Args:
trainer (Trainer): Trainer object that runs the training loop.
exc (Exception): arbitrary exception thrown during update loop.
tb (traceback): traceback object of the exception
"""
pass
def serialize(self, serializer):
"""Serializes the extension state.
It is called when a trainer that owns this extension is serialized. It
serializes nothing by default.
"""
pass
def make_extension(trigger=None, default_name=None, priority=None,
finalizer=None, initializer=None, on_error=None, **kwargs):
"""Decorator to make given functions into trainer extensions.
This decorator just adds some attributes to a given function. The value of
the attributes are given by the arguments of this decorator.
See :class:`Extension` for details of trainer extensions. Most of the
default values of arguments also follow those for this class.
Args:
trigger: Default trigger of the extension.
default_name: Default name of the extension. The name of a given
function is used by default.
priority (int): Default priority of the extension.
finalizer: Finalizer function of this extension. It is
called at the end of the training loop.
initializer: Initializer function of this extension. It is called at
the beginning of the training loop.
on_error: Error handler callback function of this extension. It is
called after an error is raised during the trainer loop.
"""
if kwargs:
msg = ('invoke_before_training has been removed since Chainer v2.0.0. '
'Use initializer= instead.')
argument.check_unexpected_kwargs(kwargs, invoke_before_training=msg)
argument.assert_kwargs_empty(kwargs)
if trigger is None:
trigger = Extension.trigger
if priority is None:
priority = Extension.priority
def decorator(ext):
ext.trigger = trigger
ext.default_name = default_name or ext.__name__
ext.priority = priority
ext.finalize = finalizer
ext.on_error = on_error
ext.initialize = initializer
return ext
return decorator
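# Illustrative usage sketch (not part of the original module). The function
# below is a hypothetical example extension, not a Chainer API; a Trainer
# would register it with trainer.extend(_example_report_iteration).
@make_extension(trigger=(10, 'iteration'), priority=PRIORITY_READER)
def _example_report_iteration(trainer):
    # Print the updater's current iteration count each time the trigger fires.
    print('iteration:', trainer.updater.iteration)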
| mit | 4,676,938,823,934,928,000 | 36.852273 | 79 | 0.669168 | false |
The-end-novel/Freedom-Web | app/main/views.py | 1 | 9447 | from flask import render_template, redirect, url_for, abort, flash, request,\
current_app, make_response
from flask_login import login_required, current_user
from . import main
from .forms import EditProfileForm, EditProfileAdminForm, PostForm,\
CommentForm
from .. import db
from ..models import Permission, Role, User, Post, Comment
from ..decorators import admin_required, permission_required
@main.route('/', methods=['GET', 'POST'])
def index():
pic = ('jpg', 'png', 'jpeg', 'gif')
form = PostForm()
if current_user.can(Permission.WRITE_ARTICLES) and \
form.validate_on_submit():
if form.body.data.endswith(pic):
form.body.data = "<img src="+form.body.data+">"
post = Post(body=form.body.data,
author=current_user._get_current_object())
db.session.add(post)
return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
show_followed = False
if current_user.is_authenticated:
show_followed = bool(request.cookies.get('show_followed', ''))
if show_followed:
query = current_user.followed_posts
else:
query = Post.query
pagination = query.order_by(Post.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_POSTS_PER_PAGE'],
error_out=False)
posts = pagination.items
return render_template('index.html', form=form, posts=posts,
show_followed=show_followed, pagination=pagination)
@main.route('/user/<username>')
def user(username):
user = User.query.filter_by(username=username).first_or_404()
page = request.args.get('page', 1, type=int)
pagination = user.posts.order_by(Post.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_POSTS_PER_PAGE'],
error_out=False)
posts = pagination.items
return render_template('user.html', user=user, posts=posts,
pagination=pagination)
@main.route('/edit-profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
form = EditProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.about_me = form.about_me.data
db.session.add(current_user)
flash('Your profile has been updated.')
return redirect(url_for('.user', username=current_user.username))
form.name.data = current_user.name
form.location.data = current_user.location
form.about_me.data = current_user.about_me
return render_template('edit_profile.html', form=form)
@main.route('/edit-profile/<int:id>', methods=['GET', 'POST'])
@login_required
@admin_required
def edit_profile_admin(id):
user = User.query.get_or_404(id)
form = EditProfileAdminForm(user=user)
if form.validate_on_submit():
user.email = form.email.data
user.username = form.username.data
user.confirmed = form.confirmed.data
user.role = Role.query.get(form.role.data)
user.name = form.name.data
user.location = form.location.data
user.about_me = form.about_me.data
db.session.add(user)
flash('The profile has been updated.')
return redirect(url_for('.user', username=user.username))
form.email.data = user.email
form.username.data = user.username
form.confirmed.data = user.confirmed
form.role.data = user.role_id
form.name.data = user.name
form.location.data = user.location
form.about_me.data = user.about_me
return render_template('edit_profile.html', form=form, user=user)
@main.route('/post/<int:id>', methods=['GET', 'POST'])
def post(id):
post = Post.query.get_or_404(id)
form = CommentForm()
if form.validate_on_submit():
comment = Comment(body=form.body.data,
post=post,
author=current_user._get_current_object())
db.session.add(comment)
flash('Your comment has been published.')
return redirect(url_for('.post', id=post.id, page=-1))
page = request.args.get('page', 1, type=int)
if page == -1:
page = (post.comments.count() - 1) // \
current_app.config['FLASKY_COMMENTS_PER_PAGE'] + 1
pagination = post.comments.order_by(Comment.timestamp.asc()).paginate(
page, per_page=current_app.config['FLASKY_COMMENTS_PER_PAGE'],
error_out=False)
comments = pagination.items
return render_template('post.html', posts=[post], form=form,
comments=comments, pagination=pagination)
@main.route('/edit/<int:id>', methods=['GET', 'POST'])
@login_required
def edit(id):
pic = ('jpg', 'png', 'jpeg', 'gif')
post = Post.query.get_or_404(id)
if current_user != post.author and \
not current_user.can(Permission.ADMINISTER):
abort(403)
form = PostForm()
if form.validate_on_submit():
if form.body.data.endswith(pic):
form.body.data = "<img src="+form.body.data+">"
post.body = form.body.data
db.session.add(post)
flash('The post has been updated.')
return redirect(url_for('.post', id=post.id))
form.body.data = post.body
return render_template('edit_post.html', form=form)
@main.route('/follow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def follow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
if current_user.is_following(user):
flash('You are already following this user.')
return redirect(url_for('.user', username=username))
current_user.follow(user)
flash('You are now following %s.' % username)
return redirect(url_for('.user', username=username))
@main.route('/unfollow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def unfollow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
if not current_user.is_following(user):
flash('You are not following this user.')
return redirect(url_for('.user', username=username))
current_user.unfollow(user)
flash('You are not following %s anymore.' % username)
return redirect(url_for('.user', username=username))
@main.route('/followers/<username>')
def followers(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
pagination = user.followers.paginate(
page, per_page=current_app.config['FLASKY_FOLLOWERS_PER_PAGE'],
error_out=False)
follows = [{'user': item.follower, 'timestamp': item.timestamp}
for item in pagination.items]
return render_template('followers.html', user=user, title="Followers of",
endpoint='.followers', pagination=pagination,
follows=follows)
@main.route('/followed-by/<username>')
def followed_by(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
pagination = user.followed.paginate(
page, per_page=current_app.config['FLASKY_FOLLOWERS_PER_PAGE'],
error_out=False)
follows = [{'user': item.followed, 'timestamp': item.timestamp}
for item in pagination.items]
return render_template('followers.html', user=user, title="Followed by",
endpoint='.followed_by', pagination=pagination,
follows=follows)
@main.route('/all')
@login_required
def show_all():
resp = make_response(redirect(url_for('.index')))
resp.set_cookie('show_followed', '', max_age=30*24*60*60)
return resp
@main.route('/followed')
@login_required
def show_followed():
resp = make_response(redirect(url_for('.index')))
resp.set_cookie('show_followed', '1', max_age=30*24*60*60)
return resp
@main.route('/moderate')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate():
page = request.args.get('page', 1, type=int)
pagination = Comment.query.order_by(Comment.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_COMMENTS_PER_PAGE'],
error_out=False)
comments = pagination.items
return render_template('moderate.html', comments=comments,
pagination=pagination, page=page)
@main.route('/moderate/enable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_enable(id):
comment = Comment.query.get_or_404(id)
comment.disabled = False
db.session.add(comment)
return redirect(url_for('.moderate',
page=request.args.get('page', 1, type=int)))
@main.route('/moderate/disable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_disable(id):
comment = Comment.query.get_or_404(id)
comment.disabled = True
db.session.add(comment)
return redirect(url_for('.moderate',
page=request.args.get('page', 1, type=int)))
| mit | -5,424,992,443,400,326,000 | 36.339921 | 78 | 0.643379 | false |
nmercier/linux-cross-gcc | linux/lib/python2.7/lib-tk/Tkconstants.py | 375 | 1493 | # Symbolic constants for Tk
# Booleans
NO=FALSE=OFF=0
YES=TRUE=ON=1
# -anchor and -sticky
N='n'
S='s'
W='w'
E='e'
NW='nw'
SW='sw'
NE='ne'
SE='se'
NS='ns'
EW='ew'
NSEW='nsew'
CENTER='center'
# -fill
NONE='none'
X='x'
Y='y'
BOTH='both'
# -side
LEFT='left'
TOP='top'
RIGHT='right'
BOTTOM='bottom'
# -relief
RAISED='raised'
SUNKEN='sunken'
FLAT='flat'
RIDGE='ridge'
GROOVE='groove'
SOLID = 'solid'
# -orient
HORIZONTAL='horizontal'
VERTICAL='vertical'
# -tabs
NUMERIC='numeric'
# -wrap
CHAR='char'
WORD='word'
# -align
BASELINE='baseline'
# -bordermode
INSIDE='inside'
OUTSIDE='outside'
# Special tags, marks and insert positions
SEL='sel'
SEL_FIRST='sel.first'
SEL_LAST='sel.last'
END='end'
INSERT='insert'
CURRENT='current'
ANCHOR='anchor'
ALL='all' # e.g. Canvas.delete(ALL)
# Text widget and button states
NORMAL='normal'
DISABLED='disabled'
ACTIVE='active'
# Canvas state
HIDDEN='hidden'
# Menu item types
CASCADE='cascade'
CHECKBUTTON='checkbutton'
COMMAND='command'
RADIOBUTTON='radiobutton'
SEPARATOR='separator'
# Selection modes for list boxes
SINGLE='single'
BROWSE='browse'
MULTIPLE='multiple'
EXTENDED='extended'
# Activestyle for list boxes
# NONE='none' is also valid
DOTBOX='dotbox'
UNDERLINE='underline'
# Various canvas styles
PIESLICE='pieslice'
CHORD='chord'
ARC='arc'
FIRST='first'
LAST='last'
BUTT='butt'
PROJECTING='projecting'
ROUND='round'
BEVEL='bevel'
MITER='miter'
# Arguments to xview/yview
MOVETO='moveto'
SCROLL='scroll'
UNITS='units'
PAGES='pages'
| bsd-3-clause | -2,589,145,399,336,889,000 | 12.572727 | 42 | 0.711989 | false |
mrooney/metakv | website/metakv/test_helpers.py | 1 | 3165 | from django.utils import unittest
import django.test
from django.test.client import Client
from lxml import html
from cssselect import HTMLTranslator
from metakv import util  # assumed import path for the project's JSON helpers used below (missing in the original)
class NotOkay(Exception):
def __init__(self, response):
Exception.__init__(self, "%r: %r" % (response.status_code, response))
self.response = response
self.status = response.status_code
class ExtendedTestCase(django.test.TestCase):
def after_setUp(self):
""" Override this to do extra setup. """
def before_tearDown(self):
""" Override this to do extra tear-down. """
def assertStatus(self, status, path, **kwargs):
try:
response = self.get(path, **kwargs)
except NotOkay, no:
response = no.response
self.assertEqual(status, response.status_code)
@classmethod
def get_client(cls, user=None):
client = Client()
if user:
assert client.login(username=user.username, password="foobar")
return client
@classmethod
def _http_verb(cls, verb, path, client=None, data=None, https=False, user=None, raise_errors=True, **kwargs):
data = data or {}
client = client or cls.get_client(user)
kwargs['HTTP_X_FORWARDED_PROTO'] = 'https' if https else 'http' # Simulates ELB
response = getattr(client, verb.lower())(path, data=data, **kwargs)
if raise_errors and response.status_code not in [200, 302]:
raise NotOkay(response)
return response
@classmethod
def get(cls, path, data=None, client=None, **kwargs):
data = data or {}
return cls._http_verb('get', path, client=client, **kwargs)
@classmethod
def post(cls, path, data=None, client=None, **kwargs):
data = data or {}
return cls._http_verb('post', path, data=data, client=client, **kwargs)
@classmethod
def _api_call(cls, path, data=None, client=None, method="post"):
data = data or {}
response = getattr(cls, method)(path,
data=util.dumps(data),
client=client,
content_type="application/json")
try:
content = util.loads(response.content)
except ValueError:
# Probably not a JSON response, so just return a string.
content = response.content
return content
@classmethod
def api_post(cls, *args, **kwargs):
return cls._api_call(*args, **kwargs)
def parse_response(self, response):
if isinstance(response, basestring):
return html.fromstring(response)
return html.fromstring(response.content)
def css_select(self, response, css_selector):
document = self.parse_response(response)
expression = HTMLTranslator().css_to_xpath(css_selector)
return document.xpath(expression)
def assertNumCssMatches(self, num, response, css_selector):
found = len(self.css_select(response, css_selector))
self.assertEqual(num, found, "Expected {0} but found {1}.".format(num, found))
| mit | -7,697,701,449,717,117,000 | 35.37931 | 113 | 0.603476 | false |
40223226/2015cdbg80420 | static/Brython3.1.1-20150328-091302/Lib/_thread.py | 740 | 4879 | """Drop-in replacement for the thread module.
Meant to be used as a brain-dead substitute so that threaded code does
not need to be rewritten for when the thread module is not present.
Suggested usage is::
try:
import _thread
except ImportError:
import _dummy_thread as _thread
"""
# Exports only things specified by thread documentation;
# skipping obsolete synonyms allocate(), start_new(), exit_thread().
__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
'interrupt_main', 'LockType']
# A dummy value
TIMEOUT_MAX = 2**31
# NOTE: this module can be imported early in the extension building process,
# and so top level imports of other modules should be avoided. Instead, all
# imports are done when needed on a function-by-function basis. Since threads
# are disabled, the import lock should not be an issue anyway (??).
error = RuntimeError
def start_new_thread(function, args, kwargs={}):
"""Dummy implementation of _thread.start_new_thread().
Compatibility is maintained by making sure that ``args`` is a
tuple and ``kwargs`` is a dictionary. If an exception is raised
and it is SystemExit (which can be done by _thread.exit()) it is
caught and nothing is done; all other exceptions are printed out
by using traceback.print_exc().
If the executed function calls interrupt_main the KeyboardInterrupt will be
raised when the function returns.
"""
if type(args) != type(tuple()):
raise TypeError("2nd arg must be a tuple")
if type(kwargs) != type(dict()):
raise TypeError("3rd arg must be a dict")
global _main
_main = False
try:
function(*args, **kwargs)
except SystemExit:
pass
except:
import traceback
traceback.print_exc()
_main = True
global _interrupt
if _interrupt:
_interrupt = False
raise KeyboardInterrupt
def exit():
"""Dummy implementation of _thread.exit()."""
raise SystemExit
def get_ident():
"""Dummy implementation of _thread.get_ident().
Since this module should only be used when _threadmodule is not
available, it is safe to assume that the current process is the
only thread. Thus a constant can be safely returned.
"""
return -1
def allocate_lock():
"""Dummy implementation of _thread.allocate_lock()."""
return LockType()
def stack_size(size=None):
"""Dummy implementation of _thread.stack_size()."""
if size is not None:
raise error("setting thread stack size not supported")
return 0
class LockType(object):
"""Class implementing dummy implementation of _thread.LockType.
Compatibility is maintained by maintaining self.locked_status
which is a boolean that stores the state of the lock. Pickling of
the lock, though, should not be done since if the _thread module is
then used with an unpickled ``lock()`` from here problems could
occur from this class not having atomic methods.
"""
def __init__(self):
self.locked_status = False
def acquire(self, waitflag=None, timeout=-1):
"""Dummy implementation of acquire().
For blocking calls, self.locked_status is automatically set to
True and returned appropriately based on value of
``waitflag``. If it is non-blocking, then the value is
actually checked and not set if it is already acquired. This
is all done so that threading.Condition's assert statements
aren't triggered and throw a little fit.
"""
if waitflag is None or waitflag:
self.locked_status = True
return True
else:
if not self.locked_status:
self.locked_status = True
return True
else:
if timeout > 0:
import time
time.sleep(timeout)
return False
__enter__ = acquire
def __exit__(self, typ, val, tb):
self.release()
def release(self):
"""Release the dummy lock."""
# XXX Perhaps shouldn't actually bother to test? Could lead
# to problems for complex, threaded code.
if not self.locked_status:
raise error
self.locked_status = False
return True
def locked(self):
return self.locked_status
# Used to signal that interrupt_main was called in a "thread"
_interrupt = False
# True when not executing in a "thread"
_main = True
def interrupt_main():
"""Set _interrupt flag to True to have start_new_thread raise
KeyboardInterrupt upon exiting."""
if _main:
raise KeyboardInterrupt
else:
global _interrupt
_interrupt = True
# Brython-specific to avoid circular references between threading and _threading_local
class _local:
pass | gpl-3.0 | -8,655,068,441,595,852,000 | 30.483871 | 86 | 0.651363 | false |
perimosocordiae/scipy | scipy/signal/tests/test_savitzky_golay.py | 21 | 10203 | import numpy as np
from numpy.testing import (assert_allclose, assert_equal,
assert_almost_equal, assert_array_equal,
assert_array_almost_equal)
from scipy.ndimage import convolve1d
from scipy.signal import savgol_coeffs, savgol_filter
from scipy.signal._savitzky_golay import _polyder
def check_polyder(p, m, expected):
dp = _polyder(p, m)
assert_array_equal(dp, expected)
def test_polyder():
cases = [
([5], 0, [5]),
([5], 1, [0]),
([3, 2, 1], 0, [3, 2, 1]),
([3, 2, 1], 1, [6, 2]),
([3, 2, 1], 2, [6]),
([3, 2, 1], 3, [0]),
([[3, 2, 1], [5, 6, 7]], 0, [[3, 2, 1], [5, 6, 7]]),
([[3, 2, 1], [5, 6, 7]], 1, [[6, 2], [10, 6]]),
([[3, 2, 1], [5, 6, 7]], 2, [[6], [10]]),
([[3, 2, 1], [5, 6, 7]], 3, [[0], [0]]),
]
for p, m, expected in cases:
check_polyder(np.array(p).T, m, np.array(expected).T)
#--------------------------------------------------------------------
# savgol_coeffs tests
#--------------------------------------------------------------------
def alt_sg_coeffs(window_length, polyorder, pos):
"""This is an alternative implementation of the SG coefficients.
It uses numpy.polyfit and numpy.polyval. The results should be
equivalent to those of savgol_coeffs(), but this implementation
is slower.
window_length should be odd.
"""
if pos is None:
pos = window_length // 2
t = np.arange(window_length)
unit = (t == pos).astype(int)
h = np.polyval(np.polyfit(t, unit, polyorder), t)
return h
def test_sg_coeffs_trivial():
# Test a trivial case of savgol_coeffs: polyorder = window_length - 1
h = savgol_coeffs(1, 0)
assert_allclose(h, [1])
h = savgol_coeffs(3, 2)
assert_allclose(h, [0, 1, 0], atol=1e-10)
h = savgol_coeffs(5, 4)
assert_allclose(h, [0, 0, 1, 0, 0], atol=1e-10)
h = savgol_coeffs(5, 4, pos=1)
assert_allclose(h, [0, 0, 0, 1, 0], atol=1e-10)
h = savgol_coeffs(5, 4, pos=1, use='dot')
assert_allclose(h, [0, 1, 0, 0, 0], atol=1e-10)
def compare_coeffs_to_alt(window_length, order):
# For the given window_length and order, compare the results
# of savgol_coeffs and alt_sg_coeffs for pos from 0 to window_length - 1.
# Also include pos=None.
for pos in [None] + list(range(window_length)):
h1 = savgol_coeffs(window_length, order, pos=pos, use='dot')
h2 = alt_sg_coeffs(window_length, order, pos=pos)
assert_allclose(h1, h2, atol=1e-10,
err_msg=("window_length = %d, order = %d, pos = %s" %
(window_length, order, pos)))
def test_sg_coeffs_compare():
# Compare savgol_coeffs() to alt_sg_coeffs().
for window_length in range(1, 8, 2):
for order in range(window_length):
compare_coeffs_to_alt(window_length, order)
def test_sg_coeffs_exact():
polyorder = 4
window_length = 9
halflen = window_length // 2
x = np.linspace(0, 21, 43)
delta = x[1] - x[0]
# The data is a cubic polynomial. We'll use an order 4
# SG filter, so the filtered values should equal the input data
# (except within half window_length of the edges).
y = 0.5 * x ** 3 - x
h = savgol_coeffs(window_length, polyorder)
y0 = convolve1d(y, h)
assert_allclose(y0[halflen:-halflen], y[halflen:-halflen])
# Check the same input, but use deriv=1. dy is the exact result.
dy = 1.5 * x ** 2 - 1
h = savgol_coeffs(window_length, polyorder, deriv=1, delta=delta)
y1 = convolve1d(y, h)
assert_allclose(y1[halflen:-halflen], dy[halflen:-halflen])
# Check the same input, but use deriv=2. d2y is the exact result.
d2y = 3.0 * x
h = savgol_coeffs(window_length, polyorder, deriv=2, delta=delta)
y2 = convolve1d(y, h)
assert_allclose(y2[halflen:-halflen], d2y[halflen:-halflen])
def test_sg_coeffs_deriv():
# The data in `x` is a sampled parabola, so using savgol_coeffs with an
# order 2 or higher polynomial should give exact results.
i = np.array([-2.0, 0.0, 2.0, 4.0, 6.0])
x = i ** 2 / 4
dx = i / 2
d2x = np.full_like(i, 0.5)
for pos in range(x.size):
coeffs0 = savgol_coeffs(5, 3, pos=pos, delta=2.0, use='dot')
assert_allclose(coeffs0.dot(x), x[pos], atol=1e-10)
coeffs1 = savgol_coeffs(5, 3, pos=pos, delta=2.0, use='dot', deriv=1)
assert_allclose(coeffs1.dot(x), dx[pos], atol=1e-10)
coeffs2 = savgol_coeffs(5, 3, pos=pos, delta=2.0, use='dot', deriv=2)
assert_allclose(coeffs2.dot(x), d2x[pos], atol=1e-10)
def test_sg_coeffs_deriv_gt_polyorder():
"""
If deriv > polyorder, the coefficients should be all 0.
This is a regression test for a bug where, e.g.,
savgol_coeffs(5, polyorder=1, deriv=2)
raised an error.
"""
coeffs = savgol_coeffs(5, polyorder=1, deriv=2)
assert_array_equal(coeffs, np.zeros(5))
coeffs = savgol_coeffs(7, polyorder=4, deriv=6)
assert_array_equal(coeffs, np.zeros(7))
def test_sg_coeffs_large():
# Test that for large values of window_length and polyorder the array of
# coefficients returned is symmetric. The aim is to ensure that
# no potential numeric overflow occurs.
coeffs0 = savgol_coeffs(31, 9)
assert_array_almost_equal(coeffs0, coeffs0[::-1])
coeffs1 = savgol_coeffs(31, 9, deriv=1)
assert_array_almost_equal(coeffs1, -coeffs1[::-1])
#--------------------------------------------------------------------
# savgol_filter tests
#--------------------------------------------------------------------
def test_sg_filter_trivial():
""" Test some trivial edge cases for savgol_filter()."""
x = np.array([1.0])
y = savgol_filter(x, 1, 0)
assert_equal(y, [1.0])
# Input is a single value. With a window length of 3 and polyorder 1,
# the value in y is from the straight-line fit of (-1,0), (0,3) and
# (1, 0) at 0. This is just the average of the three values, hence 1.0.
x = np.array([3.0])
y = savgol_filter(x, 3, 1, mode='constant')
assert_almost_equal(y, [1.0], decimal=15)
x = np.array([3.0])
y = savgol_filter(x, 3, 1, mode='nearest')
assert_almost_equal(y, [3.0], decimal=15)
x = np.array([1.0] * 3)
y = savgol_filter(x, 3, 1, mode='wrap')
assert_almost_equal(y, [1.0, 1.0, 1.0], decimal=15)
def test_sg_filter_basic():
# Some basic test cases for savgol_filter().
x = np.array([1.0, 2.0, 1.0])
y = savgol_filter(x, 3, 1, mode='constant')
assert_allclose(y, [1.0, 4.0 / 3, 1.0])
y = savgol_filter(x, 3, 1, mode='mirror')
assert_allclose(y, [5.0 / 3, 4.0 / 3, 5.0 / 3])
y = savgol_filter(x, 3, 1, mode='wrap')
assert_allclose(y, [4.0 / 3, 4.0 / 3, 4.0 / 3])
def test_sg_filter_2d():
x = np.array([[1.0, 2.0, 1.0],
[2.0, 4.0, 2.0]])
expected = np.array([[1.0, 4.0 / 3, 1.0],
[2.0, 8.0 / 3, 2.0]])
y = savgol_filter(x, 3, 1, mode='constant')
assert_allclose(y, expected)
y = savgol_filter(x.T, 3, 1, mode='constant', axis=0)
assert_allclose(y, expected.T)
def test_sg_filter_interp_edges():
# Another test with low degree polynomial data, for which we can easily
# give the exact results. In this test, we use mode='interp', so
# savgol_filter should match the exact solution for the entire data set,
# including the edges.
t = np.linspace(-5, 5, 21)
delta = t[1] - t[0]
# Polynomial test data.
x = np.array([t,
3 * t ** 2,
t ** 3 - t])
dx = np.array([np.ones_like(t),
6 * t,
3 * t ** 2 - 1.0])
d2x = np.array([np.zeros_like(t),
np.full_like(t, 6),
6 * t])
window_length = 7
y = savgol_filter(x, window_length, 3, axis=-1, mode='interp')
assert_allclose(y, x, atol=1e-12)
y1 = savgol_filter(x, window_length, 3, axis=-1, mode='interp',
deriv=1, delta=delta)
assert_allclose(y1, dx, atol=1e-12)
y2 = savgol_filter(x, window_length, 3, axis=-1, mode='interp',
deriv=2, delta=delta)
assert_allclose(y2, d2x, atol=1e-12)
# Transpose everything, and test again with axis=0.
x = x.T
dx = dx.T
d2x = d2x.T
y = savgol_filter(x, window_length, 3, axis=0, mode='interp')
assert_allclose(y, x, atol=1e-12)
y1 = savgol_filter(x, window_length, 3, axis=0, mode='interp',
deriv=1, delta=delta)
assert_allclose(y1, dx, atol=1e-12)
y2 = savgol_filter(x, window_length, 3, axis=0, mode='interp',
deriv=2, delta=delta)
assert_allclose(y2, d2x, atol=1e-12)
def test_sg_filter_interp_edges_3d():
# Test mode='interp' with a 3-D array.
t = np.linspace(-5, 5, 21)
delta = t[1] - t[0]
x1 = np.array([t, -t])
x2 = np.array([t ** 2, 3 * t ** 2 + 5])
x3 = np.array([t ** 3, 2 * t ** 3 + t ** 2 - 0.5 * t])
dx1 = np.array([np.ones_like(t), -np.ones_like(t)])
dx2 = np.array([2 * t, 6 * t])
dx3 = np.array([3 * t ** 2, 6 * t ** 2 + 2 * t - 0.5])
# z has shape (3, 2, 21)
z = np.array([x1, x2, x3])
dz = np.array([dx1, dx2, dx3])
y = savgol_filter(z, 7, 3, axis=-1, mode='interp', delta=delta)
assert_allclose(y, z, atol=1e-10)
dy = savgol_filter(z, 7, 3, axis=-1, mode='interp', deriv=1, delta=delta)
assert_allclose(dy, dz, atol=1e-10)
# z has shape (3, 21, 2)
z = np.array([x1.T, x2.T, x3.T])
dz = np.array([dx1.T, dx2.T, dx3.T])
y = savgol_filter(z, 7, 3, axis=1, mode='interp', delta=delta)
assert_allclose(y, z, atol=1e-10)
dy = savgol_filter(z, 7, 3, axis=1, mode='interp', deriv=1, delta=delta)
assert_allclose(dy, dz, atol=1e-10)
# z has shape (21, 3, 2)
z = z.swapaxes(0, 1).copy()
dz = dz.swapaxes(0, 1).copy()
y = savgol_filter(z, 7, 3, axis=0, mode='interp', delta=delta)
assert_allclose(y, z, atol=1e-10)
dy = savgol_filter(z, 7, 3, axis=0, mode='interp', deriv=1, delta=delta)
assert_allclose(dy, dz, atol=1e-10)
| bsd-3-clause | -4,873,323,042,120,880,000 | 32.89701 | 77 | 0.555915 | false |
strobo-inc/pc-nrfutil | nordicsemi/utility/tests/__init__.py | 7 | 1579 | # Copyright (c) 2015, Nordic Semiconductor
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of Nordic Semiconductor ASA nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Package marker file."""
| bsd-3-clause | 5,783,532,005,324,216,000 | 53.448276 | 80 | 0.784041 | false |
jinzo27/infoGrabr | lib/cpp/scons/scons-local-2.0.0.final.0/SCons/__init__.py | 34 | 1629 | """SCons
The main package for the SCons software construction utility.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/__init__.py 5023 2010/06/14 22:05:46 scons"
__version__ = "2.0.0.final.0"
__build__ = "r5023"
__buildsys__ = "scons-dev"
__date__ = "2010/06/14 22:05:46"
__developer__ = "scons"
# make sure compatibility is always in place
import SCons.compat
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit | 2,683,697,884,442,261,500 | 32.244898 | 95 | 0.740331 | false |
stormbeard/pyvmomi-community-samples | samples/create_snapshot.py | 11 | 2756 | # Copyright 2015 Michael Rice <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import atexit
import requests
from pyVim.connect import SmartConnect, Disconnect
from tools import cli
requests.packages.urllib3.disable_warnings()
def setup_args():
parser = cli.build_arg_parser()
parser.add_argument('-j', '--uuid', required=True,
help="UUID of the VirtualMachine you want to find."
" If -i is not used BIOS UUID assumed.")
parser.add_argument('-i', '--instance', required=False,
action='store_true',
help="Flag to indicate the UUID is an instance UUID")
parser.add_argument('-d', '--description', required=False,
help="Description for the snapshot")
parser.add_argument('-n', '--name', required=True,
help="Name for the Snapshot")
my_args = parser.parse_args()
return cli.prompt_for_password(my_args)
args = setup_args()
si = None
instance_search = False
try:
si = SmartConnect(host=args.host,
user=args.user,
pwd=args.password,
port=int(args.port))
atexit.register(Disconnect, si)
except IOError:
pass
if not si:
raise SystemExit("Unable to connect to host with supplied info.")
if args.instance:
instance_search = True
vm = si.content.searchIndex.FindByUuid(None, args.uuid, True, instance_search)
if vm is None:
raise SystemExit("Unable to locate VirtualMachine.")
desc = None
if args.description:
desc = args.description
task = vm.CreateSnapshot_Task(name=args.name,
description=desc,
memory=True,
quiesce=False)
print("Snapshot Completed.")
del vm
vm = si.content.searchIndex.FindByUuid(None, args.uuid, True, instance_search)
snap_info = vm.snapshot
tree = snap_info.rootSnapshotList
while tree[0].childSnapshotList is not None:
print("Snap: {0} => {1}".format(tree[0].name, tree[0].description))
if len(tree[0].childSnapshotList) < 1:
break
tree = tree[0].childSnapshotList
| apache-2.0 | 7,761,043,029,955,407,000 | 31.423529 | 78 | 0.643687 | false |
trustedanalytics/platform-appstack | env_vars_fetcher/cdh_utilities.py | 1 | 15698 | try:
from sshtunnel import SSHTunnelForwarder
except ImportError:
from sshtunnel.sshtunnel import SSHTunnelForwarder
from cm_api.api_client import ApiResource, ApiException
from cm_api.endpoints.services import ApiService, ApiServiceSetupInfo
import paramiko
import json
import yaml
import requests
import subprocess
import zipfile
import shutil
import os
import logger
import base64
class CdhConfExtractor(object):
def __init__(self, config_filename=None):
self._logger = logger.get_info_logger(__name__)
self.config_filename = config_filename if config_filename else 'fetcher_config.yml'
config = self._load_config_yaml(self.config_filename)
self._hostname = config['machines']['cdh-launcher']['hostname']
self._hostport = config['machines']['cdh-launcher']['hostport']
self._username = config['machines']['cdh-launcher']['username']
self._key_filename = config['machines']['cdh-launcher']['key_filename']
self._key = os.path.expanduser(self._key_filename)
self._key_password = config['machines']['cdh-launcher']['key_password']
self._is_openstack = config['openstack_env']
self._is_kerberos = config['kerberos_used']
self._cdh_manager_ip = config['machines']['cdh-manager']['ip']
self._cdh_manager_user = config['machines']['cdh-manager']['user']
self._cdh_manager_sshtunnel_required = config['machines']['cdh-manager']['sshtunnel_required']
self._cdh_manager_password = config['machines']['cdh-manager']['password']
def __enter__(self):
extractor = self
try:
if self._cdh_manager_sshtunnel_required:
self._logger.info('Creating tunnel to CDH-Manager.')
extractor.create_tunnel_to_cdh_manager()
extractor.start_cdh_manager_tunneling()
self._logger.info('Tunnel to CDH-Manager has been created.')
else:
self._logger.info('Connection to CDH-Manager host without ssh tunnel.')
self._local_bind_address = self.extract_cdh_manager_host()
self._local_bind_port = 7180
return extractor
except Exception as exc:
            self._logger.error('Cannot create tunnel to CDH-Manager machine.')
raise exc
def __exit__(self, exc_type, exc_val, exc_tb):
try:
if self._cdh_manager_sshtunnel_required:
self.stop_cdh_manager_tunneling()
                self._logger.info('Tunnelling to CDH-Manager stopped.')
except Exception as exc:
self._logger.error('Cannot close tunnel to CDH-Manager machine.')
raise exc
# Cdh launcher methods
def create_ssh_connection(self, hostname, username, key_filename, key_password):
try:
self._logger.info('Creating connection to remote host {0}.'.format(hostname))
self.ssh_connection = paramiko.SSHClient()
self.ssh_connection.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.ssh_connection.connect(hostname, username=username, key_filename=key_filename, password=key_password)
self._logger.info('Connection to host {0} established.'.format(hostname))
except Exception as exc:
            self._logger.error('Cannot create connection to host {0} machine. Check your settings '
                               'in fetcher_config.yml file.'.format(hostname))
raise exc
def close_ssh_connection(self):
try:
self.ssh_connection.close()
self._logger.info('Connection to remote host closed.')
except Exception as exc:
self._logger.error('Cannot close connection to the remote host.')
raise exc
def ssh_call_command(self, command, subcommands=None):
self._logger.info('Calling remote command: "{0}" with subcommands "{1}"'.format(command, subcommands))
ssh_in, ssh_out, ssh_err = self.ssh_connection.exec_command(command, get_pty=True)
        if subcommands is not None:
for subcommand in subcommands:
ssh_in.write(subcommand + '\n')
ssh_in.flush()
return ssh_out.read() if ssh_out is not None else ssh_err.read()
def extract_cdh_manager_host(self):
self._logger.info('Extracting CDH-Manager address.')
if self._cdh_manager_ip is None:
self.create_ssh_connection(self._hostname, self._username, self._key_filename, self._key_password)
if self._is_openstack:
ansible_ini = self.ssh_call_command('cat ansible-cdh/platform-ansible/inventory/cdh')
else:
ansible_ini = self.ssh_call_command('cat ansible-cdh/inventory/cdh')
self._cdh_manager_ip = self._get_host_ip('cdh-manager', ansible_ini)
self.close_ssh_connection()
        self._logger.info('CDH-Manager address extracted: {}'.format(self._cdh_manager_ip))
return self._cdh_manager_ip
# Cdh manager methods
def create_tunnel_to_cdh_manager(self, local_bind_address='localhost', local_bind_port=7180, remote_bind_port=7180):
self._local_bind_address = local_bind_address
self._local_bind_port = local_bind_port
self.cdh_manager_tunnel = SSHTunnelForwarder(
(self._hostname, self._hostport),
ssh_username=self._username,
local_bind_address=(local_bind_address, local_bind_port),
remote_bind_address=(self.extract_cdh_manager_host(), remote_bind_port),
ssh_private_key_password=self._key_password,
ssh_private_key=self._key
)
def start_cdh_manager_tunneling(self):
try:
self.cdh_manager_tunnel.start()
except Exception as e:
self._logger.error('Cannot start tunnel: ' + e.message)
def stop_cdh_manager_tunneling(self):
try:
self.cdh_manager_tunnel.stop()
except Exception as e:
self._logger.error('Cannot stop tunnel: ' + e.message)
def extract_cdh_manager_details(self, settings):
for host in settings['hosts']:
if 'cdh-manager' in host['hostname']:
return host
def extract_nodes_info(self, name, settings):
nodes = []
for host in settings['hosts']:
if name in host['hostname']:
nodes.append(host)
return nodes
def extract_service_namenode(self, service_name, role_name, settings):
hdfs_service = self._find_item_by_attr_value(service_name, 'name', settings['clusters'][0]['services'])
hdfs_namenode = self._find_item_by_attr_value(role_name, 'name', hdfs_service['roles'])
host_id = hdfs_namenode['hostRef']['hostId']
return self._find_item_by_attr_value(host_id, 'hostId', settings['hosts'])['hostname']
def get_client_config_for_service(self, service_name):
result = requests.get('http://{0}:{1}/api/v10/clusters/CDH-cluster/services/{2}/clientConfig'.format(self._local_bind_address, self._local_bind_port, service_name))
return base64.standard_b64encode(result.content)
def generate_keytab(self, principal_name):
self._logger.info('Generating keytab for {} principal.'.format(principal_name))
self.create_ssh_connection(self._hostname, self._username, self._key_filename, self._key_password)
sftp = self.ssh_connection.open_sftp()
sftp.put('utils/generate_keytab_script.sh', '/tmp/generate_keytab_script.sh')
self.ssh_call_command('scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no /tmp/generate_keytab_script.sh {0}:/tmp/'.format(self._cdh_manager_ip))
self.ssh_call_command('ssh -t {0} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no "chmod 700 /tmp/generate_keytab_script.sh"'.format(self._cdh_manager_ip))
keytab_hash = self.ssh_call_command('ssh -t {0} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no "/tmp/generate_keytab_script.sh {1}"'
.format(self._cdh_manager_ip, principal_name))
self.close_ssh_connection()
lines = keytab_hash.splitlines()
self._logger.info('Keytab for {} principal has been generated.'.format(principal_name))
return '"{}"'.format(''.join(lines[2:-2]))
def generate_base64_for_file(self, file_path, hostname):
self._logger.info('Generating base64 for {} file.'.format(file_path))
self.create_ssh_connection(self._hostname, self._username, self._key_filename, self._key_password)
base64_file_hash = self.ssh_call_command('ssh -t {0} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no "base64 {1}"'.format(hostname, file_path))
self.close_ssh_connection()
lines = base64_file_hash.splitlines()
self._logger.info('Base64 hash for {0} file on {1} machine has been generated.'.format(file_path, hostname))
return '"{}"'.format(''.join(lines[2:-2]))
def get_all_deployments_conf(self):
result = {}
deployments_settings = json.loads(requests.get('http://' + self._local_bind_address + ':'
+ str(self._local_bind_port) + '/api/v10/cm/deployment',
auth=(self._cdh_manager_user, self._cdh_manager_password)).content)
result['cloudera_manager_internal_host'] = self.extract_cdh_manager_details(deployments_settings)['hostname']
if self._is_kerberos:
result['kerberos_host'] = result['cloudera_manager_internal_host']
result['hdfs_keytab_value'] = self.generate_keytab('hdfs')
result['auth_gateway_keytab_value'] = self.generate_keytab('authgateway/sys')
result['hgm_keytab_value'] = self.generate_keytab('hgm/sys')
result['vcap_keytab_value'] = self.generate_keytab('vcap')
result['krb5_base64'] = self.generate_base64_for_file('/etc/krb5.conf', self._cdh_manager_ip)
result['kerberos_cacert'] = self.generate_base64_for_file('/var/krb5kdc/cacert.pem', self._cdh_manager_ip)
helper = CdhApiHelper(ApiResource(self._local_bind_address, username=self._cdh_manager_user, password=self._cdh_manager_password, version=9))
hgm_service = helper.get_service_from_cdh('HADOOPGROUPSMAPPING')
result['hgm_adress'] = 'http://' + helper.get_host(hgm_service, 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER') + ':' \
+ helper.get_entry_from_group(hgm_service, 'rest_port', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
result['hgm_password'] = helper.get_entry_from_group(hgm_service, 'basic_auth_pass', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
result['hgm_username'] = helper.get_entry_from_group(hgm_service, 'basic_auth_user', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
sentry_service = helper.get_service_from_cdh('SENTRY')
result['sentry_port'] = helper.get_entry(sentry_service, 'sentry_service_server_rpc_port')
result['sentry_address'] = helper.get_host(sentry_service)
result['sentry_keytab_value'] = self.generate_keytab('hive/sys')
result['auth_gateway_profile'] = 'cloud,zookeeper-auth-gateway,hdfs-auth-gateway,kerberos-hgm-auth-gateway,sentry-auth-gateway'
else:
result['sentry_port'] = "''"
result['sentry_address'] = "''"
result['sentry_keytab_value'] = "''"
result['hdfs_keytab_value'] = "''"
result['auth_gateway_keytab_value'] = "''"
result['vcap_keytab_value'] = '""'
result['hgm_keytab_value'] = '""'
result['krb5_base64'] = '""'
result['kerberos_cacert'] = '""'
result['auth_gateway_profile'] = 'cloud,zookeeper-auth-gateway,hdfs-auth-gateway,https-hgm-auth-gateway'
helper = CdhApiHelper(ApiResource(self._local_bind_address, username=self._cdh_manager_user, password=self._cdh_manager_password, version=9))
hgm_service = helper.get_service_from_cdh('HADOOPGROUPSMAPPING')
result['hgm_adress'] = 'https://' + helper.get_host(hgm_service, 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER') + ':' \
+ helper.get_entry_from_group(hgm_service, 'rest_port', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
result['hgm_password'] = helper.get_entry_from_group(hgm_service, 'basic_auth_pass', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
result['hgm_username'] = helper.get_entry_from_group(hgm_service, 'basic_auth_user', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
master_nodes = self.extract_nodes_info('cdh-master', deployments_settings)
for i, node in enumerate(master_nodes):
result['master_node_host_' + str(i+1)] = node['hostname']
result['namenode_internal_host'] = self.extract_service_namenode('HDFS', 'HDFS-NAMENODE', deployments_settings)
result['hue_node'] = self.extract_service_namenode('HUE', 'HUE-HUE_SERVER', deployments_settings)
result['h2o_node'] = self.extract_nodes_info('cdh-worker-0', deployments_settings)[0]['hostname']
result['arcadia_node'] = self.extract_nodes_info('cdh-worker-0', deployments_settings)[0]['hostname']
result['import_hadoop_conf_hdfs'] = self.get_client_config_for_service('HDFS')
result['import_hadoop_conf_hbase'] = self.get_client_config_for_service('HBASE')
result['import_hadoop_conf_yarn'] = self.get_client_config_for_service('YARN')
return result
# helpful methods
def _find_item_by_attr_value(self, attr_value, attr_name, array_with_dicts):
return next(item for item in array_with_dicts if item[attr_name] == attr_value)
def _get_host_ip(self, host, ansible_ini):
host_info = []
for line in ansible_ini.split('\n'):
if host in line:
host_info.append(line.strip())
return host_info[host_info.index('[' + host + ']') + 1].split(' ')[1].split('=')[1]
def _load_config_yaml(self, filename):
with open(filename, 'r') as stream:
return yaml.load(stream)
class CdhApiHelper(object):
def __init__(self, cdhApi):
self.cdhApi = cdhApi
def get_service_from_cdh(self, name):
cluster = self.cdhApi.get_all_clusters()[0]
try:
return next(service for service in cluster.get_all_services() if service.type == name)
except StopIteration:
raise NoCdhServiceError('No {} in CDH services.'.format(name))
# get host ip for service or specified service role
def get_host(self, service, role = None):
if role is None:
id = service.get_all_roles()[0].hostRef.hostId
else:
id = service.get_role(role).hostRef.hostId
return self.cdhApi.get_host(id).hostname
def get_entry(self, service, name):
config = service.get_all_roles()[0].get_config('full')
for config_entry in config:
if name == config_entry:
entry = config[config_entry].value or config[config_entry].default
return entry
def get_entry_from_group(self, service, name, group):
config = service.get_role_config_group(group).get_config('full')
for config_entry in config:
if name == config_entry:
entry = config[config_entry].value or config[config_entry].default
return entry
class NoCdhServiceError(Exception):
pass
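def _example_fetch_deployments():
    """Illustrative sketch only, not part of the original fetcher: shows the
    intended use of CdhConfExtractor as a context manager, so the optional SSH
    tunnel to CDH-Manager is opened before the API calls and closed afterwards.
    The config filename is just the module default; every connection detail is
    assumed to come from that YAML file."""
    with CdhConfExtractor('fetcher_config.yml') as extractor:
        # Returns a dict with hosts, keytabs and base64-encoded client configs.
        return extractor.get_all_deployments_conf()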
| apache-2.0 | 1,212,905,699,057,743,400 | 53.318339 | 175 | 0.636005 | false |
richard-willowit/odoo | addons/l10n_fr_hr_payroll/report/fiche_paye.py | 14 | 1711 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
class FichePayeParser(models.AbstractModel):
_name = 'report.l10n_fr_hr_payroll.report_l10n_fr_fiche_paye'
def get_payslip_lines(self, objs):
res = []
ids = []
for item in objs:
if item.appears_on_payslip is True and not item.salary_rule_id.parent_rule_id:
ids.append(item.id)
if ids:
res = self.env['hr.payslip.line'].browse(ids)
return res
def get_total_by_rule_category(self, obj, code):
category_total = 0
category_id = self.env['hr.salary.rule.category'].search([('code', '=', code)], limit=1).id
if category_id:
line_ids = self.env['hr.payslip.line'].search([('slip_id', '=', obj.id), ('category_id', 'child_of', category_id)])
for line in line_ids:
category_total += line.total
return category_total
def get_employer_line(self, obj, parent_line):
return self.env['hr.payslip.line'].search([('slip_id', '=', obj.id), ('salary_rule_id.parent_rule_id.id', '=', parent_line.salary_rule_id.id)], limit=1)
@api.model
def get_report_values(self, docids, data=None):
payslip = self.env['hr.payslip'].browse(docids)
return {
'doc_ids': docids,
'doc_model': 'hr.payslip',
'data': data,
'docs': payslip,
'lang': "fr_FR",
'get_payslip_lines': self.get_payslip_lines,
'get_total_by_rule_category': self.get_total_by_rule_category,
'get_employer_line': self.get_employer_line,
}
| gpl-3.0 | -2,913,185,774,306,629,600 | 37.886364 | 160 | 0.575687 | false |
BT-ojossen/odoo | addons/mail/mail_group_menu.py | 334 | 2631 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import osv
from openerp.osv import fields
class ir_ui_menu(osv.osv):
""" Override of ir.ui.menu class. When adding mail_thread module, each
new mail.group will create a menu entry. This overrides checks that
the current user is in the mail.group followers. If not, the menu
entry is taken off the list of menu ids. This way the user will see
menu entries for the mail.group he is following.
"""
_inherit = 'ir.ui.menu'
_columns = {
'mail_group_id': fields.many2one('mail.group', 'Mail Group')
}
def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False):
""" Remove mail.group menu entries when the user is not a follower."""
ids = super(ir_ui_menu, self).search(cr, uid, args, offset=offset,
limit=limit, order=order,
context=context, count=False)
if ids:
cr.execute("""
SELECT id FROM ir_ui_menu m
WHERE m.mail_group_id IS NULL OR EXISTS (
SELECT 1 FROM mail_followers
WHERE res_model = 'mail.group' AND res_id = m.mail_group_id
AND partner_id = (SELECT partner_id FROM res_users WHERE id = %s)
) AND id in %s
""", (uid, tuple(ids)))
# Preserve original search order
visible_ids = set(x[0] for x in cr.fetchall())
ids = [i for i in ids if i in visible_ids]
if count:
return len(ids)
return ids
| agpl-3.0 | 810,033,660,480,252,000 | 45.157895 | 97 | 0.569745 | false |
synergeticsedx/deployment-wipro | lms/djangoapps/commerce/api/v1/views.py | 60 | 2633 | """ API v1 views. """
import logging
from django.http import Http404
from edx_rest_api_client import exceptions
from rest_framework.authentication import SessionAuthentication
from rest_framework.views import APIView
from rest_framework.generics import RetrieveUpdateAPIView, ListAPIView
from rest_framework.permissions import IsAuthenticated
from rest_framework_oauth.authentication import OAuth2Authentication
from commerce.api.v1.models import Course
from commerce.api.v1.permissions import ApiKeyOrModelPermission
from commerce.api.v1.serializers import CourseSerializer
from course_modes.models import CourseMode
from openedx.core.djangoapps.commerce.utils import ecommerce_api_client
from openedx.core.lib.api.mixins import PutAsCreateMixin
from util.json_request import JsonResponse
log = logging.getLogger(__name__)
class CourseListView(ListAPIView):
""" List courses and modes. """
authentication_classes = (OAuth2Authentication, SessionAuthentication,)
permission_classes = (IsAuthenticated,)
serializer_class = CourseSerializer
pagination_class = None
def get_queryset(self):
return list(Course.iterator())
class CourseRetrieveUpdateView(PutAsCreateMixin, RetrieveUpdateAPIView):
""" Retrieve, update, or create courses/modes. """
lookup_field = 'id'
lookup_url_kwarg = 'course_id'
model = CourseMode
authentication_classes = (OAuth2Authentication, SessionAuthentication,)
permission_classes = (ApiKeyOrModelPermission,)
serializer_class = CourseSerializer
# Django Rest Framework v3 requires that we provide a queryset.
# Note that we're overriding `get_object()` below to return a `Course`
# rather than a CourseMode, so this isn't really used.
queryset = CourseMode.objects.all()
def get_object(self, queryset=None):
course_id = self.kwargs.get(self.lookup_url_kwarg)
course = Course.get(course_id)
if course:
return course
raise Http404
def pre_save(self, obj):
# There is nothing to pre-save. The default behavior changes the Course.id attribute from
# a CourseKey to a string, which is not desired.
pass
class OrderView(APIView):
""" Retrieve order details. """
authentication_classes = (SessionAuthentication,)
permission_classes = (IsAuthenticated,)
def get(self, request, number):
""" HTTP handler. """
try:
order = ecommerce_api_client(request.user).orders(number).get()
return JsonResponse(order)
except exceptions.HttpNotFoundError:
return JsonResponse(status=404)
| agpl-3.0 | -2,213,388,010,154,755,300 | 34.106667 | 97 | 0.731865 | false |
tombstone/models | research/fivo/fivo/models/base.py | 4 | 14517 | # Copyright 2018 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Reusable model classes for FIVO."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sonnet as snt
import tensorflow as tf
from fivo import nested_utils as nested
tfd = tf.contrib.distributions
class ELBOTrainableSequenceModel(object):
"""An abstract class for ELBO-trainable sequence models to extend.
Because the ELBO, IWAE, and FIVO bounds all accept the same arguments,
any model that is ELBO-trainable is also IWAE- and FIVO-trainable.
"""
def zero_state(self, batch_size, dtype):
"""Returns the initial state of the model as a Tensor or tuple of Tensors.
Args:
batch_size: The batch size.
dtype: The datatype to use for the state.
"""
raise NotImplementedError("zero_state not yet implemented.")
def set_observations(self, observations, seq_lengths):
"""Sets the observations for the model.
This method provides the model with all observed variables including both
inputs and targets. It will be called before running any computations with
the model that require the observations, e.g. training the model or
computing bounds, and should be used to run any necessary preprocessing
steps.
Args:
observations: A potentially nested set of Tensors containing
all observations for the model, both inputs and targets. Typically
a set of Tensors with shape [max_seq_len, batch_size, data_size].
seq_lengths: A [batch_size] Tensor of ints encoding the length of each
sequence in the batch (sequences can be padded to a common length).
"""
self.observations = observations
self.max_seq_len = tf.reduce_max(seq_lengths)
self.observations_ta = nested.tas_for_tensors(
observations, self.max_seq_len, clear_after_read=False)
self.seq_lengths = seq_lengths
def propose_and_weight(self, state, t):
"""Propogates model state one timestep and computes log weights.
This method accepts the current state of the model and computes the state
for the next timestep as well as the incremental log weight of each
element in the batch.
Args:
state: The current state of the model.
t: A scalar integer Tensor representing the current timestep.
Returns:
next_state: The state of the model after one timestep.
log_weights: A [batch_size] Tensor containing the incremental log weights.
"""
raise NotImplementedError("propose_and_weight not yet implemented.")
DEFAULT_INITIALIZERS = {"w": tf.contrib.layers.xavier_initializer(),
"b": tf.zeros_initializer()}
class ConditionalNormalDistribution(object):
"""A Normal distribution conditioned on Tensor inputs via a fc network."""
def __init__(self, size, hidden_layer_sizes, sigma_min=0.0,
raw_sigma_bias=0.25, hidden_activation_fn=tf.nn.relu,
initializers=None, name="conditional_normal_distribution"):
"""Creates a conditional Normal distribution.
Args:
size: The dimension of the random variable.
hidden_layer_sizes: The sizes of the hidden layers of the fully connected
network used to condition the distribution on the inputs.
sigma_min: The minimum standard deviation allowed, a scalar.
raw_sigma_bias: A scalar that is added to the raw standard deviation
output from the fully connected network. Set to 0.25 by default to
prevent standard deviations close to 0.
hidden_activation_fn: The activation function to use on the hidden layers
of the fully connected network.
      initializers: The variable initializers to use for the fully connected
network. The network is implemented using snt.nets.MLP so it must
be a dictionary mapping the keys 'w' and 'b' to the initializers for
the weights and biases. Defaults to xavier for the weights and zeros
for the biases when initializers is None.
name: The name of this distribution, used for sonnet scoping.
"""
self.sigma_min = sigma_min
self.raw_sigma_bias = raw_sigma_bias
self.name = name
self.size = size
if initializers is None:
initializers = DEFAULT_INITIALIZERS
self.fcnet = snt.nets.MLP(
output_sizes=hidden_layer_sizes + [2*size],
activation=hidden_activation_fn,
initializers=initializers,
activate_final=False,
use_bias=True,
name=name + "_fcnet")
def condition(self, tensor_list, **unused_kwargs):
"""Computes the parameters of a normal distribution based on the inputs."""
inputs = tf.concat(tensor_list, axis=1)
outs = self.fcnet(inputs)
mu, sigma = tf.split(outs, 2, axis=1)
sigma = tf.maximum(tf.nn.softplus(sigma + self.raw_sigma_bias),
self.sigma_min)
return mu, sigma
def __call__(self, *args, **kwargs):
"""Creates a normal distribution conditioned on the inputs."""
mu, sigma = self.condition(args, **kwargs)
return tf.contrib.distributions.Normal(loc=mu, scale=sigma)
class ConditionalBernoulliDistribution(object):
"""A Bernoulli distribution conditioned on Tensor inputs via a fc net."""
def __init__(self, size, hidden_layer_sizes, hidden_activation_fn=tf.nn.relu,
initializers=None, bias_init=0.0,
name="conditional_bernoulli_distribution"):
"""Creates a conditional Bernoulli distribution.
Args:
size: The dimension of the random variable.
hidden_layer_sizes: The sizes of the hidden layers of the fully connected
network used to condition the distribution on the inputs.
hidden_activation_fn: The activation function to use on the hidden layers
of the fully connected network.
      initializers: The variable initializers to use for the fully connected
network. The network is implemented using snt.nets.MLP so it must
be a dictionary mapping the keys 'w' and 'b' to the initializers for
the weights and biases. Defaults to xavier for the weights and zeros
for the biases when initializers is None.
bias_init: A scalar or vector Tensor that is added to the output of the
fully-connected network that parameterizes the mean of this
distribution.
name: The name of this distribution, used for sonnet scoping.
"""
self.bias_init = bias_init
self.size = size
if initializers is None:
initializers = DEFAULT_INITIALIZERS
self.fcnet = snt.nets.MLP(
output_sizes=hidden_layer_sizes + [size],
activation=hidden_activation_fn,
initializers=initializers,
activate_final=False,
use_bias=True,
name=name + "_fcnet")
def condition(self, tensor_list):
"""Computes the p parameter of the Bernoulli distribution."""
inputs = tf.concat(tensor_list, axis=1)
return self.fcnet(inputs) + self.bias_init
def __call__(self, *args):
p = self.condition(args)
return tf.contrib.distributions.Bernoulli(logits=p)
class NormalApproximatePosterior(ConditionalNormalDistribution):
"""A Normally-distributed approx. posterior with res_q parameterization."""
def __init__(self, size, hidden_layer_sizes, sigma_min=0.0,
raw_sigma_bias=0.25, hidden_activation_fn=tf.nn.relu,
initializers=None, smoothing=False,
name="conditional_normal_distribution"):
super(NormalApproximatePosterior, self).__init__(
size, hidden_layer_sizes, sigma_min=sigma_min,
raw_sigma_bias=raw_sigma_bias,
hidden_activation_fn=hidden_activation_fn, initializers=initializers,
name=name)
self.smoothing = smoothing
def condition(self, tensor_list, prior_mu, smoothing_tensors=None):
"""Generates the mean and variance of the normal distribution.
Args:
tensor_list: The list of Tensors to condition on. Will be concatenated and
fed through a fully connected network.
prior_mu: The mean of the prior distribution associated with this
approximate posterior. Will be added to the mean produced by
this approximate posterior, in res_q fashion.
smoothing_tensors: A list of Tensors. If smoothing is True, these Tensors
will be concatenated with the tensors in tensor_list.
Returns:
mu: The mean of the approximate posterior.
sigma: The standard deviation of the approximate posterior.
"""
if self.smoothing:
tensor_list.extend(smoothing_tensors)
mu, sigma = super(NormalApproximatePosterior, self).condition(tensor_list)
return mu + prior_mu, sigma
class NonstationaryLinearDistribution(object):
"""A set of loc-scale distributions that are linear functions of inputs.
This class defines a series of location-scale distributions such that
the means are learnable linear functions of the inputs and the log variances
are learnable constants. The functions and log variances are different across
timesteps, allowing the distributions to be nonstationary.
"""
def __init__(self,
num_timesteps,
inputs_per_timestep=None,
outputs_per_timestep=None,
initializers=None,
variance_min=0.0,
output_distribution=tfd.Normal,
dtype=tf.float32):
"""Creates a NonstationaryLinearDistribution.
Args:
num_timesteps: The number of timesteps, i.e. the number of distributions.
inputs_per_timestep: A list of python ints, the dimension of inputs to the
linear function at each timestep. If not provided, the dimension at each
timestep is assumed to be 1.
outputs_per_timestep: A list of python ints, the dimension of the output
distribution at each timestep. If not provided, the dimension at each
timestep is assumed to be 1.
      initializers: A dictionary containing initializers for the variables. The
initializer under the key 'w' is used for the weights in the linear
function and the initializer under the key 'b' is used for the biases.
Defaults to xavier initialization for the weights and zeros for the
biases.
variance_min: Python float, the minimum variance of each distribution.
      output_distribution: A location-scale subclass of tfd.Distribution that
defines the output distribution, e.g. Normal.
dtype: The dtype of the weights and biases.
"""
if not initializers:
initializers = DEFAULT_INITIALIZERS
if not inputs_per_timestep:
inputs_per_timestep = [1] * num_timesteps
if not outputs_per_timestep:
outputs_per_timestep = [1] * num_timesteps
self.num_timesteps = num_timesteps
self.variance_min = variance_min
self.initializers = initializers
self.dtype = dtype
self.output_distribution = output_distribution
def _get_variables_ta(shapes, name, initializer, trainable=True):
"""Creates a sequence of variables and stores them in a TensorArray."""
# Infer shape if all shapes are equal.
first_shape = shapes[0]
infer_shape = all(shape == first_shape for shape in shapes)
ta = tf.TensorArray(
dtype=dtype, size=len(shapes), dynamic_size=False,
clear_after_read=False, infer_shape=infer_shape)
for t, shape in enumerate(shapes):
var = tf.get_variable(
name % t, shape=shape, initializer=initializer, trainable=trainable)
ta = ta.write(t, var)
return ta
bias_shapes = [[num_outputs] for num_outputs in outputs_per_timestep]
self.log_variances = _get_variables_ta(
bias_shapes, "proposal_log_variance_%d", initializers["b"])
self.mean_biases = _get_variables_ta(
bias_shapes, "proposal_b_%d", initializers["b"])
weight_shapes = zip(inputs_per_timestep, outputs_per_timestep)
self.mean_weights = _get_variables_ta(
weight_shapes, "proposal_w_%d", initializers["w"])
self.shapes = tf.TensorArray(
dtype=tf.int32, size=num_timesteps,
dynamic_size=False, clear_after_read=False).unstack(weight_shapes)
def __call__(self, t, inputs):
"""Computes the distribution at timestep t.
Args:
t: Scalar integer Tensor, the current timestep. Must be in
[0, num_timesteps).
inputs: The inputs to the linear function parameterizing the mean of
the current distribution. A Tensor of shape [batch_size, num_inputs_t].
Returns:
A tfd.Distribution subclass representing the distribution at timestep t.
"""
b = self.mean_biases.read(t)
w = self.mean_weights.read(t)
shape = self.shapes.read(t)
w = tf.reshape(w, shape)
b = tf.reshape(b, [shape[1], 1])
log_variance = self.log_variances.read(t)
scale = tf.sqrt(tf.maximum(tf.exp(log_variance), self.variance_min))
loc = tf.matmul(w, inputs, transpose_a=True) + b
return self.output_distribution(loc=loc, scale=scale)
def encode_all(inputs, encoder):
"""Encodes a timeseries of inputs with a time independent encoder.
Args:
inputs: A [time, batch, feature_dimensions] tensor.
encoder: A network that takes a [batch, features_dimensions] input and
encodes the input.
Returns:
A [time, batch, encoded_feature_dimensions] output tensor.
"""
input_shape = tf.shape(inputs)
num_timesteps, batch_size = input_shape[0], input_shape[1]
reshaped_inputs = tf.reshape(inputs, [-1, inputs.shape[-1]])
inputs_encoded = encoder(reshaped_inputs)
inputs_encoded = tf.reshape(inputs_encoded,
[num_timesteps, batch_size, encoder.output_size])
return inputs_encoded
def ta_for_tensor(x, **kwargs):
"""Creates a TensorArray for the input tensor."""
return tf.TensorArray(
x.dtype, tf.shape(x)[0], dynamic_size=False, **kwargs).unstack(x)
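def example_conditional_normal_usage():
  """Illustrative sketch only, not part of the original FIVO code: conditions a
  ConditionalNormalDistribution on a batch of inputs and samples from it. The
  batch size, feature size, and hidden layer sizes are arbitrary assumptions."""
  cond_normal = ConditionalNormalDistribution(
      size=4, hidden_layer_sizes=[32, 32], name="example_cond_normal")
  inputs = tf.zeros([8, 16])  # [batch_size, num_features], assumed shapes.
  dist = cond_normal(inputs)  # A tfd.Normal with learned loc and scale.
  return dist.sample()  # A [8, 4] Tensor drawn from the distribution.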
| apache-2.0 | -8,618,805,646,749,898,000 | 41.447368 | 80 | 0.68871 | false |
rhdedgar/openshift-tools | openshift/installer/vendored/openshift-ansible-3.6.173/roles/lib_openshift/src/class/oc_serviceaccount_secret.py | 66 | 4640 | # pylint: skip-file
# flake8: noqa
class OCServiceAccountSecret(OpenShiftCLI):
''' Class to wrap the oc command line tools '''
kind = 'sa'
def __init__(self, config, verbose=False):
''' Constructor for OpenshiftOC '''
super(OCServiceAccountSecret, self).__init__(config.namespace, kubeconfig=config.kubeconfig, verbose=verbose)
self.config = config
self.verbose = verbose
self._service_account = None
@property
def service_account(self):
''' Property for the service account '''
if not self._service_account:
self.get()
return self._service_account
@service_account.setter
def service_account(self, data):
''' setter for the service account '''
self._service_account = data
def exists(self, in_secret):
''' verifies if secret exists in the service account '''
result = self.service_account.find_secret(in_secret)
if not result:
return False
return True
def get(self):
''' get the service account definition from the master '''
sao = self._get(OCServiceAccountSecret.kind, self.config.name)
if sao['returncode'] == 0:
self.service_account = ServiceAccount(content=sao['results'][0])
sao['results'] = self.service_account.get('secrets')
return sao
def delete(self):
''' delete secrets '''
modified = []
for rem_secret in self.config.secrets:
modified.append(self.service_account.delete_secret(rem_secret))
if any(modified):
return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
return {'returncode': 0, 'changed': False}
def put(self):
''' place secrets into sa '''
modified = False
for add_secret in self.config.secrets:
if not self.service_account.find_secret(add_secret):
self.service_account.add_secret(add_secret)
modified = True
if modified:
return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
return {'returncode': 0, 'changed': False}
@staticmethod
# pylint: disable=too-many-return-statements,too-many-branches
# TODO: This function should be refactored into its individual parts.
def run_ansible(params, check_mode):
''' run the ansible idempotent code '''
sconfig = ServiceAccountConfig(params['service_account'],
params['namespace'],
params['kubeconfig'],
[params['secret']],
None)
oc_sa_sec = OCServiceAccountSecret(sconfig, verbose=params['debug'])
state = params['state']
api_rval = oc_sa_sec.get()
#####
# Get
#####
if state == 'list':
return {'changed': False, 'results': api_rval['results'], 'state': "list"}
########
# Delete
########
if state == 'absent':
if oc_sa_sec.exists(params['secret']):
if check_mode:
                    return {'changed': True, 'msg': 'Would have removed the ' + \
                            'secret from the service account.'}
api_rval = oc_sa_sec.delete()
return {'changed': True, 'results': api_rval, 'state': "absent"}
return {'changed': False, 'state': "absent"}
if state == 'present':
########
# Create
########
if not oc_sa_sec.exists(params['secret']):
if check_mode:
return {'changed': True, 'msg': 'Would have added the ' + \
'secret to the service account.'}
# Create it here
api_rval = oc_sa_sec.put()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
# return the created object
api_rval = oc_sa_sec.get()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
return {'changed': True, 'results': api_rval, 'state': "present"}
return {'changed': False, 'results': api_rval, 'state': "present"}
return {'failed': True,
'changed': False,
'msg': 'Unknown state passed. %s' % state,
'state': 'unknown'}
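def _example_run_ansible_call():
    ''' Illustrative sketch only, not part of the generated Ansible module:
        shows the shape of the params dict that OCServiceAccountSecret.run_ansible
        expects. Every value below is an assumption used for demonstration. '''
    params = {'kubeconfig': '/etc/origin/master/admin.kubeconfig',
              'state': 'present',
              'namespace': 'default',
              'service_account': 'builder',
              'secret': 'my-registry-secret',
              'debug': False}
    # check_mode=True only reports what would change; it does not modify the cluster.
    return OCServiceAccountSecret.run_ansible(params, check_mode=True)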
| apache-2.0 | 8,433,338,444,175,442,000 | 32.623188 | 119 | 0.531466 | false |
toontownfunserver/Panda3D-1.9.0 | direct/leveleditor/AnimMgrBase.py | 3 | 17489 | """
Defines AnimMgrBase
"""
import os, wx, math
from direct.interval.IntervalGlobal import *
from panda3d.core import VBase3,VBase4
import ObjectGlobals as OG
import AnimGlobals as AG
class AnimMgrBase:
""" AnimMgr will create, manage, update animations in the scene """
def __init__(self, editor):
self.editor = editor
self.graphEditorCounter = 0
self.keyFramesInfo = {}
self.curveAnimation = {}
#normal properties
self.lerpFuncs={
'H' : self.lerpFuncH,
'P' : self.lerpFuncP,
'R' : self.lerpFuncR,
'SX' : self.lerpFuncSX,
'SY' : self.lerpFuncSY,
'SZ' : self.lerpFuncSZ,
'CR' : self.lerpFuncCR,
'CG' : self.lerpFuncCG,
'CB' : self.lerpFuncCB,
'CA' : self.lerpFuncCA
}
#Properties which has animation curves
self.curveLerpFuncs={
'X' : [ self.lerpFuncX, self.lerpCurveFuncX ],
'Y' : [ self.lerpFuncY, self.lerpCurveFuncY ],
'Z' : [ self.lerpFuncZ, self.lerpCurveFuncZ ]
}
def reset(self):
self.keyFramesInfo = {}
self.curveAnimation = {}
def generateKeyFrames(self):
#generate keyFrame list
self.keyFrames = []
for property in self.keyFramesInfo.keys():
for frameInfo in self.keyFramesInfo[property]:
frame = frameInfo[AG.FRAME]
exist = False
for keyFrame in self.keyFrames:
if frame == keyFrame:
exist = True
break
if exist == False:
self.keyFrames.append(frame)
def generateSlope(self, list):
        #generate the handle slopes of every keyframe for the animation curve
listLen = len(list)
if listLen == 2:
slope =[float(list[1][AG.FRAME]-list[0][AG.FRAME]),(float(list[1][AG.VALUE])-float(list[0][AG.VALUE]))]
list[0][AG.INSLOPE] = slope
list[1][AG.INSLOPE] = slope
list[0][AG.OUTSLOPE] = list[0][AG.INSLOPE]
list[1][AG.OUTSLOPE] = list[1][AG.INSLOPE]
return
if listLen >= 3:
list[0][AG.INSLOPE] = [float(list[1][AG.FRAME] - list[0][AG.FRAME]),(float(list[1][AG.VALUE]) - float(list[0][AG.VALUE]))]
list[0][AG.OUTSLOPE] = list[0][AG.INSLOPE]
for i in range(1, listLen-1):
list[i][AG.INSLOPE] = [float(list[i+1][AG.FRAME] - list[i-1][AG.FRAME]),(float(list[i+1][AG.VALUE]) - float(list[i-1][AG.VALUE]))]
list[i][AG.OUTSLOPE] = list[i][AG.INSLOPE]
list[listLen-1][AG.INSLOPE] = [float(list[listLen-1][AG.FRAME] - list[listLen-2][AG.FRAME]),(float(list[listLen-1][AG.VALUE]) - float(list[listLen-2][AG.VALUE]))]
list[listLen-1][AG.OUTSLOPE] = list[listLen-1][AG.INSLOPE]
return
def removeAnimInfo(self, uid):
for property in self.keyFramesInfo.keys():
if property[AG.UID] == uid:
del self.keyFramesInfo[property]
self.generateKeyFrames()
if self.editor.mode == self.editor.ANIM_MODE:
self.editor.ui.animUI.OnPropKey()
def singleCurveAnimation(self, nodePath, curve, time):
rope = curve[OG.OBJ_NP]
self.points = rope.getPoints(time)
self.hprs = []
temp = render.attachNewNode("temp")
temp.setHpr(0,0,0)
for i in range(len(self.points)-1):
temp.setPos(self.points[i])
temp.lookAt(self.points[i+1])
hpr = temp.getHpr()
## self.hprs.append(hpr)
self.hprs.append(VBase3(hpr[0]+180,hpr[1],hpr[2]))
self.hprs.append(self.hprs[len(self.points)-2])
curveSequenceName = str(nodePath[OG.OBJ_UID])+' '+str(curve[OG.OBJ_UID])+' '+str(time)
self.curveSequence = Sequence(name = curveSequenceName)
for i in range(len(self.points)-1):
myLerp = LerpPosHprInterval(nodePath[OG.OBJ_NP], float(1)/float(24), self.points[i+1], self.hprs[i+1], self.points[i], self.hprs[i])
self.curveSequence.append(myLerp)
return self.curveSequence
def createParallel(self, startFrame, endFrame):
self.parallel = []
self.parallel = Parallel(name="Current Parallel")
self.createCurveAnimation(self.parallel)
self.createActorAnimation(self.parallel, startFrame, endFrame)
self.createKeyFrameAnimation(self.parallel, startFrame, endFrame)
self.createCurveKeyFrameAnimation(self.parallel, startFrame, endFrame)
return self.parallel
def createCurveAnimation(self, parallel):
for key in self.curveAnimation:
curveInfo = self.curveAnimation[key]
nodePath = self.editor.objectMgr.findObjectById(curveInfo[AG.NODE])
curve = self.editor.objectMgr.findObjectById(curveInfo[AG.CURVE])
time = curveInfo[AG.TIME]
sequence = self.singleCurveAnimation(nodePath, curve, time)
parallel.append(sequence)
def createActorAnimation(self, parallel, startFrame, endFrame):
self.editor.objectMgr.findActors(render)
for actor in self.editor.objectMgr.Actor:
actorAnim = os.path.basename(actor[OG.OBJ_ANIM])
myInterval = ActorInterval(actor[OG.OBJ_NP], actorAnim, loop=1, duration = float(endFrame-startFrame+1)/float(24))
parallel.append(myInterval)
def createKeyFrameAnimation(self, parallel, startFrame, endFrame):
#generate key frame animation for normal property
self.editor.objectMgr.findNodes(render)
for node in self.editor.objectMgr.Nodes:
for property in self.keyFramesInfo.keys():
if property[AG.UID] == node[OG.OBJ_UID] and property[AG.PROP_NAME] != 'X' and property[AG.PROP_NAME] != 'Y' and property[AG.PROP_NAME] != 'Z':
mysequence = Sequence(name = node[OG.OBJ_UID])
keyFramesInfo = self.keyFramesInfo[property]
if len(keyFramesInfo) == 1:
myLerp = LerpFunc(self.lerpFuncs[property[AG.PROP_NAME]],fromData=float(keyFramesInfo[0][AG.VALUE]),toData=float(keyFramesInfo[0][AG.VALUE]),duration = float(endFrame-startFrame)/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
parallel.append(mysequence)
if len(keyFramesInfo) != 1:
myLerp = LerpFunc(self.lerpFuncs[property[AG.PROP_NAME]],fromData=float(keyFramesInfo[0][AG.VALUE]),toData=float(keyFramesInfo[0][AG.VALUE]),duration = float(keyFramesInfo[0][AG.FRAME]-startFrame)/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
for key in range(0,len(keyFramesInfo)-1):
myLerp = LerpFunc(self.lerpFuncs[property[AG.PROP_NAME]],fromData=float(keyFramesInfo[key][AG.VALUE]),toData=float(keyFramesInfo[key+1][AG.VALUE]),duration = float(keyFramesInfo[key+1][AG.FRAME]-keyFramesInfo[key][AG.FRAME])/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
myLerp = LerpFunc(self.lerpFuncs[property[AG.PROP_NAME]],fromData=float(keyFramesInfo[len(keyFramesInfo)-1][AG.VALUE]),toData=float(keyFramesInfo[len(keyFramesInfo)-1][AG.VALUE]),duration = float(endFrame-keyFramesInfo[len(keyFramesInfo)-1][AG.FRAME])/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
parallel.append(mysequence)
def createCurveKeyFrameAnimation(self, parallel, startFrame, endFrame):
        #generate key frame animation for the property which is controlled by animation curve
self.editor.objectMgr.findNodes(render)
for node in self.editor.objectMgr.Nodes:
for property in self.keyFramesInfo.keys():
if property[AG.UID] == node[OG.OBJ_UID]:
if property[AG.PROP_NAME] == 'X' or property[AG.PROP_NAME] == 'Y' or property[AG.PROP_NAME] == 'Z':
mysequence = Sequence(name = node[OG.OBJ_UID])
keyFramesInfo = self.keyFramesInfo[property]
if len(keyFramesInfo) == 1:
myLerp = LerpFunc(self.curveLerpFuncs[property[AG.PROP_NAME]][0],fromData=float(keyFramesInfo[0][AG.VALUE]),toData=float(keyFramesInfo[0][AG.VALUE]),duration = float(endFrame-startFrame)/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
parallel.append(mysequence)
if len(keyFramesInfo) == 2:
myLerp = LerpFunc(self.curveLerpFuncs[property[AG.PROP_NAME]][0],fromData=float(keyFramesInfo[0][AG.VALUE]),toData=float(keyFramesInfo[0][AG.VALUE]),duration = float(keyFramesInfo[0][AG.FRAME]-startFrame)/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
for key in range(0,len(keyFramesInfo)-1):
self.keyFrameInfoForSingleLerp = keyFramesInfo
self.keyInfoForSingleLerp = key
myLerp = LerpFunc(self.curveLerpFuncs[property[AG.PROP_NAME]][0],fromData=float(keyFramesInfo[key][AG.VALUE]),toData=float(keyFramesInfo[key+1][AG.VALUE]),duration = float(keyFramesInfo[key+1][AG.FRAME]-keyFramesInfo[key][AG.FRAME])/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
myLerp = LerpFunc(self.curveLerpFuncs[property[AG.PROP_NAME]][0],fromData=float(keyFramesInfo[len(keyFramesInfo)-1][AG.VALUE]),toData=float(keyFramesInfo[len(keyFramesInfo)-1][AG.VALUE]),duration = float(endFrame-keyFramesInfo[len(keyFramesInfo)-1][AG.FRAME])/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
parallel.append(mysequence)
if len(keyFramesInfo) > 2:
myLerp = LerpFunc(self.curveLerpFuncs[property[AG.PROP_NAME]][0],fromData=float(keyFramesInfo[0][AG.VALUE]),toData=float(keyFramesInfo[0][1]),duration = float(keyFramesInfo[0][AG.FRAME]-startFrame)/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
for key in range(0,len(keyFramesInfo)-1):
myLerp = LerpFunc(self.curveLerpFuncs[property[AG.PROP_NAME]][1],fromData=float(keyFramesInfo[key][AG.FRAME]),toData=float(keyFramesInfo[key+1][AG.FRAME]),duration = float(keyFramesInfo[key+1][AG.FRAME]-keyFramesInfo[key][AG.FRAME])/float(24),blendType = 'noBlend',extraArgs = [[node[OG.OBJ_NP], keyFramesInfo, key]])
mysequence.append(myLerp)
myLerp = LerpFunc(self.curveLerpFuncs[property[AG.PROP_NAME]][0],fromData=float(keyFramesInfo[len(keyFramesInfo)-1][AG.VALUE]),toData=float(keyFramesInfo[len(keyFramesInfo)-1][AG.VALUE]),duration = float(endFrame-keyFramesInfo[len(keyFramesInfo)-1][AG.FRAME])/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
parallel.append(mysequence)
def getPos(self, x, list, i):
#get the value from animation curve
x1 = float(list[i][AG.FRAME])
y1 = float(list[i][AG.VALUE])
x4 = float(list[i+1][AG.FRAME])
y4 = float(list[i+1][AG.VALUE])
t1x = list[i][AG.OUTSLOPE][0]
t1y = list[i][AG.OUTSLOPE][1]
t2x = list[i+1][AG.INSLOPE][0]
t2y = list[i+1][AG.INSLOPE][1]
x2 = x1 + (x4 - x1) / float(3)
scale1 = (x2 - x1) / t1x
y2 = y1 + t1y * scale1
x3 = x4 - (x4 - x1) / float(3)
scale2 = (x4 - x3) / t2x
y3 = y4 - t2y * scale2
ax = - float(1) * x1 + float(3) * x2 - float(3) * x3 + float(1) * x4
bx = float(3) * x1 - float(6) * x2 + float(3) * x3 + float(0) * x4
cx = - float(3) * x1 + float(3) * x2 + float(0) * x3 + float(0) * x4
dx = float(1) * x1 + float(0) * x2 - float(0) * x3 + float(0) * x4
ay = - float(1) * y1 + float(3) * y2 - float(3) * y3 + float(1) * y4
by = float(3) * y1 - float(6) * y2 + float(3) * y3 + float(0) * y4
cy = - float(3) * y1 + float(3) * y2 + float(0) * y3 + float(0) * y4
dy = float(1) * y1 + float(0) * y2 - float(0) * y3 + float(0) * y4
if ax == 0 and bx == 0 and cx == 0:
return 0
if ax == 0 and bx == 0 and cx != 0:
a = cx
b = dx-x
t = -b/a
y = ay * t*t*t + by * t*t + cy * t + dy
return y
if ax == 0 and bx!= 0:
a=bx
b=cx
c=dx-x
            t=(-b+math.sqrt(b**2-4.0*a*c))/(2*a)
if t>=0 and t<=1:
y = ay * t*t*t + by * t*t + cy * t + dy
return y
else:
                t=(-b-math.sqrt(b**2-4.0*a*c))/(2*a)
y = ay * t*t*t + by * t*t + cy * t + dy
return y
if ax != 0:
a = ax
b = bx
c = cx
d = dx - float(x)
t = self.calculateT(a, b, c, d, x)
y = ay * t*t*t + by * t*t + cy * t + dy
return y
def calculateT(self, a, b, c, d, x):
        # Newton's method iteration to solve the cubic for parameter t
t = float(1)
t2 = t
t -= (a*t*t*t+b*t*t+c*t+d)/(float(3)*a*t*t+float(2)*b*t+c)
if abs(t-t2) <= 0.000001:
return t
else:
while abs(t - t2) > 0.000001:
t2 = t
t -= (a*t*t*t+b*t*t+c*t+d)/(float(3)*a*t*t+float(2)*b*t+c)
return t
def lerpFuncX(self,pos,np):
np.setX(pos)
def lerpFuncY(self,pos,np):
np.setY(pos)
def lerpFuncZ(self,pos,np):
np.setZ(pos)
def lerpCurveFuncX(self,t,extraArgs):
np = extraArgs[0]
pos = self.getPos(t, extraArgs[1], extraArgs[2])
np.setX(pos)
def lerpCurveFuncY(self,t,extraArgs):
np = extraArgs[0]
pos = self.getPos(t, extraArgs[1], extraArgs[2])
np.setY(pos)
def lerpCurveFuncZ(self,t,extraArgs):
np = extraArgs[0]
pos = self.getPos(t, extraArgs[1], extraArgs[2])
np.setZ(pos)
def lerpFuncH(self,angle,np):
np.setH(angle)
def lerpFuncP(self,angle,np):
np.setP(angle)
def lerpFuncR(self,angle,np):
np.setR(angle)
def lerpFuncSX(self,scale,np):
np.setSx(scale)
def lerpFuncSY(self,scale,np):
np.setSy(scale)
def lerpFuncSZ(self,scale,np):
np.setSz(scale)
def lerpFuncCR(self,R,np):
obj = self.editor.objectMgr.findObjectByNodePath(np)
r = obj[OG.OBJ_RGBA][0]
g = obj[OG.OBJ_RGBA][1]
b = obj[OG.OBJ_RGBA][2]
a = obj[OG.OBJ_RGBA][3]
self.colorUpdate(R,g,b,a,np)
def lerpFuncCG(self,G,np):
obj = self.editor.objectMgr.findObjectByNodePath(np)
r = obj[OG.OBJ_RGBA][0]
g = obj[OG.OBJ_RGBA][1]
b = obj[OG.OBJ_RGBA][2]
a = obj[OG.OBJ_RGBA][3]
self.colorUpdate(r,G,b,a,np)
def lerpFuncCB(self,B,np):
obj = self.editor.objectMgr.findObjectByNodePath(np)
r = obj[OG.OBJ_RGBA][0]
g = obj[OG.OBJ_RGBA][1]
b = obj[OG.OBJ_RGBA][2]
a = obj[OG.OBJ_RGBA][3]
self.colorUpdate(r,g,B,a,np)
def lerpFuncCA(self,A,np):
obj = self.editor.objectMgr.findObjectByNodePath(np)
r = obj[OG.OBJ_RGBA][0]
g = obj[OG.OBJ_RGBA][1]
b = obj[OG.OBJ_RGBA][2]
a = obj[OG.OBJ_RGBA][3]
self.colorUpdate(r,g,b,A,np)
def colorUpdate(self, r, g, b, a, np):
if base.direct.selected.last == None:
self.editor.objectMgr.updateObjectColor(r, g, b, a, np)
elif self.editor.objectMgr.findObjectByNodePath(np) == self.editor.objectMgr.findObjectByNodePath(base.direct.selected.last):
self.editor.ui.objectPropertyUI.propCR.setValue(r)
self.editor.ui.objectPropertyUI.propCG.setValue(g)
self.editor.ui.objectPropertyUI.propCB.setValue(b)
self.editor.ui.objectPropertyUI.propCA.setValue(a)
self.editor.objectMgr.updateObjectColor(r, g, b, a, np)
else:
self.editor.objectMgr.updateObjectColor(r, g, b, a, np)
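def exampleGenerateSlope():
    """ Illustrative sketch only, not part of the level editor: builds a small
    keyframe list in the [frame, value, inSlope, outSlope] layout assumed by
    AnimMgrBase.generateSlope (i.e. AG.FRAME=0, AG.VALUE=1, AG.INSLOPE=2,
    AG.OUTSLOPE=3) and fills in the tangent slopes in place. """
    animMgr = AnimMgrBase(None) # generateSlope never touches the editor
    keyFrames = [[0, 0.0, None, None], [12, 5.0, None, None], [24, 0.0, None, None]]
    animMgr.generateSlope(keyFrames)
    return keyFrames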
| bsd-3-clause | -8,185,697,550,575,294,000 | 46.656676 | 354 | 0.547944 | false |