| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6-947k | stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 value | stringclasses 15 values | int64 6-947k | float64 0-0.34 |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.23
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1RuleWithOperations(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_groups': 'list[str]',
'api_versions': 'list[str]',
'operations': 'list[str]',
'resources': 'list[str]',
'scope': 'str'
}
attribute_map = {
'api_groups': 'apiGroups',
'api_versions': 'apiVersions',
'operations': 'operations',
'resources': 'resources',
'scope': 'scope'
}
def __init__(self, api_groups=None, api_versions=None, operations=None, resources=None, scope=None, local_vars_configuration=None): # noqa: E501
"""V1RuleWithOperations - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_groups = None
self._api_versions = None
self._operations = None
self._resources = None
self._scope = None
self.discriminator = None
if api_groups is not None:
self.api_groups = api_groups
if api_versions is not None:
self.api_versions = api_versions
if operations is not None:
self.operations = operations
if resources is not None:
self.resources = resources
if scope is not None:
self.scope = scope
@property
def api_groups(self):
"""Gets the api_groups of this V1RuleWithOperations. # noqa: E501
APIGroups is the API groups the resources belong to. '*' is all groups. If '*' is present, the length of the slice must be one. Required. # noqa: E501
:return: The api_groups of this V1RuleWithOperations. # noqa: E501
:rtype: list[str]
"""
return self._api_groups
@api_groups.setter
def api_groups(self, api_groups):
"""Sets the api_groups of this V1RuleWithOperations.
APIGroups is the API groups the resources belong to. '*' is all groups. If '*' is present, the length of the slice must be one. Required. # noqa: E501
:param api_groups: The api_groups of this V1RuleWithOperations. # noqa: E501
:type: list[str]
"""
self._api_groups = api_groups
@property
def api_versions(self):
"""Gets the api_versions of this V1RuleWithOperations. # noqa: E501
APIVersions is the API versions the resources belong to. '*' is all versions. If '*' is present, the length of the slice must be one. Required. # noqa: E501
:return: The api_versions of this V1RuleWithOperations. # noqa: E501
:rtype: list[str]
"""
return self._api_versions
@api_versions.setter
def api_versions(self, api_versions):
"""Sets the api_versions of this V1RuleWithOperations.
APIVersions is the API versions the resources belong to. '*' is all versions. If '*' is present, the length of the slice must be one. Required. # noqa: E501
:param api_versions: The api_versions of this V1RuleWithOperations. # noqa: E501
:type: list[str]
"""
self._api_versions = api_versions
@property
def operations(self):
"""Gets the operations of this V1RuleWithOperations. # noqa: E501
Operations is the operations the admission hook cares about - CREATE, UPDATE, DELETE, CONNECT or * for all of those operations and any future admission operations that are added. If '*' is present, the length of the slice must be one. Required. # noqa: E501
:return: The operations of this V1RuleWithOperations. # noqa: E501
:rtype: list[str]
"""
return self._operations
@operations.setter
def operations(self, operations):
"""Sets the operations of this V1RuleWithOperations.
Operations is the operations the admission hook cares about - CREATE, UPDATE, DELETE, CONNECT or * for all of those operations and any future admission operations that are added. If '*' is present, the length of the slice must be one. Required. # noqa: E501
:param operations: The operations of this V1RuleWithOperations. # noqa: E501
:type: list[str]
"""
self._operations = operations
@property
def resources(self):
"""Gets the resources of this V1RuleWithOperations. # noqa: E501
Resources is a list of resources this rule applies to. For example: 'pods' means pods. 'pods/log' means the log subresource of pods. '*' means all resources, but not subresources. 'pods/*' means all subresources of pods. '*/scale' means all scale subresources. '*/*' means all resources and their subresources. If wildcard is present, the validation rule will ensure resources do not overlap with each other. Depending on the enclosing object, subresources might not be allowed. Required. # noqa: E501
:return: The resources of this V1RuleWithOperations. # noqa: E501
:rtype: list[str]
"""
return self._resources
@resources.setter
def resources(self, resources):
"""Sets the resources of this V1RuleWithOperations.
Resources is a list of resources this rule applies to. For example: 'pods' means pods. 'pods/log' means the log subresource of pods. '*' means all resources, but not subresources. 'pods/*' means all subresources of pods. '*/scale' means all scale subresources. '*/*' means all resources and their subresources. If wildcard is present, the validation rule will ensure resources do not overlap with each other. Depending on the enclosing object, subresources might not be allowed. Required. # noqa: E501
:param resources: The resources of this V1RuleWithOperations. # noqa: E501
:type: list[str]
"""
self._resources = resources
@property
def scope(self):
"""Gets the scope of this V1RuleWithOperations. # noqa: E501
scope specifies the scope of this rule. Valid values are \"Cluster\", \"Namespaced\", and \"*\" \"Cluster\" means that only cluster-scoped resources will match this rule. Namespace API objects are cluster-scoped. \"Namespaced\" means that only namespaced resources will match this rule. \"*\" means that there are no scope restrictions. Subresources match the scope of their parent resource. Default is \"*\". # noqa: E501
:return: The scope of this V1RuleWithOperations. # noqa: E501
:rtype: str
"""
return self._scope
@scope.setter
def scope(self, scope):
"""Sets the scope of this V1RuleWithOperations.
scope specifies the scope of this rule. Valid values are \"Cluster\", \"Namespaced\", and \"*\" \"Cluster\" means that only cluster-scoped resources will match this rule. Namespace API objects are cluster-scoped. \"Namespaced\" means that only namespaced resources will match this rule. \"*\" means that there are no scope restrictions. Subresources match the scope of their parent resource. Default is \"*\". # noqa: E501
:param scope: The scope of this V1RuleWithOperations. # noqa: E501
:type: str
"""
self._scope = scope
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1RuleWithOperations):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1RuleWithOperations):
return True
return self.to_dict() != other.to_dict()
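# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the generated file): building a rule
# that matches CREATE/UPDATE on pods in any API group and version. The values
# below are hypothetical.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    rule = V1RuleWithOperations(
        api_groups=['*'],
        api_versions=['*'],
        operations=['CREATE', 'UPDATE'],
        resources=['pods'],
        scope='Namespaced')
    print(rule.to_dict())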
| kubernetes-client/python | kubernetes/client/models/v1_rule_with_operations.py | Python | apache-2.0 | 9,436 | 0 |
# Copyright (c) 2010 by Dan Jacob.
#
# Some rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * The names of the contributors may not be used to endorse or
# promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import getpass
try:
raw_input  # probe for the Python 2 builtin; raises NameError on Python 3
except NameError:
raw_input = input
def prompt(name, default=None):
"""
Grab user input from command line.
:param name: prompt text
:param default: default value if no input provided.
"""
prompt = name + (default and ' [%s]' % default or '')
prompt += name.endswith('?') and ' ' or ': '
while True:
rv = raw_input(prompt)
if rv:
return rv
if default is not None:
return default
def prompt_pass(name, default=None):
"""
Grabs hidden (password) input from command line.
:param name: prompt text
:param default: default value if no input provided.
"""
prompt = name + (default and ' [%s]' % default or '')
prompt += name.endswith('?') and ' ' or ': '
while True:
rv = getpass.getpass(prompt)
if rv:
return rv
if default is not None:
return default
def prompt_bool(name, default=False, yes_choices=None, no_choices=None):
"""
Grabs user input from command line and converts to boolean
value.
:param name: prompt text
:param default: default value if no input provided.
:param yes_choices: default 'y', 'yes', '1', 'on', 'true', 't'
:param no_choices: default 'n', 'no', '0', 'off', 'false', 'f'
"""
yes_choices = yes_choices or ('y', 'yes', '1', 'on', 'true', 't')
no_choices = no_choices or ('n', 'no', '0', 'off', 'false', 'f')
while True:
rv = prompt(name + '?', default and yes_choices[0] or no_choices[0])
if rv.lower() in yes_choices:
return True
elif rv.lower() in no_choices:
return False
def prompt_choices(name, choices, default=None, no_choice=('none',)):
"""
Grabs user input from command line from set of provided choices.
:param name: prompt text
:param choices: list or tuple of available choices.
:param default: default value if no input provided.
:param no_choice: acceptable list of strings for "null choice"
"""
_choices = []
options = []
for choice in choices:
options.append(choice)
_choices.append(choice)
while True:
rv = prompt(name + '? - (%s)' % ', '.join(options), default)
rv = rv.lower()
if rv in no_choice:
return None
if rv in _choices:
return rv
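# ---------------------------------------------------------------------------
# Illustrative demo (not part of the original module): exercising the prompt
# helpers interactively. The prompt texts and choices are hypothetical; run
# the file directly to try it.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    project = prompt('Project name', default='demo')
    use_git = prompt_bool('Initialize a git repository', default=True)
    chosen = prompt_choices('License', ['mit', 'bsd', 'gpl'], default='mit')
    print('%s git=%s license=%s' % (project, use_git, chosen))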
| whtsky/parguments | parguments/cli.py | Python | mit | 3,906 | 0 |
#############################################################################
#
# Copyright (C) 2013 Navi-X
#
# This file is part of Navi-X.
#
# Navi-X is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Navi-X is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Navi-X. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
#############################################################################
#
# CDialogLogin:
# This class handles user login to the Navi-Xtreme website.
#############################################################################
from string import *
import sys, os.path
import urllib
import urllib2
import re, random, string
import xbmc, xbmcgui, xbmcaddon
import re, os, time, datetime, traceback
import shutil
import os
from libs2 import *
try: Emulating = xbmcgui.Emulating
except AttributeError: Emulating = False
LABEL_USRNAME = 141
LABEL_PASSWORD = 142
BUTTON_USRNAME = 143
BUTTON_PASSWORD = 1144
BUTTON_LOGIN = 145
BUTTON_CANCEL = 146
class CDialogLogin(xbmcgui.WindowXMLDialog):
def __init__(self,strXMLname, strFallbackPath):#, strDefaultName, forceFallback):
# self.setCoordinateResolution(PAL_4x3)
#user background image
# self.bg = xbmcgui.ControlImage(100,100,520,376, imageDir + "background_txt.png")
# self.addControl(self.bg)
self.userloggedin = False
#read user ID from file
self.user_id=''
pass
def onAction(self, action):
if (action == ACTION_PREVIOUS_MENU) or (action == ACTION_PARENT_DIR) or (action == ACTION_PREVIOUS_MENU2):# or (action == ACTION_MOVE_LEFT):
self.close()
def onFocus( self, controlId ):
pass
def onClick( self, controlId ):
pass
def onControl(self, control):
#self.setFocus(control)
pass
def login(self):
#display GUI window
self.doModal()
#perform login to the Navi-Xtreme server
#if success
self.save_user_id()
def logout(self):
self.user_id=''
self.save_user_id() # persist the cleared user ID
def is_user_logged_in(self):
if self.user_id != '':
return True
return False
def rate_item(self, mediaitem):
pass
def read_user_id(self):
pass
def save_user_id(self):
pass
#end of class
#use singleton
#login = CDialogLogin("CLoginskin.xml", os.getcwd())
login = CDialogLogin("CLoginskin2.xml", addon.getAddonInfo('path'))
| JamesLinEngineer/RKMC | addons/script.navi-x/src/CLogin.py | Python | gpl-2.0 | 3,203 | 0.01561 |
"""
Learning Tools Interoperability (LTI) module.
Resources
---------
Theoretical background and detailed specifications of LTI can be found on:
http://www.imsglobal.org/LTI/v1p1p1/ltiIMGv1p1p1.html
This module is based on the version 1.1.1 of the LTI specifications by the
IMS Global authority. For authentication, it uses OAuth1.
When responding back to the LTI tool provider, we must issue a correct
response. Types of responses and their message payload is available at:
Table A1.2 Interpretation of the 'CodeMajor/severity' matrix.
http://www.imsglobal.org/gws/gwsv1p0/imsgws_wsdlBindv1p0.html
A resource to test the LTI protocol (PHP realization):
http://www.imsglobal.org/developers/LTI/test/v1p1/lms.php
We have also begun to add support for LTI 1.2/2.0. We will keep this
docstring in synch with what support is available. The first LTI 2.0
feature to be supported is the REST API results service, see specification
at
http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html
What is supported:
------------------
1.) Display of simple LTI in iframe or a new window.
2.) Multiple LTI components on a single page.
3.) The use of multiple LTI providers per course.
4.) Use of advanced LTI component that provides back a grade.
A) LTI 1.1.1 XML endpoint
a.) The LTI provider sends back a grade to a specified URL.
b.) Currently only action "update" is supported. "Read", and "delete"
actions initially weren't required.
B) LTI 2.0 Result Service JSON REST endpoint
(http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html)
a.) Discovery of all such LTI http endpoints for a course. External tools GET from this discovery
endpoint and receive URLs for interacting with individual grading units.
(see lms/djangoapps/courseware/views.py:get_course_lti_endpoints)
b.) GET, PUT and DELETE in LTI Result JSON binding
(http://www.imsglobal.org/lti/ltiv2p0/mediatype/application/vnd/ims/lis/v2/result+json/index.html)
for a provider to synchronize grades into edx-platform. Reading, Setting, and Deleteing
Numeric grades between 0 and 1 and text + basic HTML feedback comments are supported, via
GET / PUT / DELETE HTTP methods respectively
"""
import datetime
from django.utils.timezone import UTC
import logging
import oauthlib.oauth1
from oauthlib.oauth1.rfc5849 import signature
import hashlib
import base64
import urllib
import textwrap
import bleach
from lxml import etree
from webob import Response
import mock
from xml.sax.saxutils import escape
from xmodule.editing_module import MetadataOnlyEditingDescriptor
from xmodule.raw_module import EmptyDataRawDescriptor
from xmodule.x_module import XModule, module_attr
from xmodule.course_module import CourseDescriptor
from xmodule.lti_2_util import LTI20ModuleMixin, LTIError
from pkg_resources import resource_string
from xblock.core import String, Scope, List, XBlock
from xblock.fields import Boolean, Float
log = logging.getLogger(__name__)
# Make '_' a no-op so we can scrape strings
_ = lambda text: text
DOCS_ANCHOR_TAG_OPEN = (
"<a target='_blank' "
"href='http://edx.readthedocs.org/projects/ca/en/latest/exercises_tools/lti_component.html'>"
)
class LTIFields(object):
"""
Fields to define and obtain LTI tool from provider are set here,
except credentials, which should be set in course settings::
`lti_id` is id to connect tool with credentials in course settings. It should not contain :: (double semicolon)
`launch_url` is launch URL of tool.
`custom_parameters` are additional parameters to navigate to proper book and book page.
For example, for Vitalsource provider, `launch_url` should be
*https://bc-staging.vitalsource.com/books/book*,
and to get to proper book and book page, you should set custom parameters as::
vbid=put_book_id_here
book_location=page/put_page_number_here
Default non-empty URL for `launch_url` is needed due to oauthlib demand (URL scheme should be presented)::
https://github.com/idan/oauthlib/blob/master/oauthlib/oauth1/rfc5849/signature.py#L136
"""
display_name = String(
display_name=_("Display Name"),
help=_(
"Enter the name that students see for this component. "
"Analytics reports may also use the display name to identify this component."
),
scope=Scope.settings,
default="LTI",
)
lti_id = String(
display_name=_("LTI ID"),
help=_(
"Enter the LTI ID for the external LTI provider. "
"This value must be the same LTI ID that you entered in the "
"LTI Passports setting on the Advanced Settings page."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
default='',
scope=Scope.settings
)
launch_url = String(
display_name=_("LTI URL"),
help=_(
"Enter the URL of the external tool that this component launches. "
"This setting is only used when Hide External Tool is set to False."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
default='http://www.example.com',
scope=Scope.settings)
custom_parameters = List(
display_name=_("Custom Parameters"),
help=_(
"Add the key/value pair for any custom parameters, such as the page your e-book should open to or "
"the background color for this component."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
scope=Scope.settings)
open_in_a_new_page = Boolean(
display_name=_("Open in New Page"),
help=_(
"Select True if you want students to click a link that opens the LTI tool in a new window. "
"Select False if you want the LTI content to open in an IFrame in the current page. "
"This setting is only used when Hide External Tool is set to False. "
),
default=True,
scope=Scope.settings
)
has_score = Boolean(
display_name=_("Scored"),
help=_(
"Select True if this component will receive a numerical score from the external LTI system."
),
default=False,
scope=Scope.settings
)
weight = Float(
display_name=_("Weight"),
help=_(
"Enter the number of points possible for this component. "
"The default value is 1.0. "
"This setting is only used when Scored is set to True."
),
default=1.0,
scope=Scope.settings,
values={"min": 0},
)
module_score = Float(
help=_("The score kept in the xblock KVS -- duplicate of the published score in django DB"),
default=None,
scope=Scope.user_state
)
score_comment = String(
help=_("Comment as returned from grader, LTI2.0 spec"),
default="",
scope=Scope.user_state
)
hide_launch = Boolean(
display_name=_("Hide External Tool"),
help=_(
"Select True if you want to use this component as a placeholder for syncing with an external grading "
"system rather than launch an external tool. "
"This setting hides the Launch button and any IFrames for this component."
),
default=False,
scope=Scope.settings
)
# Users will be presented with a message indicating that their e-mail/username would be sent to a third
# party application. When "Open in New Page" is not selected, the tool automatically appears without any user action.
ask_to_send_username = Boolean(
display_name=_("Request user's username"),
# Translators: This is used to request the user's username for a third party service.
# Usernames can only be requested if "Open in New Page" is set to True.
help=_(
"Select True to request the user's username. You must also set Open in New Page to True to get the user's information."
),
default=False,
scope=Scope.settings
)
ask_to_send_email = Boolean(
display_name=_("Request user's email"),
# Translators: This is used to request the user's email for a third party service.
# Emails can only be requested if "Open in New Page" is set to True.
help=_(
"Select True to request the user's email address. You must also set Open in New Page to True to get the user's information."
),
default=False,
scope=Scope.settings
)
description = String(
display_name=_("LTI Application Information"),
help=_(
"Enter a description of the third party application. If requesting username and/or email, use this text box to inform users "
"why their username and/or email will be forwarded to a third party application."
),
default="",
scope=Scope.settings
)
button_text = String(
display_name=_("Button Text"),
help=_(
"Enter the text on the button used to launch the third party application."
),
default="",
scope=Scope.settings
)
accept_grades_past_due = Boolean(
display_name=_("Accept grades past deadline"),
help=_("Select True to allow third party systems to post grades past the deadline."),
default=True,
scope=Scope.settings
)
class LTIModule(LTIFields, LTI20ModuleMixin, XModule):
"""
Module provides LTI integration to course.
Except usual Xmodule structure it proceeds with OAuth signing.
How it works::
1. Get credentials from course settings.
2. There is minimal set of parameters need to be signed (presented for Vitalsource)::
user_id
oauth_callback
lis_outcome_service_url
lis_result_sourcedid
launch_presentation_return_url
lti_message_type
lti_version
roles
*+ all custom parameters*
These parameters should be encoded and signed by *OAuth1* together with
`launch_url` and *POST* request type.
3. Signing proceeds with client key/secret pair obtained from course settings.
That pair should be obtained from LTI provider and set into course settings by course author.
After that signature and other OAuth data are generated.
OAuth data which is generated after signing is usual::
oauth_callback
oauth_nonce
oauth_consumer_key
oauth_signature_method
oauth_timestamp
oauth_version
4. All that data is passed to form and sent to LTI provider server by browser via
autosubmit via JavaScript.
Form example::
<form
action="${launch_url}"
name="ltiLaunchForm-${element_id}"
class="ltiLaunchForm"
method="post"
target="ltiLaunchFrame-${element_id}"
encType="application/x-www-form-urlencoded"
>
<input name="launch_presentation_return_url" value="" />
<input name="lis_outcome_service_url" value="" />
<input name="lis_result_sourcedid" value="" />
<input name="lti_message_type" value="basic-lti-launch-request" />
<input name="lti_version" value="LTI-1p0" />
<input name="oauth_callback" value="about:blank" />
<input name="oauth_consumer_key" value="${oauth_consumer_key}" />
<input name="oauth_nonce" value="${oauth_nonce}" />
<input name="oauth_signature_method" value="HMAC-SHA1" />
<input name="oauth_timestamp" value="${oauth_timestamp}" />
<input name="oauth_version" value="1.0" />
<input name="user_id" value="${user_id}" />
<input name="role" value="student" />
<input name="oauth_signature" value="${oauth_signature}" />
<input name="custom_1" value="${custom_param_1_value}" />
<input name="custom_2" value="${custom_param_2_value}" />
<input name="custom_..." value="${custom_param_..._value}" />
<input type="submit" value="Press to Launch" />
</form>
5. LTI provider has same secret key and it signs data string via *OAuth1* and compares signatures.
If signatures are correct, LTI provider redirects iframe source to LTI tool web page,
and LTI tool is rendered to iframe inside course.
Otherwise error message from LTI provider is generated.
"""
js = {
'js': [
resource_string(__name__, 'js/src/lti/lti.js')
]
}
css = {'scss': [resource_string(__name__, 'css/lti/lti.scss')]}
js_module_name = "LTI"
def get_input_fields(self):
# LTI provides a list of default parameters that might be passed as
# part of the POST data. These parameters should not be prefixed.
# Likewise, The creator of an LTI link can add custom key/value parameters
# to a launch which are to be included with the launch of the LTI link.
# In this case, we will automatically add `custom_` prefix before this parameters.
# See http://www.imsglobal.org/LTI/v1p1p1/ltiIMGv1p1p1.html#_Toc316828520
PARAMETERS = [
"lti_message_type",
"lti_version",
"resource_link_title",
"resource_link_description",
"user_image",
"lis_person_name_given",
"lis_person_name_family",
"lis_person_name_full",
"lis_person_contact_email_primary",
"lis_person_sourcedid",
"role_scope_mentor",
"context_type",
"context_title",
"context_label",
"launch_presentation_locale",
"launch_presentation_document_target",
"launch_presentation_css_url",
"launch_presentation_width",
"launch_presentation_height",
"launch_presentation_return_url",
"tool_consumer_info_product_family_code",
"tool_consumer_info_version",
"tool_consumer_instance_guid",
"tool_consumer_instance_name",
"tool_consumer_instance_description",
"tool_consumer_instance_url",
"tool_consumer_instance_contact_email",
]
client_key, client_secret = self.get_client_key_secret()
# parsing custom parameters to dict
custom_parameters = {}
for custom_parameter in self.custom_parameters:
try:
param_name, param_value = [p.strip() for p in custom_parameter.split('=', 1)]
except ValueError:
_ = self.runtime.service(self, "i18n").ugettext
msg = _('Could not parse custom parameter: {custom_parameter}. Should be "x=y" string.').format(
custom_parameter="{0!r}".format(custom_parameter)
)
raise LTIError(msg)
# LTI specs: 'custom_' should be prepended before each custom parameter, as pointed in link above.
if param_name not in PARAMETERS:
param_name = 'custom_' + param_name
custom_parameters[unicode(param_name)] = unicode(param_value)
return self.oauth_params(
custom_parameters,
client_key,
client_secret,
)
def get_context(self):
"""
Returns a context.
"""
# use bleach defaults. see https://github.com/jsocol/bleach/blob/master/bleach/__init__.py
# ALLOWED_TAGS are
# ['a', 'abbr', 'acronym', 'b', 'blockquote', 'code', 'em', 'i', 'li', 'ol', 'strong', 'ul']
#
# ALLOWED_ATTRIBUTES are
# 'a': ['href', 'title'],
# 'abbr': ['title'],
# 'acronym': ['title'],
#
# This lets all plaintext through.
sanitized_comment = bleach.clean(self.score_comment)
return {
'input_fields': self.get_input_fields(),
# These parameters do not participate in OAuth signing.
'launch_url': self.launch_url.strip(),
'element_id': self.location.html_id(),
'element_class': self.category,
'open_in_a_new_page': self.open_in_a_new_page,
'display_name': self.display_name,
'form_url': self.runtime.handler_url(self, 'preview_handler').rstrip('/?'),
'hide_launch': self.hide_launch,
'has_score': self.has_score,
'weight': self.weight,
'module_score': self.module_score,
'comment': sanitized_comment,
'description': self.description,
'ask_to_send_username': self.ask_to_send_username,
'ask_to_send_email': self.ask_to_send_email,
'button_text': self.button_text,
'accept_grades_past_due': self.accept_grades_past_due,
}
def get_html(self):
"""
Renders parameters to template.
"""
return self.system.render_template('lti.html', self.get_context())
@XBlock.handler
def preview_handler(self, _, __):
"""
This is called to get context with new oauth params to iframe.
"""
template = self.system.render_template('lti_form.html', self.get_context())
return Response(template, content_type='text/html')
def get_user_id(self):
user_id = self.runtime.anonymous_student_id
assert user_id is not None
return unicode(urllib.quote(user_id))
def get_outcome_service_url(self, service_name="grade_handler"):
"""
Return URL for storing grades.
To test LTI on sandbox we must use http scheme.
While testing locally and on Jenkins, mock_lti_server use http.referer
to obtain scheme, so it is ok to have http(s) anyway.
The scheme logic is handled in lms/lib/xblock/runtime.py
"""
return self.runtime.handler_url(self, service_name, thirdparty=True).rstrip('/?')
def get_resource_link_id(self):
"""
This is an opaque unique identifier that the TC guarantees will be unique
within the TC for every placement of the link.
If the tool / activity is placed multiple times in the same context,
each of those placements will be distinct.
This value will also change if the item is exported from one system or
context and imported into another system or context.
This parameter is required.
Example: u'edx.org-i4x-2-3-lti-31de800015cf4afb973356dbe81496df'
Hostname, edx.org,
makes resource_link_id change on import to another system.
Last part of location, location.name - 31de800015cf4afb973356dbe81496df,
is random hash, updated by course_id,
this makes resource_link_id unique inside single course.
First part of location is tag-org-course-category, i4x-2-3-lti.
Location.name itself does not change on import to another course,
but org and course_id change.
So together with org and course_id in a form of
i4x-2-3-lti-31de800015cf4afb973356dbe81496df this part of resource_link_id:
makes resource_link_id to be unique among courses inside same system.
"""
return unicode(urllib.quote("{}-{}".format(self.system.hostname, self.location.html_id())))
def get_lis_result_sourcedid(self):
"""
This field contains an identifier that indicates the LIS Result Identifier (if any)
associated with this launch. This field identifies a unique row and column within the
TC gradebook. This field is unique for every combination of context_id / resource_link_id / user_id.
This value may change for a particular resource_link_id / user_id from one launch to the next.
The TP should only retain the most recent value for this field for a particular resource_link_id / user_id.
This field is generally optional, but is required for grading.
"""
return "{context}:{resource_link}:{user_id}".format(
context=urllib.quote(self.context_id),
resource_link=self.get_resource_link_id(),
user_id=self.get_user_id()
)
def get_course(self):
"""
Return course by course id.
"""
return self.descriptor.runtime.modulestore.get_course(self.course_id)
@property
def context_id(self):
"""
Return context_id.
context_id is an opaque identifier that uniquely identifies the context (e.g., a course)
that contains the link being launched.
"""
return self.course_id.to_deprecated_string()
@property
def role(self):
"""
Get system user role and convert it to LTI role.
"""
roles = {
'student': u'Student',
'staff': u'Administrator',
'instructor': u'Instructor',
}
return roles.get(self.system.get_user_role(), u'Student')
def oauth_params(self, custom_parameters, client_key, client_secret):
"""
Signs request and returns signature and OAuth parameters.
`custom_parameters` is the dict of parsed `custom_parameter` field values
`client_key` and `client_secret` are LTI tool credentials.
Also *anonymous student id* is passed to template and therefore to LTI provider.
"""
client = oauthlib.oauth1.Client(
client_key=unicode(client_key),
client_secret=unicode(client_secret)
)
# Must have parameters for correct signing from LTI:
body = {
u'user_id': self.get_user_id(),
u'oauth_callback': u'about:blank',
u'launch_presentation_return_url': '',
u'lti_message_type': u'basic-lti-launch-request',
u'lti_version': 'LTI-1p0',
u'roles': self.role,
# Parameters required for grading:
u'resource_link_id': self.get_resource_link_id(),
u'lis_result_sourcedid': self.get_lis_result_sourcedid(),
u'context_id': self.context_id,
}
if self.has_score:
body.update({
u'lis_outcome_service_url': self.get_outcome_service_url()
})
self.user_email = ""
self.user_username = ""
# Username and email can't be sent in studio mode, because the user object is not defined.
# To test functionality test in LMS
if callable(self.runtime.get_real_user):
real_user_object = self.runtime.get_real_user(self.runtime.anonymous_student_id)
try:
self.user_email = real_user_object.email
except AttributeError:
self.user_email = ""
try:
self.user_username = real_user_object.username
except AttributeError:
self.user_username = ""
if self.open_in_a_new_page:
if self.ask_to_send_username and self.user_username:
body["lis_person_sourcedid"] = self.user_username
if self.ask_to_send_email and self.user_email:
body["lis_person_contact_email_primary"] = self.user_email
# Appending custom parameter for signing.
body.update(custom_parameters)
headers = {
# This is needed for body encoding:
'Content-Type': 'application/x-www-form-urlencoded',
}
try:
__, headers, __ = client.sign(
unicode(self.launch_url.strip()),
http_method=u'POST',
body=body,
headers=headers)
except ValueError: # Scheme not in url.
# https://github.com/idan/oauthlib/blob/master/oauthlib/oauth1/rfc5849/signature.py#L136
# Stubbing headers for now:
headers = {
u'Content-Type': u'application/x-www-form-urlencoded',
u'Authorization': u'OAuth oauth_nonce="80966668944732164491378916897", \
oauth_timestamp="1378916897", oauth_version="1.0", oauth_signature_method="HMAC-SHA1", \
oauth_consumer_key="", oauth_signature="frVp4JuvT1mVXlxktiAUjQ7%2F1cw%3D"'}
params = headers['Authorization']
# Parse headers to pass to template as part of context:
params = dict([param.strip().replace('"', '').split('=') for param in params.split(',')])
params[u'oauth_nonce'] = params[u'OAuth oauth_nonce']
del params[u'OAuth oauth_nonce']
# oauthlib encodes signature with
# 'Content-Type': 'application/x-www-form-urlencoded'
# so '='' becomes '%3D'.
# We send form via browser, so browser will encode it again,
# So we need to decode signature back:
params[u'oauth_signature'] = urllib.unquote(params[u'oauth_signature']).decode('utf8')
# Add LTI parameters to OAuth parameters for sending in form.
params.update(body)
return params
def max_score(self):
return self.weight if self.has_score else None
@XBlock.handler
def grade_handler(self, request, suffix): # pylint: disable=unused-argument
"""
This is called by courseware.module_render, to handle an AJAX call.
Used only for grading. Returns XML response.
Example of request body from LTI provider::
<?xml version = "1.0" encoding = "UTF-8"?>
<imsx_POXEnvelopeRequest xmlns = "some_link (may be not required)">
<imsx_POXHeader>
<imsx_POXRequestHeaderInfo>
<imsx_version>V1.0</imsx_version>
<imsx_messageIdentifier>528243ba5241b</imsx_messageIdentifier>
</imsx_POXRequestHeaderInfo>
</imsx_POXHeader>
<imsx_POXBody>
<replaceResultRequest>
<resultRecord>
<sourcedGUID>
<sourcedId>feb-123-456-2929::28883</sourcedId>
</sourcedGUID>
<result>
<resultScore>
<language>en-us</language>
<textString>0.4</textString>
</resultScore>
</result>
</resultRecord>
</replaceResultRequest>
</imsx_POXBody>
</imsx_POXEnvelopeRequest>
Example of correct/incorrect answer XML body:: see response_xml_template.
"""
response_xml_template = textwrap.dedent("""\
<?xml version="1.0" encoding="UTF-8"?>
<imsx_POXEnvelopeResponse xmlns = "http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0">
<imsx_POXHeader>
<imsx_POXResponseHeaderInfo>
<imsx_version>V1.0</imsx_version>
<imsx_messageIdentifier>{imsx_messageIdentifier}</imsx_messageIdentifier>
<imsx_statusInfo>
<imsx_codeMajor>{imsx_codeMajor}</imsx_codeMajor>
<imsx_severity>status</imsx_severity>
<imsx_description>{imsx_description}</imsx_description>
<imsx_messageRefIdentifier>
</imsx_messageRefIdentifier>
</imsx_statusInfo>
</imsx_POXResponseHeaderInfo>
</imsx_POXHeader>
<imsx_POXBody>{response}</imsx_POXBody>
</imsx_POXEnvelopeResponse>
""")
# Returns when `action` is unsupported.
# Supported actions:
# - replaceResultRequest.
unsupported_values = {
'imsx_codeMajor': 'unsupported',
'imsx_description': 'Target does not support the requested operation.',
'imsx_messageIdentifier': 'unknown',
'response': ''
}
# Returns if:
# - past due grades are not accepted and grade is past due
# - score is out of range
# - can't parse response from TP;
# - can't verify OAuth signing or OAuth signing is incorrect.
failure_values = {
'imsx_codeMajor': 'failure',
'imsx_description': 'The request has failed.',
'imsx_messageIdentifier': 'unknown',
'response': ''
}
if not self.accept_grades_past_due and self.is_past_due():
failure_values['imsx_description'] = "Grade is past due"
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
try:
imsx_messageIdentifier, sourcedId, score, action = self.parse_grade_xml_body(request.body)
except Exception as e:
error_message = "Request body XML parsing error: " + escape(e.message)
log.debug("[LTI]: " + error_message)
failure_values['imsx_description'] = error_message
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
# Verify OAuth signing.
try:
self.verify_oauth_body_sign(request)
except (ValueError, LTIError) as e:
failure_values['imsx_messageIdentifier'] = escape(imsx_messageIdentifier)
error_message = "OAuth verification error: " + escape(e.message)
failure_values['imsx_description'] = error_message
log.debug("[LTI]: " + error_message)
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
real_user = self.system.get_real_user(urllib.unquote(sourcedId.split(':')[-1]))
if not real_user: # that means we can't save to database, as we do not have real user id.
failure_values['imsx_messageIdentifier'] = escape(imsx_messageIdentifier)
failure_values['imsx_description'] = "User not found."
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
if action == 'replaceResultRequest':
self.set_user_module_score(real_user, score, self.max_score())
values = {
'imsx_codeMajor': 'success',
'imsx_description': 'Score for {sourced_id} is now {score}'.format(sourced_id=sourcedId, score=score),
'imsx_messageIdentifier': escape(imsx_messageIdentifier),
'response': '<replaceResultResponse/>'
}
log.debug("[LTI]: Grade is saved.")
return Response(response_xml_template.format(**values), content_type="application/xml")
unsupported_values['imsx_messageIdentifier'] = escape(imsx_messageIdentifier)
log.debug("[LTI]: Incorrect action.")
return Response(response_xml_template.format(**unsupported_values), content_type='application/xml')
@classmethod
def parse_grade_xml_body(cls, body):
"""
Parses XML from request.body and returns parsed data
XML body should contain nsmap with namespace, that is specified in LTI specs.
Returns tuple: imsx_messageIdentifier, sourcedId, score, action
Raises Exception if can't parse.
"""
lti_spec_namespace = "http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0"
namespaces = {'def': lti_spec_namespace}
data = body.strip().encode('utf-8')
parser = etree.XMLParser(ns_clean=True, recover=True, encoding='utf-8')
root = etree.fromstring(data, parser=parser)
imsx_messageIdentifier = root.xpath("//def:imsx_messageIdentifier", namespaces=namespaces)[0].text or ''
sourcedId = root.xpath("//def:sourcedId", namespaces=namespaces)[0].text
score = root.xpath("//def:textString", namespaces=namespaces)[0].text
action = root.xpath("//def:imsx_POXBody", namespaces=namespaces)[0].getchildren()[0].tag.replace('{' + lti_spec_namespace + '}', '')
# Raise exception if score is not float or not in range 0.0-1.0 regarding spec.
score = float(score)
if not 0 <= score <= 1:
raise LTIError('score value outside the permitted range of 0-1.')
return imsx_messageIdentifier, sourcedId, score, action
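# Illustrative note (not part of the original module): for the sample request
# body shown in grade_handler's docstring above, this method returns
# ('528243ba5241b', 'feb-123-456-2929::28883', 0.4, 'replaceResultRequest').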
def verify_oauth_body_sign(self, request, content_type='application/x-www-form-urlencoded'):
"""
Verify grade request from LTI provider using OAuth body signing.
Uses http://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html::
This specification extends the OAuth signature to include integrity checks on HTTP request bodies
with content types other than application/x-www-form-urlencoded.
Arguments:
request: DjangoWebobRequest.
Raises:
LTIError if request is incorrect.
"""
client_key, client_secret = self.get_client_key_secret()
headers = {
'Authorization': unicode(request.headers.get('Authorization')),
'Content-Type': content_type,
}
sha1 = hashlib.sha1()
sha1.update(request.body)
oauth_body_hash = base64.b64encode(sha1.digest())
oauth_params = signature.collect_parameters(headers=headers, exclude_oauth_signature=False)
oauth_headers = dict(oauth_params)
oauth_signature = oauth_headers.pop('oauth_signature')
mock_request_lti_1 = mock.Mock(
uri=unicode(urllib.unquote(self.get_outcome_service_url())),
http_method=unicode(request.method),
params=oauth_headers.items(),
signature=oauth_signature
)
mock_request_lti_2 = mock.Mock(
uri=unicode(urllib.unquote(request.url)),
http_method=unicode(request.method),
params=oauth_headers.items(),
signature=oauth_signature
)
if oauth_body_hash != oauth_headers.get('oauth_body_hash'):
log.error(
"OAuth body hash verification failed, provided: {}, "
"calculated: {}, for url: {}, body is: {}".format(
oauth_headers.get('oauth_body_hash'),
oauth_body_hash,
self.get_outcome_service_url(),
request.body
)
)
raise LTIError("OAuth body hash verification is failed.")
if (not signature.verify_hmac_sha1(mock_request_lti_1, client_secret) and not
signature.verify_hmac_sha1(mock_request_lti_2, client_secret)):
log.error("OAuth signature verification failed, for "
"headers:{} url:{} method:{}".format(
oauth_headers,
self.get_outcome_service_url(),
unicode(request.method)
))
raise LTIError("OAuth signature verification has failed.")
def get_client_key_secret(self):
"""
Obtains client_key and client_secret credentials from current course.
"""
course = self.get_course()
for lti_passport in course.lti_passports:
try:
lti_id, key, secret = [i.strip() for i in lti_passport.split(':')]
except ValueError:
_ = self.runtime.service(self, "i18n").ugettext
msg = _('Could not parse LTI passport: {lti_passport}. Should be "id:key:secret" string.').format(
lti_passport='{0!r}'.format(lti_passport)
)
raise LTIError(msg)
if lti_id == self.lti_id.strip():
return key, secret
return '', ''
def is_past_due(self):
"""
Is it now past this problem's due date, including grace period?
"""
due_date = self.due # pylint: disable=no-member
if self.graceperiod is not None and due_date: # pylint: disable=no-member
close_date = due_date + self.graceperiod # pylint: disable=no-member
else:
close_date = due_date
return close_date is not None and datetime.datetime.now(UTC()) > close_date
class LTIDescriptor(LTIFields, MetadataOnlyEditingDescriptor, EmptyDataRawDescriptor):
"""
Descriptor for LTI Xmodule.
"""
module_class = LTIModule
grade_handler = module_attr('grade_handler')
preview_handler = module_attr('preview_handler')
lti_2_0_result_rest_handler = module_attr('lti_2_0_result_rest_handler')
clear_user_module_score = module_attr('clear_user_module_score')
get_outcome_service_url = module_attr('get_outcome_service_url')
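# ---------------------------------------------------------------------------
# Illustrative, standalone sketch (not part of the original module): signing a
# minimal LTI 1.1 launch payload with oauthlib, mirroring the client.sign()
# call in LTIModule.oauth_params() above. The key, secret, URL, and parameter
# values are hypothetical.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    _client = oauthlib.oauth1.Client(
        client_key=u'hypothetical_key',
        client_secret=u'hypothetical_secret')
    _body = {
        u'user_id': u'anonymous-student-id',
        u'oauth_callback': u'about:blank',
        u'lti_message_type': u'basic-lti-launch-request',
        u'lti_version': u'LTI-1p0',
        u'roles': u'Student',
        u'resource_link_id': u'example.com-resource-1',
    }
    # Signing a form-encoded POST places oauth_nonce, oauth_timestamp,
    # oauth_signature, etc. into the Authorization header.
    __, _headers, __ = _client.sign(
        u'https://lti.example.com/launch',
        http_method=u'POST',
        body=_body,
        headers={'Content-Type': 'application/x-www-form-urlencoded'})
    print(_headers['Authorization'])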
| mtlchun/edx | common/lib/xmodule/xmodule/lti_module.py | Python | agpl-3.0 | 37,531 | 0.002771 |
# -*- coding: utf-8 -*-
"""Utilities for performing merges."""
from . import api
def prepare(data):
"""Restructure/prepare data about merges for output."""
sha = data.get("sha")
commit = data.get("commit")
message = commit.get("message")
tree = commit.get("tree")
tree_sha = tree.get("sha")
return {"message": message, "sha": sha, "tree": {"sha": tree_sha}}
def merge(profile, head, base, commit_message=None):
"""Merge the head of a branch into the base branch.
Args:
profile
A profile generated from ``simplygithub.authentication.profile``.
Such profiles tell this module (i) the ``repo`` to connect to,
and (ii) the ``token`` to connect with.
head
The head to merge. It can be a SHA, or a branch name.
base
The name of the branch to merge the specified head into.
commit_message
The message to give for the commit.
Returns:
A dict with data about the merge.
"""
if not commit_message:
commit_message = "Merged " + head + " into " + base + "."
payload = {
"base": base,
"head": head,
"commit_message": commit_message,
}
response = api.post_merge_request(profile, payload)
data = None
if response.status_code == 201:
json_data = response.json()
data = prepare(json_data)
return data
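# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module). The profile
# shape (a dict carrying the repo and token, per merge()'s docstring) and all
# values below are hypothetical.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    profile = {"repo": "octocat/hello-world", "token": "hypothetical-token"}
    result = merge(profile, head="feature-branch", base="master")
    if result:
        print("Merge commit: " + result["sha"])
    else:
        print("Nothing was merged (non-201 response).")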
| jtpaasch/simplygithub | simplygithub/internals/merges.py | Python | mit | 1,433 | 0 |
#!/usr/bin/env python
## \file geometry.py
# \brief python package for running geometry analyses
# \author T. Lukaczyk, F. Palacios
# \version 5.0.0 "Raven"
#
# SU2 Original Developers: Dr. Francisco D. Palacios.
# Dr. Thomas D. Economon.
#
# SU2 Developers: Prof. Juan J. Alonso's group at Stanford University.
# Prof. Piero Colonna's group at Delft University of Technology.
# Prof. Nicolas R. Gauger's group at Kaiserslautern University of Technology.
# Prof. Alberto Guardone's group at Polytechnic University of Milan.
# Prof. Rafael Palacios' group at Imperial College London.
# Prof. Edwin van der Weide's group at the University of Twente.
# Prof. Vincent Terrapon's group at the University of Liege.
#
# Copyright (C) 2012-2017 SU2, the open-source CFD code.
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
# ----------------------------------------------------------------------
# Imports
# ----------------------------------------------------------------------
import os, sys, shutil, copy
from .. import io as su2io
from .interface import GEO as SU2_GEO
from ..util import ordered_bunch
# ----------------------------------------------------------------------
# Direct Simulation
# ----------------------------------------------------------------------
def geometry ( config , step = 1e-3 ):
""" info = SU2.run.geometry(config)
Runs an geometry analysis with:
SU2.run.decomp()
SU2.run.GEO()
Assumptions:
Performs both function and gradient analysis
Inputs:
config - an SU2 configuration
step - gradient finite difference step if config.GEO_MODE=GRADIENT
Outputs:
info - SU2 State with keys:
FUNCTIONS
GRADIENTS
Updates:
Executes in:
./
"""
# local copy
konfig = copy.deepcopy(config)
# unpack
function_name = konfig['GEO_PARAM']
func_filename = konfig['VALUE_OBJFUNC_FILENAME']
grad_filename = konfig['GRAD_OBJFUNC_FILENAME']
# choose dv values
Definition_DV = konfig['DEFINITION_DV']
n_DV = len(Definition_DV['KIND'])
if isinstance(step,list):
assert len(step) == n_DV , 'unexpected step vector length'
else:
step = [step]*n_DV
dv_old = [0.0]*n_DV # SU2_DOT input requirement, assumes linear superposition of design variables
dv_new = step
konfig.unpack_dvs(dv_new,dv_old)
# Run Solution
SU2_GEO(konfig)
# info out
info = su2io.State()
# get function values
if konfig.GEO_MODE == 'FUNCTION':
functions = su2io.tools.read_plot(func_filename)
for key,value in functions.items():
functions[key] = value[0]
info.FUNCTIONS.update( functions )
# get gradient_values
if konfig.GEO_MODE == 'GRADIENT':
gradients = su2io.tools.read_plot(grad_filename)
info.GRADIENTS.update( gradients )
return info
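# ----------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module): running a
# function-mode geometry analysis from a config file. The filename is
# hypothetical, and su2io.Config is assumed to be the usual loader.
# ----------------------------------------------------------------------
if __name__ == '__main__':
    konfig = su2io.Config('config_NACA0012.cfg')  # hypothetical config file
    konfig.GEO_MODE = 'FUNCTION'
    info = geometry(konfig)
    print(info.FUNCTIONS)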
| pawhewitt/Dev | SU2_PY/SU2/run/geometry.py | Python | lgpl-2.1 | 3,794 | 0.011861 |
def run_all():
import nose2
nose2.discover(module='pygraphml')
__all__ = ['run_all']
| hadim/pygraphml | pygraphml/tests/__init__.py | Python | bsd-3-clause | 93 | 0 |
import re, sys, time, splunk.Intersplunk
import urllib, zlib, base64
import logging, logging.handlers
try:
import xml.etree.cElementTree as xml
except ImportError:
import xml.etree.ElementTree as xml
def setup_logger(LOGGER_NAME,LOGFILE_NAME):
logger = logging.getLogger(LOGGER_NAME)
file_handler = logging.handlers.RotatingFileHandler(LOGFILE_NAME)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
logger.setLevel(logging.ERROR)
return(logger)
def set_logger_level(LOGGER_LEVEL='NOTSET'):
logger.info('set_logger_level(' + LOGGER_LEVEL + ') called...')
if LOGGER_LEVEL == 'NOTSET':
logger.setLevel(logging.NOTSET)
elif LOGGER_LEVEL == 'DEBUG':
logger.setLevel(logging.DEBUG)
elif LOGGER_LEVEL == 'INFO':
logger.setLevel(logging.INFO)
elif LOGGER_LEVEL == 'WARNING':
logger.setLevel(logging.WARNING)
elif LOGGER_LEVEL == 'ERROR':
logger.setLevel(logging.ERROR)
elif LOGGER_LEVEL == 'CRITICAL':
logger.setLevel(logging.CRITICAL)
return(None)
def uri_unescape(string):
# Parameters
# string : URI escaped string
# Return
# URI unescaped string
logger.debug('uri_unescape() called...')
uri_unescaped_string = None
try:
uri_unescaped_string = urllib.unquote(string) # urldecode Base64 encoded SAML AuthnRequest
except:
return(string)
return(uri_unescaped_string)
def base64_decode(string):
# Parameters
# string : Base64 encoded string
# Return
# decoded/plain text string
logger.debug('base64_decode() called...')
base64_decoded_string = None
try:
base64_decoded_string = base64.b64decode(string) # decode Base64 encoded XML document
except:
return(string)
return(base64_decoded_string)
def zlib_decompress(string):
# Parameters
# string : zlib compressed string
# Return
# inflated/uncompressed string
zlib_decompressed_string = None
try:
zlib_decompressed_string = zlib.decompress(string, -15) # uncompress XML document
except:
return(string)
return(zlib_decompressed_string)
def xml2dict(xmlstring, prepend_string=None, remove_namespace=True):
logger.debug('xml2dict() called...')
# Parameters
# xmlstring : XML document
# prepend_string : String to add to the beginning of each key
# remove_namespace : If set to True (default), the XML namespace is removed from key names
# Return
# xmlkv : dict of XML element names and values. XML tags and attribute names are concatenated to form the returned key
# TODO: dict keys should indicate the complete XML hierarchy.
# Example: <Root><Element1><Element2 Attribute="stuff" /></Element1></Root> = xmlkv['Root_Element1_Element2_Attribute']
xmlkv = {}
try:
root = xml.fromstring(xmlstring)
tree = xml.ElementTree(root)
except:
logger.warning('Error parsing XML:' + xmlstring)
return(None)
root_tag = repr(root).split('}',1)[1].split('\'',1)[0].replace('\n','').replace('\r','') # strip XML namespace and remove newline characters
if prepend_string is not None:
root_tag = prepend_string + root_tag
for element in tree.iter():
if remove_namespace == True:
if '}' in element.tag:
element.tag = element.tag.split('}',1)[1].replace('\n','').replace('\r','') # strip XML namespaces and remove newline characters
try:
if element.text:
key = root_tag + '_' + element.tag
val = element.text = element.text.replace('\n','').replace('\r','') # remove newline characters
if val.strip():
xmlkv[key] = val
elif element.attrib is not None:
for attribute in element.attrib:
if attribute is not None:
key = root_tag + '_' + element.tag + '_' + attribute.replace('\n','').replace('\r','') # remove newline characters
key = key.replace('__','_') # replace 2 consecutive underscores with a single underscore (this only happens with the tag or attribute name begins with an underscore)
val = element.attrib.get(attribute).replace('\n','').replace('\r','') # remove newline characters
if val.strip():
xmlkv[key] = val
except:
logger.warning(root_tag + '_' + element.tag, element.text)
continue
return(xmlkv)
def dosaml(results,settings):
# Parameters
# string : SAML message
# type : type of SAML message (AuthnRequest, Response, AttributeQuery, etc...) If type is not provided we will try to detect it
# Return
# dict containing SAML message key/value pairs
try:
fields, argvals = splunk.Intersplunk.getKeywordsAndOptions()
for _result in results:
for _field in fields:
if _field in _result:
saml_message = _result[_field]
saml_message = uri_unescape(saml_message)
saml_message = base64_decode(saml_message)
saml_message = zlib_decompress(saml_message)
saml_message_dict = xml2dict(saml_message,'SAML')
if saml_message_dict is not None:
logger.debug(repr(saml_message_dict))
_result.update(saml_message_dict) # create new fields with SAML attributes
#append extracted_saml_fields to results
splunk.Intersplunk.outputResults(results)
except:
import traceback
stack = traceback.format_exc()
results = splunk.Intersplunk.generateErrorResults("Error : Traceback: " + str(stack))
logger.error("Error : " + str(stack))
splunk.Intersplunk.outputResults(results)
logger = setup_logger('SplunkSAML','/opt/splunk/var/log/splunk/saml_utils.log')
#set_logger_level('DEBUG')
results, dummyresults, settings = splunk.Intersplunk.getOrganizedResults()
dosaml(results, settings)  # dosaml() emits the results itself via outputResults()
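# ---------------------------------------------------------------------------
# Illustrative, standalone round-trip (not part of the original command):
# deflate + Base64 + URL-escape a synthetic AuthnRequest, then recover it with
# the helpers above, as a SAML redirect binding would require. The sample XML
# is hypothetical, and running this file directly still needs the Splunk
# libraries imported at the top.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    sample_xml = ('<samlp:AuthnRequest '
                  'xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol" '
                  'ID="_hypothetical" Version="2.0"/>')
    raw_deflate = zlib.compress(sample_xml)[2:-4]  # drop zlib header/adler32
    wire_value = urllib.quote(base64.b64encode(raw_deflate))
    recovered = zlib_decompress(base64_decode(uri_unescape(wire_value)))
    assert recovered == sample_xml
    print(xml2dict(recovered, 'SAML'))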
| bcarroll/splunk_samltools | bin/splunksaml.py | Python | apache-2.0 | 6,313 | 0.011722 |
#!/usr/bin/python3
#
# Copyright (C) 2012 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script for testing ganeti.rapi.testutils"""
import unittest
from ganeti import compat
from ganeti import constants
from ganeti import errors
from ganeti import opcodes
from ganeti import luxi
from ganeti import rapi
from ganeti import utils
import ganeti.rapi.testutils
import ganeti.rapi.client
import testutils
KNOWN_UNUSED_LUXI = compat.UniqueFrozenset([
luxi.REQ_SUBMIT_MANY_JOBS,
luxi.REQ_SUBMIT_JOB_TO_DRAINED_QUEUE,
luxi.REQ_ARCHIVE_JOB,
luxi.REQ_AUTO_ARCHIVE_JOBS,
luxi.REQ_CHANGE_JOB_PRIORITY,
luxi.REQ_PICKUP_JOB,
luxi.REQ_QUERY_EXPORTS,
luxi.REQ_QUERY_CONFIG_VALUES,
luxi.REQ_QUERY_NETWORKS,
luxi.REQ_QUERY_TAGS,
luxi.REQ_SET_DRAIN_FLAG,
luxi.REQ_SET_WATCHER_PAUSE,
])
# Global variable for storing used LUXI calls
_used_luxi_calls = None
class TestHideInternalErrors(unittest.TestCase):
def test(self):
def inner():
raise errors.GenericError("error")
fn = rapi.testutils._HideInternalErrors(inner)
self.assertRaises(rapi.testutils.VerificationError, fn)
class TestVerifyOpInput(unittest.TestCase):
def testUnknownOpId(self):
voi = rapi.testutils.VerifyOpInput
self.assertRaises(rapi.testutils.VerificationError, voi, "UNK_OP_ID", None)
def testUnknownParameter(self):
voi = rapi.testutils.VerifyOpInput
self.assertRaises(rapi.testutils.VerificationError, voi,
opcodes.OpClusterRename.OP_ID, {
"unk": "unk",
})
def testWrongParameterValue(self):
voi = rapi.testutils.VerifyOpInput
self.assertRaises(rapi.testutils.VerificationError, voi,
opcodes.OpClusterRename.OP_ID, {
"name": object(),
})
def testSuccess(self):
voi = rapi.testutils.VerifyOpInput
voi(opcodes.OpClusterRename.OP_ID, {
"name": "new-name.example.com",
})
class TestVerifyOpResult(unittest.TestCase):
def testSuccess(self):
vor = rapi.testutils.VerifyOpResult
vor(opcodes.OpClusterVerify.OP_ID, {
constants.JOB_IDS_KEY: [
(False, "error message"),
],
})
def testWrongResult(self):
vor = rapi.testutils.VerifyOpResult
self.assertRaises(rapi.testutils.VerificationError, vor,
opcodes.OpClusterVerify.OP_ID, [])
def testNoResultCheck(self):
vor = rapi.testutils.VerifyOpResult
vor(opcodes.OpTestDummy.OP_ID, None)
class TestInputTestClient(unittest.TestCase):
def setUp(self):
self.cl = rapi.testutils.InputTestClient()
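  # Record which LUXI methods the client touched so CustomTestRunner can
  # verify LUXI coverage across the whole run.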
def tearDown(self):
_used_luxi_calls.update(self.cl._GetLuxiCalls())
def testGetInfo(self):
self.assertTrue(self.cl.GetInfo() is NotImplemented)
def testPrepareExport(self):
result = self.cl.PrepareExport("inst1.example.com",
constants.EXPORT_MODE_LOCAL)
self.assertTrue(result is NotImplemented)
self.assertRaises(rapi.testutils.VerificationError, self.cl.PrepareExport,
"inst1.example.com", "###invalid###")
def testGetJobs(self):
self.assertTrue(self.cl.GetJobs() is NotImplemented)
def testQuery(self):
result = self.cl.Query(constants.QR_NODE, ["name"])
self.assertTrue(result is NotImplemented)
def testQueryFields(self):
result = self.cl.QueryFields(constants.QR_INSTANCE)
self.assertTrue(result is NotImplemented)
def testCancelJob(self):
self.assertTrue(self.cl.CancelJob("1") is NotImplemented)
def testGetNodes(self):
self.assertTrue(self.cl.GetNodes() is NotImplemented)
def testGetInstances(self):
self.assertTrue(self.cl.GetInstances() is NotImplemented)
def testGetGroups(self):
self.assertTrue(self.cl.GetGroups() is NotImplemented)
def testWaitForJobChange(self):
result = self.cl.WaitForJobChange("1", ["id"], None, None)
self.assertTrue(result is NotImplemented)
def testGetFilters(self):
self.assertTrue(self.cl.GetFilters() is NotImplemented)
def testGetFilter(self):
result = self.cl.GetFilter("4364c043-f232-41e3-837f-f1ce846f21d2")
self.assertTrue(result is NotImplemented)
def testReplaceFilter(self):
self.assertTrue(self.cl.ReplaceFilter(
uuid="c6a70f02-facb-4e37-b344-54f146dd0396",
priority=1,
predicates=[["jobid", [">", "id", "watermark"]]],
action="CONTINUE",
reason_trail=["testReplaceFilter", "myreason", utils.EpochNano()],
) is NotImplemented)
def testAddFilter(self):
self.assertTrue(self.cl.AddFilter(
priority=1,
predicates=[["jobid", [">", "id", "watermark"]]],
action="CONTINUE",
reason_trail=["testAddFilter", "myreason", utils.EpochNano()],
) is NotImplemented)
def testDeleteFilter(self):
self.assertTrue(self.cl.DeleteFilter(
uuid="c6a70f02-facb-4e37-b344-54f146dd0396",
) is NotImplemented)
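# Test runner that fails the run unless every LUXI call is either exercised
# by the tests above or explicitly listed in KNOWN_UNUSED_LUXI.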
class CustomTestRunner(unittest.TextTestRunner):
def run(self, *args):
global _used_luxi_calls
assert _used_luxi_calls is None
diff = (KNOWN_UNUSED_LUXI - luxi.REQ_ALL)
assert not diff, "Non-existing LUXI calls listed as unused: %s" % diff
_used_luxi_calls = set()
try:
# Run actual tests
result = unittest.TextTestRunner.run(self, *args)
diff = _used_luxi_calls & KNOWN_UNUSED_LUXI
if diff:
raise AssertionError("LUXI methods marked as unused were called: %s" %
utils.CommaJoin(diff))
diff = (luxi.REQ_ALL - KNOWN_UNUSED_LUXI - _used_luxi_calls)
if diff:
raise AssertionError("The following LUXI methods were not used: %s" %
utils.CommaJoin(diff))
finally:
# Reset global variable
_used_luxi_calls = None
return result
if __name__ == "__main__":
testutils.GanetiTestProgram(testRunner=CustomTestRunner)
| ganeti/ganeti | test/py/ganeti.rapi.testutils_unittest.py | Python | bsd-2-clause | 7,060 | 0.005949 |
a, b = map(int, input().split())
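# Build an a-by-b checkerboard: '*' where row+column is odd, '.' elsewhere.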
array = []
for i in range(a):
array.append([])
for j in range(b):
if (i+j) % 2:
array[i].append('*')
else:
array[i].append('.')
#for i in range(n):
# for j in range(n):
# if i == j or i == n//2 or j == n//2 or i == n-j-1:
# array[i][j] = "*"
for i in range(a):
for j in range(b):
        print(array[i][j], end=" ")
print()
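# Example: for input "3 4" this prints (each cell followed by a space):
# . * . *
# * . * .
# . * . *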
| lesina/labs2016 | contests_1sem/6-7/E.py | Python | gpl-3.0 | 443 | 0.006772 |
from __future__ import print_function, unicode_literals
from datetime import timedelta
import logging
import os
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.core.exceptions import ValidationError
from django.core.files.uploadedfile import SimpleUploadedFile
from django.template import Context, Template
from django.utils import six
from djblets.siteconfig.models import SiteConfiguration
from djblets.testing.decorators import add_fixtures
from kgb import SpyAgency
from reviewboard.accounts.models import Profile, LocalSiteProfile
from reviewboard.attachments.models import FileAttachment
from reviewboard.reviews.forms import DefaultReviewerForm, GroupForm
from reviewboard.reviews.markdown_utils import (markdown_escape,
markdown_unescape)
from reviewboard.reviews.models import (Comment,
DefaultReviewer,
Group,
ReviewRequest,
ReviewRequestDraft,
Review,
Screenshot)
from reviewboard.scmtools.core import Commit
from reviewboard.scmtools.models import Repository, Tool
from reviewboard.site.models import LocalSite
from reviewboard.site.urlresolvers import local_site_reverse
from reviewboard.testing import TestCase
class ReviewRequestManagerTests(TestCase):
"""Tests ReviewRequestManager functions."""
fixtures = ['test_users']
@add_fixtures(['test_scmtools'])
def test_create_with_site(self):
"""Testing ReviewRequest.objects.create with LocalSite"""
user = User.objects.get(username='doc')
local_site = LocalSite.objects.create(name='test')
repository = self.create_repository()
review_request = ReviewRequest.objects.create(
user, repository, local_site=local_site)
self.assertEqual(review_request.repository, repository)
self.assertEqual(review_request.local_site, local_site)
self.assertEqual(review_request.local_id, 1)
@add_fixtures(['test_scmtools'])
def test_create_with_site_and_commit_id(self):
"""Testing ReviewRequest.objects.create with LocalSite and commit ID"""
user = User.objects.get(username='doc')
local_site = LocalSite.objects.create(name='test')
repository = self.create_repository()
review_request = ReviewRequest.objects.create(
user, repository,
commit_id='123',
local_site=local_site)
self.assertEqual(review_request.repository, repository)
self.assertEqual(review_request.commit_id, '123')
self.assertEqual(review_request.local_site, local_site)
self.assertEqual(review_request.local_id, 1)
@add_fixtures(['test_scmtools'])
def test_create_with_site_and_commit_id_not_unique(self):
"""Testing ReviewRequest.objects.create with LocalSite and
commit ID that is not unique
"""
user = User.objects.get(username='doc')
local_site = LocalSite.objects.create(name='test')
repository = self.create_repository()
# This one should be fine.
ReviewRequest.objects.create(user, repository, commit_id='123',
local_site=local_site)
self.assertEqual(local_site.review_requests.count(), 1)
# This one will yell.
self.assertRaises(
ValidationError,
lambda: ReviewRequest.objects.create(
user, repository,
commit_id='123',
local_site=local_site))
# Make sure that entry doesn't exist in the database.
self.assertEqual(local_site.review_requests.count(), 1)
@add_fixtures(['test_scmtools'])
def test_create_with_site_and_commit_id_and_fetch_problem(self):
"""Testing ReviewRequest.objects.create with LocalSite and
commit ID with problem fetching commit details
"""
user = User.objects.get(username='doc')
local_site = LocalSite.objects.create(name='test')
repository = self.create_repository()
self.assertEqual(local_site.review_requests.count(), 0)
ReviewRequest.objects.create(
user, repository,
commit_id='123',
local_site=local_site,
create_from_commit_id=True)
# Make sure that entry doesn't exist in the database.
self.assertEqual(local_site.review_requests.count(), 1)
review_request = local_site.review_requests.get()
self.assertEqual(review_request.local_id, 1)
self.assertEqual(review_request.commit_id, '123')
def test_public(self):
"""Testing ReviewRequest.objects.public"""
user1 = User.objects.get(username='doc')
user2 = User.objects.get(username='grumpy')
self.create_review_request(summary='Test 1',
publish=True,
submitter=user1)
self.create_review_request(summary='Test 2',
submitter=user2)
self.create_review_request(summary='Test 3',
status='S',
public=True,
submitter=user1)
self.create_review_request(summary='Test 4',
status='S',
public=True,
submitter=user2)
self.create_review_request(summary='Test 5',
status='D',
public=True,
submitter=user1)
self.create_review_request(summary='Test 6',
status='D',
submitter=user2)
self.assertValidSummaries(
ReviewRequest.objects.public(user=user1),
[
'Test 1',
])
self.assertValidSummaries(
ReviewRequest.objects.public(status=None),
[
'Test 5',
'Test 4',
'Test 3',
'Test 1',
])
self.assertValidSummaries(
ReviewRequest.objects.public(user=user2, status=None),
[
'Test 6',
'Test 5',
'Test 4',
'Test 3',
'Test 2',
'Test 1'
])
@add_fixtures(['test_scmtools'])
def test_public_without_private_repo_access(self):
"""Testing ReviewRequest.objects.public without access to private
repositories
"""
user = User.objects.get(username='grumpy')
repository = self.create_repository(public=False)
review_request = self.create_review_request(repository=repository,
publish=True)
self.assertFalse(review_request.is_accessible_by(user))
review_requests = ReviewRequest.objects.public(user=user)
self.assertEqual(review_requests.count(), 0)
@add_fixtures(['test_scmtools'])
def test_public_with_private_repo_access(self):
"""Testing ReviewRequest.objects.public with access to private
repositories
"""
user = User.objects.get(username='grumpy')
repository = self.create_repository(public=False)
repository.users.add(user)
review_request = self.create_review_request(repository=repository,
publish=True)
self.assertTrue(review_request.is_accessible_by(user))
review_requests = ReviewRequest.objects.public(user=user)
self.assertEqual(review_requests.count(), 1)
@add_fixtures(['test_scmtools'])
def test_public_with_private_repo_access_through_group(self):
"""Testing ReviewRequest.objects.public with access to private
repositories
"""
user = User.objects.get(username='grumpy')
group = self.create_review_group(invite_only=True)
group.users.add(user)
repository = self.create_repository(public=False)
repository.review_groups.add(group)
review_request = self.create_review_request(repository=repository,
publish=True)
self.assertTrue(review_request.is_accessible_by(user))
review_requests = ReviewRequest.objects.public(user=user)
self.assertEqual(review_requests.count(), 1)
def test_public_without_private_group_access(self):
"""Testing ReviewRequest.objects.public without access to private
group
"""
user = User.objects.get(username='grumpy')
group = self.create_review_group(invite_only=True)
review_request = self.create_review_request(publish=True)
review_request.target_groups.add(group)
self.assertFalse(review_request.is_accessible_by(user))
review_requests = ReviewRequest.objects.public(user=user)
self.assertEqual(review_requests.count(), 0)
def test_public_with_private_group_access(self):
"""Testing ReviewRequest.objects.public with access to private
group
"""
user = User.objects.get(username='grumpy')
group = self.create_review_group(invite_only=True)
group.users.add(user)
review_request = self.create_review_request(publish=True)
review_request.target_groups.add(group)
self.assertTrue(review_request.is_accessible_by(user))
review_requests = ReviewRequest.objects.public(user=user)
self.assertEqual(review_requests.count(), 1)
@add_fixtures(['test_scmtools'])
def test_public_with_private_repo_and_public_group(self):
"""Testing ReviewRequest.objects.public without access to private
repositories and with access to private group
"""
user = User.objects.get(username='grumpy')
group = self.create_review_group()
repository = self.create_repository(public=False)
review_request = self.create_review_request(repository=repository,
publish=True)
review_request.target_groups.add(group)
self.assertFalse(review_request.is_accessible_by(user))
review_requests = ReviewRequest.objects.public(user=user)
self.assertEqual(review_requests.count(), 0)
@add_fixtures(['test_scmtools'])
def test_public_with_private_group_and_public_repo(self):
"""Testing ReviewRequest.objects.public with access to private
group and without access to private group
"""
user = User.objects.get(username='grumpy')
group = self.create_review_group(invite_only=True)
repository = self.create_repository(public=False)
repository.users.add(user)
review_request = self.create_review_request(repository=repository,
publish=True)
review_request.target_groups.add(group)
self.assertFalse(review_request.is_accessible_by(user))
review_requests = ReviewRequest.objects.public(user=user)
self.assertEqual(review_requests.count(), 0)
@add_fixtures(['test_scmtools'])
def test_public_with_private_repo_and_owner(self):
"""Testing ReviewRequest.objects.public without access to private
repository and as the submitter
"""
user = User.objects.get(username='grumpy')
repository = self.create_repository(public=False)
review_request = self.create_review_request(repository=repository,
submitter=user,
publish=True)
self.assertTrue(review_request.is_accessible_by(user))
review_requests = ReviewRequest.objects.public(user=user)
self.assertEqual(review_requests.count(), 1)
def test_public_with_private_group_and_owner(self):
"""Testing ReviewRequest.objects.public without access to private
group and as the submitter
"""
user = User.objects.get(username='grumpy')
group = self.create_review_group(invite_only=True)
review_request = self.create_review_request(submitter=user,
publish=True)
review_request.target_groups.add(group)
self.assertTrue(review_request.is_accessible_by(user))
review_requests = ReviewRequest.objects.public(user=user)
self.assertEqual(review_requests.count(), 1)
@add_fixtures(['test_scmtools'])
def test_public_with_private_repo_and_target_people(self):
"""Testing ReviewRequest.objects.public without access to private
repository and user in target_people
"""
user = User.objects.get(username='grumpy')
repository = self.create_repository(public=False)
review_request = self.create_review_request(repository=repository,
publish=True)
review_request.target_people.add(user)
self.assertFalse(review_request.is_accessible_by(user))
review_requests = ReviewRequest.objects.public(user=user)
self.assertEqual(review_requests.count(), 0)
def test_public_with_private_group_and_target_people(self):
"""Testing ReviewRequest.objects.public without access to private
group and user in target_people
"""
user = User.objects.get(username='grumpy')
group = self.create_review_group(invite_only=True)
review_request = self.create_review_request(publish=True)
review_request.target_groups.add(group)
review_request.target_people.add(user)
self.assertTrue(review_request.is_accessible_by(user))
review_requests = ReviewRequest.objects.public(user=user)
self.assertEqual(review_requests.count(), 1)
def test_to_group(self):
"""Testing ReviewRequest.objects.to_group"""
user1 = User.objects.get(username='doc')
group1 = self.create_review_group(name='privgroup')
group1.users.add(user1)
review_request = self.create_review_request(summary='Test 1',
public=True,
submitter=user1)
review_request.target_groups.add(group1)
review_request = self.create_review_request(summary='Test 2',
public=False,
submitter=user1)
review_request.target_groups.add(group1)
review_request = self.create_review_request(summary='Test 3',
public=True,
status='S',
submitter=user1)
review_request.target_groups.add(group1)
self.assertValidSummaries(
ReviewRequest.objects.to_group("privgroup", None),
[
'Test 1',
])
self.assertValidSummaries(
ReviewRequest.objects.to_group("privgroup", None, status=None),
[
'Test 3',
'Test 1',
])
def test_to_user_group(self):
"""Testing ReviewRequest.objects.to_user_groups"""
user1 = User.objects.get(username='doc')
user2 = User.objects.get(username='grumpy')
group1 = self.create_review_group(name='group1')
group1.users.add(user1)
group2 = self.create_review_group(name='group2')
group2.users.add(user2)
review_request = self.create_review_request(summary='Test 1',
public=True,
submitter=user1)
review_request.target_groups.add(group1)
review_request = self.create_review_request(summary='Test 2',
submitter=user2,
public=True,
status='S')
review_request.target_groups.add(group1)
review_request = self.create_review_request(summary='Test 3',
public=True,
submitter=user2)
review_request.target_groups.add(group1)
review_request.target_groups.add(group2)
self.assertValidSummaries(
ReviewRequest.objects.to_user_groups("doc", local_site=None),
[
'Test 3',
'Test 1',
])
self.assertValidSummaries(
ReviewRequest.objects.to_user_groups(
"doc", status=None, local_site=None),
[
'Test 3',
'Test 2',
'Test 1',
])
self.assertValidSummaries(
ReviewRequest.objects.to_user_groups(
"grumpy", user=user2, local_site=None),
[
'Test 3',
])
def test_to_user_directly(self):
"""Testing ReviewRequest.objects.to_user_directly"""
user1 = User.objects.get(username='doc')
user2 = User.objects.get(username='grumpy')
group1 = self.create_review_group(name='group1')
group1.users.add(user1)
group2 = self.create_review_group(name='group2')
group2.users.add(user2)
review_request = self.create_review_request(summary='Test 1',
public=True,
submitter=user1)
review_request.target_groups.add(group1)
review_request.target_people.add(user2)
review_request = self.create_review_request(summary='Test 2',
submitter=user2,
status='S')
review_request.target_groups.add(group1)
review_request.target_people.add(user2)
review_request.target_people.add(user1)
review_request = self.create_review_request(summary='Test 3',
public=True,
submitter=user2)
review_request.target_groups.add(group1)
review_request.target_groups.add(group2)
review_request.target_people.add(user1)
review_request = self.create_review_request(summary='Test 4',
public=True,
status='S',
submitter=user2)
review_request.target_people.add(user1)
self.assertValidSummaries(
ReviewRequest.objects.to_user_directly("doc", local_site=None),
[
'Test 3',
])
self.assertValidSummaries(
ReviewRequest.objects.to_user_directly("doc", status=None),
[
'Test 4',
'Test 3',
])
self.assertValidSummaries(
ReviewRequest.objects.to_user_directly(
"doc", user2, status=None, local_site=None),
[
'Test 4',
'Test 3',
'Test 2',
])
def test_from_user(self):
"""Testing ReviewRequest.objects.from_user"""
user1 = User.objects.get(username='doc')
self.create_review_request(summary='Test 1',
public=True,
submitter=user1)
self.create_review_request(summary='Test 2',
public=False,
submitter=user1)
self.create_review_request(summary='Test 3',
public=True,
status='S',
submitter=user1)
self.assertValidSummaries(
ReviewRequest.objects.from_user("doc", local_site=None),
[
'Test 1',
])
self.assertValidSummaries(
ReviewRequest.objects.from_user("doc", status=None,
local_site=None),
[
'Test 3',
'Test 1',
])
self.assertValidSummaries(
ReviewRequest.objects.from_user(
"doc", user=user1, status=None, local_site=None),
[
'Test 3',
'Test 2',
'Test 1',
])
    def test_to_user(self):
"""Testing ReviewRequest.objects.to_user"""
user1 = User.objects.get(username='doc')
user2 = User.objects.get(username='grumpy')
group1 = self.create_review_group(name='group1')
group1.users.add(user1)
group2 = self.create_review_group(name='group2')
group2.users.add(user2)
review_request = self.create_review_request(summary='Test 1',
publish=True,
submitter=user1)
review_request.target_groups.add(group1)
review_request = self.create_review_request(summary='Test 2',
submitter=user2,
status='S')
review_request.target_groups.add(group1)
review_request.target_people.add(user2)
review_request.target_people.add(user1)
review_request = self.create_review_request(summary='Test 3',
publish=True,
submitter=user2)
review_request.target_groups.add(group1)
review_request.target_groups.add(group2)
review_request.target_people.add(user1)
review_request = self.create_review_request(summary='Test 4',
publish=True,
status='S',
submitter=user2)
review_request.target_groups.add(group1)
review_request.target_groups.add(group2)
review_request.target_people.add(user1)
self.assertValidSummaries(
ReviewRequest.objects.to_user("doc", local_site=None),
[
'Test 3',
'Test 1',
])
self.assertValidSummaries(
ReviewRequest.objects.to_user("doc", status=None, local_site=None),
[
'Test 4',
'Test 3',
'Test 1',
])
self.assertValidSummaries(
ReviewRequest.objects.to_user(
"doc", user=user2, status=None, local_site=None),
[
'Test 4',
'Test 3',
'Test 2',
'Test 1',
])
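    # Checks membership in both directions so the failure message names the
    # exact summary that is missing or unexpected.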
def assertValidSummaries(self, review_requests, summaries):
r_summaries = [r.summary for r in review_requests]
for summary in r_summaries:
self.assertIn(summary, summaries,
'summary "%s" not found in summary list'
% summary)
for summary in summaries:
self.assertIn(summary, r_summaries,
'summary "%s" not found in review request list'
% summary)
class ReviewRequestTests(TestCase):
"""Tests for ReviewRequest."""
fixtures = ['test_users']
def test_public_with_discard_reopen_submitted(self):
"""Testing ReviewRequest.public when discarded, reopened, submitted."""
review_request = self.create_review_request(publish=True)
self.assertTrue(review_request.public)
review_request.close(ReviewRequest.DISCARDED)
self.assertTrue(review_request.public)
review_request.reopen()
self.assertFalse(review_request.public)
review_request.close(ReviewRequest.SUBMITTED)
self.assertTrue(review_request.public)
def test_unicode_summary_and_str(self):
"""Testing ReviewRequest.__str__ with unicode summaries."""
review_request = self.create_review_request(
summary='\u203e\u203e', publish=True)
self.assertEqual(six.text_type(review_request), '\u203e\u203e')
class ViewTests(TestCase):
"""Tests for views in reviewboard.reviews.views"""
fixtures = ['test_users', 'test_scmtools', 'test_site']
def setUp(self):
super(ViewTests, self).setUp()
self.siteconfig = SiteConfiguration.objects.get_current()
self.siteconfig.set("auth_require_sitewide_login", False)
self.siteconfig.save()
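    # A response may render with several template contexts; scan them all
    # for the first one that defines varname.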
def _get_context_var(self, response, varname):
for context in response.context:
if varname in context:
return context[varname]
return None
def test_review_detail_redirect_no_slash(self):
"""Testing review_detail view redirecting with no trailing slash"""
response = self.client.get('/r/1')
self.assertEqual(response.status_code, 301)
def test_review_detail(self):
"""Testing review_detail view"""
review_request = self.create_review_request(publish=True)
response = self.client.get('/r/%d/' % review_request.id)
self.assertEqual(response.status_code, 200)
request = self._get_context_var(response, 'review_request')
self.assertEqual(request.pk, review_request.pk)
def test_review_detail_context(self):
"""Testing review_detail view's context"""
# Make sure this request is made while logged in, to catch the
# login-only pieces of the review_detail view.
self.client.login(username='admin', password='admin')
username = 'admin'
summary = 'This is a test summary'
description = 'This is my description'
testing_done = 'Some testing'
review_request = self.create_review_request(
publish=True,
submitter=username,
summary=summary,
description=description,
testing_done=testing_done)
response = self.client.get('/r/%s/' % review_request.pk)
self.assertEqual(response.status_code, 200)
request = self._get_context_var(response, 'review_request')
self.assertEqual(request.submitter.username, username)
self.assertEqual(request.summary, summary)
self.assertEqual(request.description, description)
self.assertEqual(request.testing_done, testing_done)
self.assertEqual(request.pk, review_request.pk)
def test_review_detail_diff_comment_ordering(self):
"""Testing review_detail and ordering of diff comments on a review"""
comment_text_1 = "Comment text 1"
comment_text_2 = "Comment text 2"
comment_text_3 = "Comment text 3"
review_request = self.create_review_request(create_repository=True,
publish=True)
diffset = self.create_diffset(review_request)
filediff = self.create_filediff(diffset)
# Create the users who will be commenting.
user1 = User.objects.get(username='doc')
user2 = User.objects.get(username='dopey')
# Create the master review.
main_review = self.create_review(review_request, user=user1)
main_comment = self.create_diff_comment(main_review, filediff,
text=comment_text_1)
main_review.publish()
# First reply
reply1 = self.create_reply(
main_review,
user=user1,
timestamp=(main_review.timestamp + timedelta(days=1)))
self.create_diff_comment(reply1, filediff, text=comment_text_2,
reply_to=main_comment)
# Second reply
reply2 = self.create_reply(
main_review,
user=user2,
timestamp=(main_review.timestamp + timedelta(days=2)))
self.create_diff_comment(reply2, filediff, text=comment_text_3,
reply_to=main_comment)
# Publish them out of order.
reply2.publish()
reply1.publish()
# Make sure they published in the order expected.
self.assertTrue(reply1.timestamp > reply2.timestamp)
# Make sure they're looked up in the order expected.
comments = list(Comment.objects.filter(
review__review_request=review_request))
self.assertEqual(len(comments), 3)
self.assertEqual(comments[0].text, comment_text_1)
self.assertEqual(comments[1].text, comment_text_3)
self.assertEqual(comments[2].text, comment_text_2)
# Now figure out the order on the page.
response = self.client.get('/r/%d/' % review_request.pk)
self.assertEqual(response.status_code, 200)
entries = response.context['entries']
self.assertEqual(len(entries), 1)
entry = entries[0]
comments = entry['comments']['diff_comments']
self.assertEqual(len(comments), 1)
self.assertEqual(comments[0].text, comment_text_1)
replies = comments[0].public_replies()
self.assertEqual(len(replies), 2)
self.assertEqual(replies[0].text, comment_text_3)
self.assertEqual(replies[1].text, comment_text_2)
def test_review_detail_file_attachment_visibility(self):
"""Testing visibility of file attachments on review requests."""
caption_1 = 'File Attachment 1'
caption_2 = 'File Attachment 2'
caption_3 = 'File Attachment 3'
comment_text_1 = "Comment text 1"
comment_text_2 = "Comment text 2"
user1 = User.objects.get(username='doc')
review_request = ReviewRequest.objects.create(user1, None)
# Add two file attachments. One active, one inactive.
filename = os.path.join(settings.STATIC_ROOT,
'rb', 'images', 'trophy.png')
        with open(filename, 'rb') as f:
            file = SimpleUploadedFile(f.name, f.read(),
                                      content_type='image/png')
file1 = FileAttachment.objects.create(caption=caption_1,
file=file,
mimetype='image/png')
file2 = FileAttachment.objects.create(caption=caption_2,
file=file,
mimetype='image/png')
review_request.file_attachments.add(file1)
review_request.inactive_file_attachments.add(file2)
review_request.publish(user1)
# Create one on a draft with a new file attachment.
draft = ReviewRequestDraft.create(review_request)
file3 = FileAttachment.objects.create(caption=caption_3,
file=file,
mimetype='image/png')
draft.file_attachments.add(file3)
        # Create the review with comments for each file attachment.
review = Review.objects.create(review_request=review_request,
user=user1)
review.file_attachment_comments.create(file_attachment=file1,
text=comment_text_1)
review.file_attachment_comments.create(file_attachment=file2,
text=comment_text_2)
review.publish()
# Check that we can find all the objects we expect on the page.
self.client.login(username='doc', password='doc')
response = self.client.get('/r/%d/' % review_request.pk)
self.assertEqual(response.status_code, 200)
file_attachments = response.context['file_attachments']
self.assertEqual(len(file_attachments), 2)
self.assertEqual(file_attachments[0].caption, caption_1)
self.assertEqual(file_attachments[1].caption, caption_3)
# Make sure that other users won't see the draft one.
self.client.logout()
response = self.client.get('/r/%d/' % review_request.pk)
self.assertEqual(response.status_code, 200)
file_attachments = response.context['file_attachments']
self.assertEqual(len(file_attachments), 1)
self.assertEqual(file_attachments[0].caption, caption_1)
# Make sure we loaded the reviews and all data correctly.
entries = response.context['entries']
self.assertEqual(len(entries), 1)
entry = entries[0]
comments = entry['comments']['file_attachment_comments']
self.assertEqual(len(comments), 2)
self.assertEqual(comments[0].text, comment_text_1)
self.assertEqual(comments[1].text, comment_text_2)
def test_review_detail_screenshot_visibility(self):
"""Testing visibility of screenshots on review requests."""
caption_1 = 'Screenshot 1'
caption_2 = 'Screenshot 2'
caption_3 = 'Screenshot 3'
comment_text_1 = "Comment text 1"
comment_text_2 = "Comment text 2"
user1 = User.objects.get(username='doc')
review_request = ReviewRequest.objects.create(user1, None)
# Add two screenshots. One active, one inactive.
screenshot1 = Screenshot.objects.create(caption=caption_1,
image='')
screenshot2 = Screenshot.objects.create(caption=caption_2,
image='')
review_request.screenshots.add(screenshot1)
review_request.inactive_screenshots.add(screenshot2)
review_request.publish(user1)
# Create one on a draft with a new screenshot.
draft = ReviewRequestDraft.create(review_request)
screenshot3 = Screenshot.objects.create(caption=caption_3,
image='')
draft.screenshots.add(screenshot3)
# Create the review with comments for each screenshot.
user1 = User.objects.get(username='doc')
review = Review.objects.create(review_request=review_request,
user=user1)
review.screenshot_comments.create(screenshot=screenshot1,
text=comment_text_1,
x=10,
y=10,
w=20,
h=20)
review.screenshot_comments.create(screenshot=screenshot2,
text=comment_text_2,
x=0,
y=0,
w=10,
h=10)
review.publish()
# Check that we can find all the objects we expect on the page.
self.client.login(username='doc', password='doc')
response = self.client.get('/r/%d/' % review_request.pk)
self.assertEqual(response.status_code, 200)
screenshots = response.context['screenshots']
self.assertEqual(len(screenshots), 2)
self.assertEqual(screenshots[0].caption, caption_1)
self.assertEqual(screenshots[1].caption, caption_3)
# Make sure that other users won't see the draft one.
self.client.logout()
response = self.client.get('/r/%d/' % review_request.pk)
self.assertEqual(response.status_code, 200)
screenshots = response.context['screenshots']
self.assertEqual(len(screenshots), 1)
self.assertEqual(screenshots[0].caption, caption_1)
entries = response.context['entries']
self.assertEqual(len(entries), 1)
entry = entries[0]
# Make sure we loaded the reviews and all data correctly.
comments = entry['comments']['screenshot_comments']
self.assertEqual(len(comments), 2)
self.assertEqual(comments[0].text, comment_text_1)
self.assertEqual(comments[1].text, comment_text_2)
def test_review_detail_sitewide_login(self):
"""Testing review_detail view with site-wide login enabled"""
self.siteconfig.set("auth_require_sitewide_login", True)
self.siteconfig.save()
self.create_review_request(publish=True)
response = self.client.get('/r/1/')
self.assertEqual(response.status_code, 302)
def test_new_review_request(self):
"""Testing new_review_request view"""
response = self.client.get('/r/new')
self.assertEqual(response.status_code, 301)
response = self.client.get('/r/new/')
self.assertEqual(response.status_code, 302)
self.client.login(username='grumpy', password='grumpy')
response = self.client.get('/r/new/')
self.assertEqual(response.status_code, 200)
# Bug 892
def test_interdiff(self):
"""Testing the diff viewer with interdiffs"""
review_request = self.create_review_request(create_repository=True,
publish=True)
diffset = self.create_diffset(review_request, revision=1)
self.create_filediff(
diffset,
source_file='/diffutils.py',
dest_file='/diffutils.py',
source_revision='6bba278',
dest_detail='465d217',
diff=(
b'diff --git a/diffutils.py b/diffutils.py\n'
b'index 6bba278..465d217 100644\n'
b'--- a/diffutils.py\n'
b'+++ b/diffutils.py\n'
b'@@ -1,3 +1,4 @@\n'
b'+# diffutils.py\n'
b' import fnmatch\n'
b' import os\n'
b' import re\n'
))
self.create_filediff(
diffset,
source_file='/readme',
dest_file='/readme',
source_revision='d6613f5',
dest_detail='5b50866',
diff=(
b'diff --git a/readme b/readme\n'
b'index d6613f5..5b50866 100644\n'
b'--- a/readme\n'
b'+++ b/readme\n'
b'@@ -1 +1,3 @@\n'
b' Hello there\n'
b'+\n'
b'+Oh hi!\n'
))
self.create_filediff(
diffset,
source_file='/newfile',
dest_file='/newfile',
source_revision='PRE-CREATION',
dest_detail='',
diff=(
b'diff --git a/new_file b/new_file\n'
b'new file mode 100644\n'
b'index 0000000..ac30bd3\n'
b'--- /dev/null\n'
b'+++ b/new_file\n'
b'@@ -0,0 +1 @@\n'
b'+This is a new file!\n'
))
diffset = self.create_diffset(review_request, revision=2)
self.create_filediff(
diffset,
source_file='/diffutils.py',
dest_file='/diffutils.py',
source_revision='6bba278',
dest_detail='465d217',
diff=(
b'diff --git a/diffutils.py b/diffutils.py\n'
b'index 6bba278..465d217 100644\n'
b'--- a/diffutils.py\n'
b'+++ b/diffutils.py\n'
b'@@ -1,3 +1,4 @@\n'
b'+# diffutils.py\n'
b' import fnmatch\n'
b' import os\n'
b' import re\n'
))
self.create_filediff(
diffset,
source_file='/readme',
dest_file='/readme',
source_revision='d6613f5',
dest_detail='5b50867',
diff=(
b'diff --git a/readme b/readme\n'
b'index d6613f5..5b50867 100644\n'
b'--- a/readme\n'
b'+++ b/readme\n'
b'@@ -1 +1,3 @@\n'
b' Hello there\n'
b'+----------\n'
b'+Oh hi!\n'
))
self.create_filediff(
diffset,
source_file='/newfile',
dest_file='/newfile',
source_revision='PRE-CREATION',
dest_detail='',
diff=(
b'diff --git a/new_file b/new_file\n'
b'new file mode 100644\n'
b'index 0000000..ac30bd4\n'
b'--- /dev/null\n'
b'+++ b/new_file\n'
b'@@ -0,0 +1 @@\n'
                b'+This is a different version of this new file!\n'
))
response = self.client.get('/r/1/diff/1-2/')
# Useful for debugging any actual errors here.
if response.status_code != 200:
print("Error: %s" % self._get_context_var(response, 'error'))
print(self._get_context_var(response, 'trace'))
self.assertEqual(response.status_code, 200)
self.assertEqual(
self._get_context_var(response, 'diff_context')['num_diffs'],
2)
files = self._get_context_var(response, 'files')
self.assertTrue(files)
self.assertEqual(len(files), 2)
self.assertEqual(files[0]['depot_filename'], '/newfile')
self.assertIn('interfilediff', files[0])
self.assertEqual(files[1]['depot_filename'], '/readme')
self.assertIn('interfilediff', files[1])
# Bug 847
def test_interdiff_new_file(self):
"""Testing the diff viewer with interdiffs containing new files"""
review_request = self.create_review_request(create_repository=True,
publish=True)
diffset = self.create_diffset(review_request, revision=1)
self.create_filediff(
diffset,
source_file='/diffutils.py',
dest_file='/diffutils.py',
source_revision='6bba278',
dest_detail='465d217',
diff=(
b'diff --git a/diffutils.py b/diffutils.py\n'
b'index 6bba278..465d217 100644\n'
b'--- a/diffutils.py\n'
b'+++ b/diffutils.py\n'
b'@@ -1,3 +1,4 @@\n'
b'+# diffutils.py\n'
b' import fnmatch\n'
b' import os\n'
b' import re\n'
))
diffset = self.create_diffset(review_request, revision=2)
self.create_filediff(
diffset,
source_file='/diffutils.py',
dest_file='/diffutils.py',
source_revision='6bba278',
dest_detail='465d217',
diff=(
b'diff --git a/diffutils.py b/diffutils.py\n'
b'index 6bba278..465d217 100644\n'
b'--- a/diffutils.py\n'
b'+++ b/diffutils.py\n'
b'@@ -1,3 +1,4 @@\n'
b'+# diffutils.py\n'
b' import fnmatch\n'
b' import os\n'
b' import re\n'
))
self.create_filediff(
diffset,
source_file='/newfile',
dest_file='/newfile',
source_revision='PRE-CREATION',
dest_detail='',
diff=(
b'diff --git a/new_file b/new_file\n'
b'new file mode 100644\n'
b'index 0000000..ac30bd4\n'
b'--- /dev/null\n'
b'+++ b/new_file\n'
b'@@ -0,0 +1 @@\n'
                b'+This is a different version of this new file!\n'
))
response = self.client.get('/r/1/diff/1-2/')
# Useful for debugging any actual errors here.
if response.status_code != 200:
print("Error: %s" % self._get_context_var(response, 'error'))
print(self._get_context_var(response, 'trace'))
self.assertEqual(response.status_code, 200)
self.assertEqual(
self._get_context_var(response, 'diff_context')['num_diffs'],
2)
files = self._get_context_var(response, 'files')
self.assertTrue(files)
self.assertEqual(len(files), 1)
self.assertEqual(files[0]['depot_filename'], '/newfile')
self.assertIn('interfilediff', files[0])
def test_review_request_etag_with_issues(self):
"""Testing review request ETags with issue status toggling"""
self.client.login(username='doc', password='doc')
# Some objects we need.
user = User.objects.get(username="doc")
review_request = self.create_review_request(create_repository=True,
publish=True)
diffset = self.create_diffset(review_request)
filediff = self.create_filediff(diffset)
# Create a review.
review = self.create_review(review_request, user=user)
comment = self.create_diff_comment(review, filediff,
issue_opened=True)
review.publish()
# Get the etag
response = self.client.get(review_request.get_absolute_url())
self.assertEqual(response.status_code, 200)
etag1 = response['ETag']
self.assertNotEqual(etag1, '')
# Change the issue status
comment.issue_status = Comment.RESOLVED
comment.save()
# Check the etag again
response = self.client.get(review_request.get_absolute_url())
self.assertEqual(response.status_code, 200)
etag2 = response['ETag']
self.assertNotEqual(etag2, '')
# Make sure they're not equal
self.assertNotEqual(etag1, etag2)
# Bug #3384
def test_diff_raw_content_disposition_attachment(self):
"""Testing /diff/raw/ Content-Disposition: attachment; ..."""
review_request = self.create_review_request(create_repository=True,
publish=True)
self.create_diffset(review_request=review_request)
response = self.client.get('/r/%d/diff/raw/' % review_request.pk)
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Disposition'],
'attachment; filename=diffset')
class DraftTests(TestCase):
fixtures = ['test_users', 'test_scmtools']
def test_draft_changes(self):
"""Testing recording of draft changes."""
draft = self._get_draft()
review_request = draft.review_request
old_summary = review_request.summary
old_description = review_request.description
old_testing_done = review_request.testing_done
old_branch = review_request.branch
old_bugs = review_request.get_bug_list()
draft.summary = "New summary"
draft.description = "New description"
draft.testing_done = "New testing done"
draft.branch = "New branch"
draft.bugs_closed = "12, 34, 56"
new_bugs = draft.get_bug_list()
changes = draft.publish()
fields = changes.fields_changed
self.assertIn("summary", fields)
self.assertIn("description", fields)
self.assertIn("testing_done", fields)
self.assertIn("branch", fields)
self.assertIn("bugs_closed", fields)
old_bugs_norm = set([(bug,) for bug in old_bugs])
new_bugs_norm = set([(bug,) for bug in new_bugs])
self.assertEqual(fields["summary"]["old"][0], old_summary)
self.assertEqual(fields["summary"]["new"][0], draft.summary)
self.assertEqual(fields["description"]["old"][0], old_description)
self.assertEqual(fields["description"]["new"][0], draft.description)
self.assertEqual(fields["testing_done"]["old"][0], old_testing_done)
self.assertEqual(fields["testing_done"]["new"][0], draft.testing_done)
self.assertEqual(fields["branch"]["old"][0], old_branch)
self.assertEqual(fields["branch"]["new"][0], draft.branch)
self.assertEqual(set(fields["bugs_closed"]["old"]), old_bugs_norm)
self.assertEqual(set(fields["bugs_closed"]["new"]), new_bugs_norm)
self.assertEqual(set(fields["bugs_closed"]["removed"]), old_bugs_norm)
self.assertEqual(set(fields["bugs_closed"]["added"]), new_bugs_norm)
def _get_draft(self):
"""Convenience function for getting a new draft to work with."""
review_request = self.create_review_request(publish=True)
return ReviewRequestDraft.create(review_request)
class FieldTests(TestCase):
# Bug #1352
def test_long_bug_numbers(self):
"""Testing review requests with very long bug numbers"""
review_request = ReviewRequest()
review_request.bugs_closed = \
'12006153200030304432010,4432009'
self.assertEqual(review_request.get_bug_list(),
['4432009', '12006153200030304432010'])
# Our _("(no summary)") string was failing in the admin UI, as
# django.template.defaultfilters.stringfilter would fail on a
# ugettext_lazy proxy object. We can use any stringfilter for this.
#
# Bug #1346
def test_no_summary(self):
"""Testing review requests with no summary"""
from django.template.defaultfilters import lower
review_request = ReviewRequest()
lower(review_request)
@add_fixtures(['test_users'])
def test_commit_id(self):
"""Testing commit_id migration"""
review_request = self.create_review_request()
review_request.changenum = '123'
self.assertEqual(review_request.commit_id, None)
self.assertEqual(review_request.commit,
six.text_type(review_request.changenum))
self.assertNotEqual(review_request.commit_id, None)
class PostCommitTests(SpyAgency, TestCase):
fixtures = ['test_users', 'test_scmtools']
def setUp(self):
super(PostCommitTests, self).setUp()
self.user = User.objects.create(username='testuser', password='')
self.profile, is_new = Profile.objects.get_or_create(user=self.user)
self.profile.save()
self.testdata_dir = os.path.join(
os.path.dirname(os.path.dirname(__file__)),
'scmtools', 'testdata')
self.repository = self.create_repository(tool_name='Test')
def test_update_from_committed_change(self):
"""Testing post-commit update"""
commit_id = '4'
def get_change(repository, commit_to_get):
self.assertEqual(commit_id, commit_to_get)
commit = Commit()
commit.message = \
'This is my commit message\n\nWith a summary line too.'
diff_filename = os.path.join(self.testdata_dir, 'git_readme.diff')
with open(diff_filename, 'r') as f:
commit.diff = f.read()
return commit
def get_file_exists(repository, path, revision, base_commit_id=None,
request=None):
return (path, revision) in [('/readme', 'd6613f5')]
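        # Stub out repository access so the test never touches a real SCM.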
self.spy_on(self.repository.get_change, call_fake=get_change)
self.spy_on(self.repository.get_file_exists, call_fake=get_file_exists)
review_request = ReviewRequest.objects.create(self.user,
self.repository)
review_request.update_from_commit_id(commit_id)
self.assertEqual(review_request.summary, 'This is my commit message')
self.assertEqual(review_request.description,
'With a summary line too.')
self.assertEqual(review_request.diffset_history.diffsets.count(), 1)
diffset = review_request.diffset_history.diffsets.get()
self.assertEqual(diffset.files.count(), 1)
        filediff = diffset.files.get()
        self.assertEqual(filediff.source_file, 'readme')
        self.assertEqual(filediff.source_revision, 'd6613f5')
def test_update_from_committed_change_with_markdown_escaping(self):
"""Testing post-commit update with markdown escaping"""
def get_change(repository, commit_to_get):
commit = Commit()
commit.message = '* No escaping\n\n* but this needs escaping'
diff_filename = os.path.join(self.testdata_dir, 'git_readme.diff')
with open(diff_filename, 'r') as f:
commit.diff = f.read()
return commit
def get_file_exists(repository, path, revision, base_commit_id=None,
request=None):
return (path, revision) in [('/readme', 'd6613f5')]
self.spy_on(self.repository.get_change, call_fake=get_change)
self.spy_on(self.repository.get_file_exists, call_fake=get_file_exists)
review_request = ReviewRequest.objects.create(self.user,
self.repository)
review_request.rich_text = True
review_request.update_from_commit_id('4')
self.assertEqual(review_request.summary, '* No escaping')
self.assertEqual(review_request.description,
'\\* but this needs escaping')
def test_update_from_committed_change_without_repository_support(self):
"""Testing post-commit update failure conditions"""
self.spy_on(self.repository.__class__.supports_post_commit.fget,
call_fake=lambda self: False)
review_request = ReviewRequest.objects.create(self.user,
self.repository)
self.assertRaises(NotImplementedError,
lambda: review_request.update_from_commit_id('4'))
class ConcurrencyTests(TestCase):
fixtures = ['test_users', 'test_scmtools']
def test_duplicate_reviews(self):
"""Testing consolidation of duplicate reviews"""
body_top = "This is the body_top."
body_bottom = "This is the body_bottom."
comment_text_1 = "Comment text 1"
comment_text_2 = "Comment text 2"
comment_text_3 = "Comment text 3"
# Some objects we need.
user = User.objects.get(username="doc")
review_request = self.create_review_request(create_repository=True,
publish=True)
diffset = self.create_diffset(review_request)
filediff = self.create_filediff(diffset)
# Create the first review.
master_review = self.create_review(review_request, user=user,
body_top=body_top,
body_bottom='')
self.create_diff_comment(master_review, filediff, text=comment_text_1,
first_line=1, num_lines=1)
# Create the second review.
review = self.create_review(review_request, user=user,
body_top='', body_bottom='')
self.create_diff_comment(review, filediff, text=comment_text_2,
first_line=1, num_lines=1)
# Create the third review.
review = self.create_review(review_request, user=user,
body_top='',
body_bottom=body_bottom)
self.create_diff_comment(review, filediff, text=comment_text_3,
first_line=1, num_lines=1)
# Now that we've made a mess, see if we get a single review back.
logging.disable(logging.WARNING)
review = review_request.get_pending_review(user)
self.assertTrue(review)
self.assertEqual(review.id, master_review.id)
self.assertEqual(review.body_top, body_top)
self.assertEqual(review.body_bottom, body_bottom)
comments = list(review.comments.all())
self.assertEqual(len(comments), 3)
self.assertEqual(comments[0].text, comment_text_1)
self.assertEqual(comments[1].text, comment_text_2)
self.assertEqual(comments[2].text, comment_text_3)
class DefaultReviewerTests(TestCase):
fixtures = ['test_scmtools']
def test_for_repository(self):
"""Testing DefaultReviewer.objects.for_repository"""
tool = Tool.objects.get(name='CVS')
default_reviewer1 = DefaultReviewer(name="Test", file_regex=".*")
default_reviewer1.save()
default_reviewer2 = DefaultReviewer(name="Bar", file_regex=".*")
default_reviewer2.save()
repo1 = Repository(name='Test1', path='path1', tool=tool)
repo1.save()
default_reviewer1.repository.add(repo1)
repo2 = Repository(name='Test2', path='path2', tool=tool)
repo2.save()
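        # default_reviewer2 has no repositories attached, so it matches every
        # repository; default_reviewer1 was bound to repo1 only.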
default_reviewers = DefaultReviewer.objects.for_repository(repo1, None)
self.assertEqual(len(default_reviewers), 2)
self.assertIn(default_reviewer1, default_reviewers)
self.assertIn(default_reviewer2, default_reviewers)
default_reviewers = DefaultReviewer.objects.for_repository(repo2, None)
self.assertEqual(len(default_reviewers), 1)
self.assertIn(default_reviewer2, default_reviewers)
def test_for_repository_with_localsite(self):
"""Testing DefaultReviewer.objects.for_repository with a LocalSite."""
test_site = LocalSite.objects.create(name='test')
default_reviewer1 = DefaultReviewer(name='Test 1', file_regex='.*',
local_site=test_site)
default_reviewer1.save()
default_reviewer2 = DefaultReviewer(name='Test 2', file_regex='.*')
default_reviewer2.save()
default_reviewers = DefaultReviewer.objects.for_repository(
None, test_site)
self.assertEqual(len(default_reviewers), 1)
self.assertIn(default_reviewer1, default_reviewers)
default_reviewers = DefaultReviewer.objects.for_repository(None, None)
self.assertEqual(len(default_reviewers), 1)
self.assertIn(default_reviewer2, default_reviewers)
def test_form_with_localsite(self):
"""Testing DefaultReviewerForm with a LocalSite."""
test_site = LocalSite.objects.create(name='test')
tool = Tool.objects.get(name='CVS')
repo = Repository.objects.create(name='Test', path='path', tool=tool,
local_site=test_site)
user = User.objects.create(username='testuser', password='')
test_site.users.add(user)
group = Group.objects.create(name='test', display_name='Test',
local_site=test_site)
form = DefaultReviewerForm({
'name': 'Test',
'file_regex': '.*',
'local_site': test_site.pk,
'repository': [repo.pk],
'people': [user.pk],
'groups': [group.pk],
})
self.assertTrue(form.is_valid())
default_reviewer = form.save()
        self.assertEqual(default_reviewer.local_site, test_site)
        self.assertEqual(default_reviewer.repository.get(), repo)
        self.assertEqual(default_reviewer.people.get(), user)
        self.assertEqual(default_reviewer.groups.get(), group)
def test_form_with_localsite_and_bad_user(self):
"""Testing DefaultReviewerForm with a User not on the same LocalSite.
"""
test_site = LocalSite.objects.create(name='test')
user = User.objects.create(username='testuser', password='')
form = DefaultReviewerForm({
'name': 'Test',
'file_regex': '.*',
'local_site': test_site.pk,
'people': [user.pk],
})
self.assertFalse(form.is_valid())
def test_form_with_localsite_and_bad_group(self):
"""Testing DefaultReviewerForm with a Group not on the same LocalSite.
"""
test_site = LocalSite.objects.create(name='test')
group = Group.objects.create(name='test', display_name='Test')
form = DefaultReviewerForm({
'name': 'Test',
'file_regex': '.*',
'local_site': test_site.pk,
'groups': [group.pk],
})
self.assertFalse(form.is_valid())
group.local_site = test_site
group.save()
form = DefaultReviewerForm({
'name': 'Test',
'file_regex': '.*',
'groups': [group.pk],
})
self.assertFalse(form.is_valid())
def test_form_with_localsite_and_bad_repository(self):
"""Testing DefaultReviewerForm with a Repository not on the same
LocalSite.
"""
test_site = LocalSite.objects.create(name='test')
tool = Tool.objects.get(name='CVS')
repo = Repository.objects.create(name='Test', path='path', tool=tool)
form = DefaultReviewerForm({
'name': 'Test',
'file_regex': '.*',
'local_site': test_site.pk,
'repository': [repo.pk],
})
self.assertFalse(form.is_valid())
repo.local_site = test_site
repo.save()
form = DefaultReviewerForm({
'name': 'Test',
'file_regex': '.*',
'repository': [repo.pk],
})
self.assertFalse(form.is_valid())
class GroupTests(TestCase):
def test_form_with_localsite(self):
"""Tests GroupForm with a LocalSite."""
test_site = LocalSite.objects.create(name='test')
user = User.objects.create(username='testuser', password='')
test_site.users.add(user)
form = GroupForm({
'name': 'test',
'display_name': 'Test',
'local_site': test_site.pk,
'users': [user.pk],
})
self.assertTrue(form.is_valid())
group = form.save()
        self.assertEqual(group.local_site, test_site)
        self.assertEqual(group.users.get(), user)
def test_form_with_localsite_and_bad_user(self):
"""Tests GroupForm with a User not on the same LocalSite."""
test_site = LocalSite.objects.create(name='test')
user = User.objects.create(username='testuser', password='')
form = GroupForm({
'name': 'test',
'display_name': 'Test',
'local_site': test_site.pk,
'users': [user.pk],
})
self.assertFalse(form.is_valid())
class IfNeatNumberTagTests(TestCase):
def test_milestones(self):
"""Testing the ifneatnumber tag with milestone numbers"""
self.assertNeatNumberResult(100, "")
self.assertNeatNumberResult(1000, "milestone")
self.assertNeatNumberResult(10000, "milestone")
self.assertNeatNumberResult(20000, "milestone")
self.assertNeatNumberResult(20001, "")
def test_palindrome(self):
"""Testing the ifneatnumber tag with palindrome numbers"""
self.assertNeatNumberResult(101, "")
self.assertNeatNumberResult(1001, "palindrome")
self.assertNeatNumberResult(12321, "palindrome")
self.assertNeatNumberResult(20902, "palindrome")
self.assertNeatNumberResult(912219, "palindrome")
self.assertNeatNumberResult(912218, "")
def assertNeatNumberResult(self, rid, expected):
t = Template(
"{% load reviewtags %}"
"{% ifneatnumber " + six.text_type(rid) + " %}"
"{% if milestone %}milestone{% else %}"
"{% if palindrome %}palindrome{% endif %}{% endif %}"
"{% endifneatnumber %}")
self.assertEqual(t.render(Context({})), expected)
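# A plain-Python sketch of the classification these tests imply. The helpers
# below are hypothetical illustrations (not part of Review Board's template
# tag API); the thresholds are inferred from the expected results above.
def _is_milestone(n):
    # 1000, 10000 and 20000 count as milestones; 100 and 20001 do not.
    return n >= 1000 and n % 1000 == 0
def _is_palindrome(n):
    # 1001 and 912219 qualify; the three-digit 101 does not.
    s = str(n)
    return len(s) > 3 and s == s[::-1]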
class ReviewRequestCounterTests(TestCase):
fixtures = ['test_scmtools']
def setUp(self):
super(ReviewRequestCounterTests, self).setUp()
tool = Tool.objects.get(name='Subversion')
repository = Repository.objects.create(name='Test1', path='path1',
tool=tool)
self.user = User.objects.create(username='testuser', password='')
self.profile, is_new = Profile.objects.get_or_create(user=self.user)
self.profile.save()
self.test_site = LocalSite.objects.create(name='test')
self.site_profile2 = \
LocalSiteProfile.objects.create(user=self.user,
profile=self.profile,
local_site=self.test_site)
self.review_request = ReviewRequest.objects.create(self.user,
repository)
self.profile.star_review_request(self.review_request)
self.site_profile = self.profile.site_profiles.get(local_site=None)
self.assertEqual(self.site_profile.total_outgoing_request_count, 1)
self.assertEqual(self.site_profile.pending_outgoing_request_count, 1)
self.assertEqual(self.site_profile.starred_public_request_count, 0)
self.group = Group.objects.create(name='test-group')
self.group.users.add(self.user)
self._reload_objects()
self.assertEqual(self.site_profile2.total_outgoing_request_count, 0)
self.assertEqual(self.site_profile2.pending_outgoing_request_count, 0)
self.assertEqual(self.site_profile2.starred_public_request_count, 0)
def test_new_site_profile(self):
"""Testing counters on a new LocalSiteProfile"""
self.site_profile.delete()
self.site_profile = \
LocalSiteProfile.objects.create(user=self.user,
profile=self.profile)
self.assertEqual(self.site_profile.total_outgoing_request_count, 1)
self.assertEqual(self.site_profile.pending_outgoing_request_count, 1)
self.assertEqual(self.site_profile.starred_public_request_count, 0)
self.review_request.publish(self.user)
self._reload_objects()
self.assertEqual(self.site_profile.total_outgoing_request_count, 1)
self.assertEqual(self.site_profile.pending_outgoing_request_count, 1)
self.assertEqual(self.site_profile.starred_public_request_count, 1)
def test_outgoing_requests(self):
"""Testing counters with creating outgoing review requests"""
# The review request was already created
self._check_counters(total_outgoing=1,
pending_outgoing=1)
ReviewRequestDraft.create(self.review_request)
self.review_request.publish(self.user)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
starred_public=1)
def test_closing_requests(self, close_type=ReviewRequest.DISCARDED):
"""Testing counters with closing outgoing review requests"""
# The review request was already created
self._check_counters(total_outgoing=1, pending_outgoing=1)
draft = ReviewRequestDraft.create(self.review_request)
draft.target_groups.add(self.group)
draft.target_people.add(self.user)
self.review_request.publish(self.user)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
direct_incoming=1,
total_incoming=1,
starred_public=1,
group_incoming=1)
self.assertTrue(self.review_request.public)
self.assertEqual(self.review_request.status,
ReviewRequest.PENDING_REVIEW)
self.review_request.close(close_type)
self._check_counters(total_outgoing=1)
def test_closing_draft_requests(self, close_type=ReviewRequest.DISCARDED):
"""Testing counters with closing draft review requests"""
# The review request was already created
self._check_counters(total_outgoing=1,
pending_outgoing=1)
self.assertFalse(self.review_request.public)
self.assertEqual(self.review_request.status,
ReviewRequest.PENDING_REVIEW)
self.review_request.close(close_type)
self._check_counters(total_outgoing=1)
def test_closing_closed_requests(self):
"""Testing counters with closing closed review requests"""
# The review request was already created
self._check_counters(total_outgoing=1,
pending_outgoing=1)
self.assertFalse(self.review_request.public)
self.assertEqual(self.review_request.status,
ReviewRequest.PENDING_REVIEW)
self.review_request.close(ReviewRequest.DISCARDED)
self._check_counters(total_outgoing=1)
self.review_request.close(ReviewRequest.SUBMITTED)
self._check_counters(total_outgoing=1)
def test_closing_draft_requests_with_site(self):
"""Testing counters with closing draft review requests on LocalSite"""
self.review_request.delete()
self._check_counters(with_local_site=True)
tool = Tool.objects.get(name='Subversion')
repository = Repository.objects.create(name='Test1', path='path1',
tool=tool,
local_site=self.test_site)
self.review_request = ReviewRequest.objects.create(
self.user,
repository,
local_site=self.test_site)
self._check_counters(with_local_site=True,
total_outgoing=1,
pending_outgoing=1)
self.assertFalse(self.review_request.public)
self.assertEqual(self.review_request.status,
ReviewRequest.PENDING_REVIEW)
self.review_request.close(ReviewRequest.DISCARDED)
self._check_counters(with_local_site=True,
total_outgoing=1)
def test_deleting_requests(self):
"""Testing counters with deleting outgoing review requests"""
# The review request was already created
self._check_counters(total_outgoing=1,
pending_outgoing=1)
draft = ReviewRequestDraft.create(self.review_request)
draft.target_groups.add(self.group)
draft.target_people.add(self.user)
self.review_request.publish(self.user)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
direct_incoming=1,
total_incoming=1,
starred_public=1,
group_incoming=1)
self.review_request.delete()
self._check_counters()
def test_deleting_draft_requests(self):
"""Testing counters with deleting draft review requests"""
# We're simulating what a DefaultReviewer would do by populating
# the ReviewRequest's target users and groups while not public and
# without a draft.
self.review_request.target_people.add(self.user)
self.review_request.target_groups.add(self.group)
# The review request was already created
self._check_counters(total_outgoing=1,
pending_outgoing=1)
self.review_request.delete()
self._check_counters()
def test_deleting_closed_requests(self):
"""Testing counters with deleting closed review requests"""
# We're simulating what a DefaultReviewer would do by populating
# the ReviewRequest's target users and groups while not public and
# without a draft.
self.review_request.target_people.add(self.user)
self.review_request.target_groups.add(self.group)
# The review request was already created
self._check_counters(total_outgoing=1,
pending_outgoing=1)
self.review_request.close(ReviewRequest.DISCARDED)
self._check_counters(total_outgoing=1)
self.review_request.delete()
self._check_counters()
def test_reopen_discarded_requests(self):
"""Testing counters with reopening discarded outgoing review requests
"""
self.test_closing_requests(ReviewRequest.DISCARDED)
self.review_request.reopen()
self.assertFalse(self.review_request.public)
self.assertEqual(self.review_request.status,
ReviewRequest.PENDING_REVIEW)
self._check_counters(total_outgoing=1,
pending_outgoing=1)
self.review_request.publish(self.user)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
direct_incoming=1,
total_incoming=1,
starred_public=1,
group_incoming=1)
def test_reopen_submitted_requests(self):
"""Testing counters with reopening submitted outgoing review requests
"""
self.test_closing_requests(ReviewRequest.SUBMITTED)
self.review_request.reopen()
self.assertTrue(self.review_request.public)
self.assertEqual(self.review_request.status,
ReviewRequest.PENDING_REVIEW)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
direct_incoming=1,
total_incoming=1,
starred_public=1,
group_incoming=1)
self.review_request.publish(self.user)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
direct_incoming=1,
total_incoming=1,
starred_public=1,
group_incoming=1)
def test_reopen_discarded_draft_requests(self):
"""Testing counters with reopening discarded draft review requests"""
self.assertFalse(self.review_request.public)
self.test_closing_draft_requests(ReviewRequest.DISCARDED)
self.review_request.reopen()
self.assertFalse(self.review_request.public)
self.assertEqual(self.review_request.status,
ReviewRequest.PENDING_REVIEW)
self._check_counters(total_outgoing=1,
pending_outgoing=1)
def test_reopen_submitted_draft_requests(self):
"""Testing counters with reopening submitted draft review requests"""
self.test_closing_draft_requests(ReviewRequest.SUBMITTED)
# We're simulating what a DefaultReviewer would do by populating
# the ReviewRequest's target users and groups while not public and
# without a draft.
self.review_request.target_people.add(self.user)
self.review_request.target_groups.add(self.group)
self._check_counters(total_outgoing=1)
self.review_request.reopen()
self.assertTrue(self.review_request.public)
self.assertEqual(self.review_request.status,
ReviewRequest.PENDING_REVIEW)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
direct_incoming=1,
total_incoming=1,
starred_public=1,
group_incoming=1)
def test_double_publish(self):
"""Testing counters with publishing a review request twice"""
self.assertFalse(self.review_request.public)
self.assertEqual(self.review_request.status,
ReviewRequest.PENDING_REVIEW)
# Publish the first time.
self.review_request.publish(self.user)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
starred_public=1)
# Publish the second time.
self.review_request.publish(self.user)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
starred_public=1)
def test_add_group(self):
"""Testing counters when adding a group reviewer"""
draft = ReviewRequestDraft.create(self.review_request)
draft.target_groups.add(self.group)
self._check_counters(total_outgoing=1,
pending_outgoing=1)
self.review_request.publish(self.user)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
total_incoming=1,
group_incoming=1,
starred_public=1)
def test_remove_group(self):
"""Testing counters when removing a group reviewer"""
self.test_add_group()
draft = ReviewRequestDraft.create(self.review_request)
draft.target_groups.remove(self.group)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
total_incoming=1,
group_incoming=1,
starred_public=1)
self.review_request.publish(self.user)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
starred_public=1)
def test_add_person(self):
"""Testing counters when adding a person reviewer"""
draft = ReviewRequestDraft.create(self.review_request)
draft.target_people.add(self.user)
self._check_counters(total_outgoing=1,
pending_outgoing=1)
self.review_request.publish(self.user)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
direct_incoming=1,
total_incoming=1,
starred_public=1)
def test_remove_person(self):
"""Testing counters when removing a person reviewer"""
self.test_add_person()
draft = ReviewRequestDraft.create(self.review_request)
draft.target_people.remove(self.user)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
direct_incoming=1,
total_incoming=1,
starred_public=1)
self.review_request.publish(self.user)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
starred_public=1)
def test_populate_counters(self):
"""Testing counters when populated from a fresh upgrade or clear"""
# The review request was already created
draft = ReviewRequestDraft.create(self.review_request)
draft.target_groups.add(self.group)
draft.target_people.add(self.user)
self.review_request.publish(self.user)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
total_incoming=1,
direct_incoming=1,
starred_public=1,
group_incoming=1)
LocalSiteProfile.objects.update(
direct_incoming_request_count=None,
total_incoming_request_count=None,
pending_outgoing_request_count=None,
total_outgoing_request_count=None,
starred_public_request_count=None)
Group.objects.update(incoming_request_count=None)
self._check_counters(total_outgoing=1,
pending_outgoing=1,
total_incoming=1,
direct_incoming=1,
starred_public=1,
group_incoming=1)
def test_populate_counters_after_change(self):
"""Testing counter inc/dec on uninitialized counter fields"""
# The review request was already created
draft = ReviewRequestDraft.create(self.review_request)
draft.target_groups.add(self.group)
draft.target_people.add(self.user)
self._check_counters(total_outgoing=1,
pending_outgoing=1)
LocalSiteProfile.objects.update(
direct_incoming_request_count=None,
total_incoming_request_count=None,
pending_outgoing_request_count=None,
total_outgoing_request_count=None,
starred_public_request_count=None)
Group.objects.update(incoming_request_count=None)
profile_fields = [
'direct_incoming_request_count',
'total_incoming_request_count',
'pending_outgoing_request_count',
'total_outgoing_request_count',
'starred_public_request_count',
]
# Lock the fields so we don't re-initialize them on publish.
locks = {
self.site_profile: 1,
self.site_profile2: 1,
}
for field in profile_fields:
getattr(LocalSiteProfile, field)._locks = locks
Group.incoming_request_count._locks = locks
# Publish the review request. This will normally try to
# increment/decrement the counts, which it should ignore now.
self.review_request.publish(self.user)
# Unlock the profiles so we can query/re-initialize them again.
for field in profile_fields:
getattr(LocalSiteProfile, field)._locks = {}
Group.incoming_request_count._locks = {}
self._check_counters(total_outgoing=1,
pending_outgoing=1,
direct_incoming=1,
total_incoming=1,
starred_public=1,
group_incoming=1)
def _check_counters(self, total_outgoing=0, pending_outgoing=0,
direct_incoming=0, total_incoming=0,
starred_public=0, group_incoming=0,
with_local_site=False):
self._reload_objects()
if with_local_site:
main_site_profile = self.site_profile2
unused_site_profile = self.site_profile
else:
main_site_profile = self.site_profile
unused_site_profile = self.site_profile2
self.assertEqual(main_site_profile.total_outgoing_request_count,
total_outgoing)
self.assertEqual(main_site_profile.pending_outgoing_request_count,
pending_outgoing)
self.assertEqual(main_site_profile.direct_incoming_request_count,
direct_incoming)
self.assertEqual(main_site_profile.total_incoming_request_count,
total_incoming)
self.assertEqual(main_site_profile.starred_public_request_count,
starred_public)
self.assertEqual(self.group.incoming_request_count, group_incoming)
# These should never be affected by the updates on the main
# LocalSite we're working with, so they should always be 0.
self.assertEqual(unused_site_profile.total_outgoing_request_count, 0)
self.assertEqual(unused_site_profile.pending_outgoing_request_count, 0)
self.assertEqual(unused_site_profile.direct_incoming_request_count, 0)
self.assertEqual(unused_site_profile.total_incoming_request_count, 0)
self.assertEqual(unused_site_profile.starred_public_request_count, 0)
def _reload_objects(self):
self.test_site = LocalSite.objects.get(pk=self.test_site.pk)
self.site_profile = \
LocalSiteProfile.objects.get(pk=self.site_profile.pk)
self.site_profile2 = \
LocalSiteProfile.objects.get(pk=self.site_profile2.pk)
self.group = Group.objects.get(pk=self.group.pk)
class IssueCounterTests(TestCase):
fixtures = ['test_users']
def setUp(self):
super(IssueCounterTests, self).setUp()
self.review_request = self.create_review_request(publish=True)
self.assertEqual(self.review_request.issue_open_count, 0)
self.assertEqual(self.review_request.issue_resolved_count, 0)
self.assertEqual(self.review_request.issue_dropped_count, 0)
self._reset_counts()
@add_fixtures(['test_scmtools'])
def test_init_with_diff_comments(self):
"""Testing ReviewRequest issue counter initialization
from diff comments
"""
self.review_request.repository = self.create_repository()
diffset = self.create_diffset(self.review_request)
filediff = self.create_filediff(diffset)
self._test_issue_counts(
lambda review, issue_opened: self.create_diff_comment(
review, filediff, issue_opened=issue_opened))
def test_init_with_file_attachment_comments(self):
"""Testing ReviewRequest issue counter initialization
from file attachment comments
"""
file_attachment = self.create_file_attachment(self.review_request)
self._test_issue_counts(
lambda review, issue_opened: self.create_file_attachment_comment(
review, file_attachment, issue_opened=issue_opened))
def test_init_with_screenshot_comments(self):
"""Testing ReviewRequest issue counter initialization
from screenshot comments
"""
screenshot = self.create_screenshot(self.review_request)
self._test_issue_counts(
lambda review, issue_opened: self.create_screenshot_comment(
review, screenshot, issue_opened=issue_opened))
@add_fixtures(['test_scmtools'])
def test_init_with_mix(self):
"""Testing ReviewRequest issue counter initialization
from multiple types of comments at once
"""
# The initial implementation for issue status counting broke when
# there were multiple types of comments on a review (such as diff
# comments and file attachment comments). There would be an
# artificially large number of issues reported.
#
# That's been fixed, and this test is ensuring that it doesn't
# regress.
self.review_request.repository = self.create_repository()
diffset = self.create_diffset(self.review_request)
filediff = self.create_filediff(diffset)
file_attachment = self.create_file_attachment(self.review_request)
screenshot = self.create_screenshot(self.review_request)
review = self.create_review(self.review_request)
# One open file attachment comment
self.create_file_attachment_comment(review, file_attachment,
issue_opened=True)
# Two diff comments
self.create_diff_comment(review, filediff, issue_opened=True)
self.create_diff_comment(review, filediff, issue_opened=True)
# Four screenshot comments
self.create_screenshot_comment(review, screenshot, issue_opened=True)
self.create_screenshot_comment(review, screenshot, issue_opened=True)
self.create_screenshot_comment(review, screenshot, issue_opened=True)
self.create_screenshot_comment(review, screenshot, issue_opened=True)
        # The issue counts should end up being 0, since they'll initialize
        # during load.
self._reload_object(clear_counters=True)
self.assertEqual(self.review_request.issue_open_count, 0)
self.assertEqual(self.review_request.issue_resolved_count, 0)
self.assertEqual(self.review_request.issue_dropped_count, 0)
# Now publish. We should have 7 open issues, by way of incrementing
# during publish.
review.publish()
self._reload_object()
self.assertEqual(self.review_request.issue_open_count, 7)
self.assertEqual(self.review_request.issue_dropped_count, 0)
self.assertEqual(self.review_request.issue_resolved_count, 0)
# Make sure we get the same number back when initializing counters.
self._reload_object(clear_counters=True)
self.assertEqual(self.review_request.issue_open_count, 7)
self.assertEqual(self.review_request.issue_dropped_count, 0)
self.assertEqual(self.review_request.issue_resolved_count, 0)
def test_init_with_replies(self):
"""Testing ReviewRequest issue counter initialization and replies."""
file_attachment = self.create_file_attachment(self.review_request)
review = self.create_review(self.review_request)
comment = self.create_file_attachment_comment(review, file_attachment,
issue_opened=True)
review.publish()
reply = self.create_reply(review)
self.create_file_attachment_comment(reply, file_attachment,
reply_to=comment,
issue_opened=True)
reply.publish()
self._reload_object(clear_counters=True)
self.assertEqual(self.review_request.issue_open_count, 1)
self.assertEqual(self.review_request.issue_resolved_count, 0)
self.assertEqual(self.review_request.issue_dropped_count, 0)
def test_save_reply_comment(self):
"""Testing ReviewRequest issue counter and saving reply comments."""
file_attachment = self.create_file_attachment(self.review_request)
review = self.create_review(self.review_request)
comment = self.create_file_attachment_comment(review, file_attachment,
issue_opened=True)
review.publish()
self._reload_object(clear_counters=True)
self.assertEqual(self.review_request.issue_open_count, 1)
self.assertEqual(self.review_request.issue_resolved_count, 0)
self.assertEqual(self.review_request.issue_dropped_count, 0)
reply = self.create_reply(review)
reply_comment = self.create_file_attachment_comment(
reply, file_attachment,
reply_to=comment,
issue_opened=True)
reply.publish()
self._reload_object()
self.assertEqual(self.review_request.issue_open_count, 1)
self.assertEqual(self.review_request.issue_resolved_count, 0)
self.assertEqual(self.review_request.issue_dropped_count, 0)
reply_comment.save()
self._reload_object()
self.assertEqual(self.review_request.issue_open_count, 1)
self.assertEqual(self.review_request.issue_resolved_count, 0)
self.assertEqual(self.review_request.issue_dropped_count, 0)
def _test_issue_counts(self, create_comment_func):
review = self.create_review(self.review_request)
# One comment without an issue opened.
create_comment_func(review, issue_opened=False)
# Three comments with an issue opened.
for i in range(3):
create_comment_func(review, issue_opened=True)
# Two comments that will have their issues dropped.
dropped_comments = [
create_comment_func(review, issue_opened=True)
for i in range(2)
]
# One comment that will have its issue resolved.
resolved_comments = [
create_comment_func(review, issue_opened=True)
]
        # The issue counts should end up being 0, since they'll initialize
        # during load.
self._reload_object(clear_counters=True)
self.assertEqual(self.review_request.issue_open_count, 0)
self.assertEqual(self.review_request.issue_resolved_count, 0)
self.assertEqual(self.review_request.issue_dropped_count, 0)
# Now publish. We should have 6 open issues, by way of incrementing
# during publish.
review.publish()
self._reload_object()
self.assertEqual(self.review_request.issue_open_count, 6)
self.assertEqual(self.review_request.issue_dropped_count, 0)
self.assertEqual(self.review_request.issue_resolved_count, 0)
# Make sure we get the same number back when initializing counters.
self._reload_object(clear_counters=True)
self.assertEqual(self.review_request.issue_open_count, 6)
self.assertEqual(self.review_request.issue_dropped_count, 0)
self.assertEqual(self.review_request.issue_resolved_count, 0)
# Set the issue statuses.
for comment in dropped_comments:
comment.issue_status = Comment.DROPPED
comment.save()
for comment in resolved_comments:
comment.issue_status = Comment.RESOLVED
comment.save()
self._reload_object()
self.assertEqual(self.review_request.issue_open_count, 3)
self.assertEqual(self.review_request.issue_dropped_count, 2)
self.assertEqual(self.review_request.issue_resolved_count, 1)
# Make sure we get the same number back when initializing counters.
self._reload_object(clear_counters=True)
self.assertEqual(self.review_request.issue_open_count, 3)
self.assertEqual(self.review_request.issue_dropped_count, 2)
self.assertEqual(self.review_request.issue_resolved_count, 1)
def _reload_object(self, clear_counters=False):
if clear_counters:
# 3 queries: One for the review request fetch, one for
# the issue status load, and one for updating the issue counts.
expected_query_count = 3
self._reset_counts()
else:
# One query for the review request fetch.
expected_query_count = 1
with self.assertNumQueries(expected_query_count):
self.review_request = \
ReviewRequest.objects.get(pk=self.review_request.pk)
def _reset_counts(self):
self.review_request.issue_open_count = None
self.review_request.issue_resolved_count = None
self.review_request.issue_dropped_count = None
self.review_request.save()
class PolicyTests(TestCase):
fixtures = ['test_users']
def setUp(self):
super(PolicyTests, self).setUp()
self.user = User.objects.create(username='testuser', password='')
self.anonymous = AnonymousUser()
def test_group_public(self):
"""Testing access to a public review group"""
group = Group.objects.create(name='test-group')
self.assertFalse(group.invite_only)
self.assertTrue(group.is_accessible_by(self.user))
self.assertTrue(group.is_accessible_by(self.anonymous))
self.assertIn(group, Group.objects.accessible(self.user))
self.assertIn(group, Group.objects.accessible(self.anonymous))
def test_group_invite_only_access_denied(self):
"""Testing no access to unjoined invite-only group"""
group = Group.objects.create(name='test-group', invite_only=True)
self.assertTrue(group.invite_only)
self.assertFalse(group.is_accessible_by(self.user))
self.assertFalse(group.is_accessible_by(self.anonymous))
self.assertNotIn(group, Group.objects.accessible(self.user))
self.assertNotIn(group, Group.objects.accessible(self.anonymous))
def test_group_invite_only_access_allowed(self):
"""Testing access to joined invite-only group"""
group = Group.objects.create(name='test-group', invite_only=True)
group.users.add(self.user)
self.assertTrue(group.invite_only)
self.assertTrue(group.is_accessible_by(self.user))
self.assertFalse(group.is_accessible_by(self.anonymous))
self.assertIn(group, Group.objects.accessible(self.user))
self.assertNotIn(group, Group.objects.accessible(self.anonymous))
def test_group_public_hidden(self):
"""Testing visibility of a hidden public group"""
group = Group.objects.create(name='test-group', visible=False)
self.assertFalse(group.visible)
self.assertTrue(group.is_accessible_by(self.user))
self.assertTrue(
group in Group.objects.accessible(self.user, visible_only=False))
self.assertFalse(
group in Group.objects.accessible(self.user, visible_only=True))
def test_group_invite_only_hidden_access_denied(self):
"""Testing visibility of a hidden unjoined invite-only group"""
group = Group.objects.create(name='test-group', visible=False,
invite_only=True)
self.assertFalse(group.visible)
self.assertTrue(group.invite_only)
self.assertFalse(group.is_accessible_by(self.user))
self.assertFalse(
group in Group.objects.accessible(self.user, visible_only=False))
self.assertFalse(
group in Group.objects.accessible(self.user, visible_only=True))
def test_group_invite_only_hidden_access_allowed(self):
"""Testing visibility of a hidden joined invite-only group"""
group = Group.objects.create(name='test-group', visible=False,
invite_only=True)
group.users.add(self.user)
self.assertFalse(group.visible)
self.assertTrue(group.invite_only)
self.assertTrue(group.is_accessible_by(self.user))
self.assertTrue(
group in Group.objects.accessible(self.user, visible_only=False))
self.assertTrue(
group in Group.objects.accessible(self.user, visible_only=True))
def test_group_invite_only_review_request_ownership(self):
"""Testing visibility of review requests assigned to invite-only
groups by a non-member
"""
group = Group.objects.create(name='test-group', visible=False,
invite_only=True)
review_request = self.create_review_request(publish=True,
submitter=self.user)
review_request.target_groups.add(group)
self.assertTrue(review_request.is_accessible_by(self.user))
@add_fixtures(['test_scmtools'])
def test_repository_public(self):
"""Testing access to a public repository"""
tool = Tool.objects.get(name='CVS')
repo = Repository.objects.create(name='Test1', path='path1', tool=tool)
self.assertTrue(repo.public)
self.assertTrue(repo.is_accessible_by(self.user))
self.assertTrue(repo.is_accessible_by(self.anonymous))
@add_fixtures(['test_scmtools'])
def test_repository_private_access_denied(self):
"""Testing no access to a private repository"""
tool = Tool.objects.get(name='CVS')
repo = Repository.objects.create(name='Test1', path='path1', tool=tool,
public=False)
self.assertFalse(repo.public)
self.assertFalse(repo.is_accessible_by(self.user))
self.assertFalse(repo.is_accessible_by(self.anonymous))
@add_fixtures(['test_scmtools'])
def test_repository_private_access_allowed_by_user(self):
"""Testing access to a private repository with user added"""
tool = Tool.objects.get(name='CVS')
repo = Repository.objects.create(name='Test1', path='path1', tool=tool,
public=False)
repo.users.add(self.user)
self.assertFalse(repo.public)
self.assertTrue(repo.is_accessible_by(self.user))
self.assertFalse(repo.is_accessible_by(self.anonymous))
@add_fixtures(['test_scmtools'])
def test_repository_private_access_allowed_by_review_group(self):
"""Testing access to a private repository with joined review group
added
"""
group = Group.objects.create(name='test-group', invite_only=True)
group.users.add(self.user)
tool = Tool.objects.get(name='CVS')
repo = Repository.objects.create(name='Test1', path='path1', tool=tool,
public=False)
repo.review_groups.add(group)
self.assertFalse(repo.public)
self.assertTrue(repo.is_accessible_by(self.user))
self.assertFalse(repo.is_accessible_by(self.anonymous))
def test_review_request_public(self):
"""Testing access to a public review request"""
review_request = self.create_review_request(publish=True)
self.assertTrue(review_request.is_accessible_by(self.user))
self.assertTrue(review_request.is_accessible_by(self.anonymous))
def test_review_request_with_invite_only_group(self):
"""Testing no access to a review request with only an unjoined
invite-only group
"""
group = Group(name='test-group', invite_only=True)
group.save()
review_request = self.create_review_request(publish=True)
review_request.target_groups.add(group)
self.assertFalse(review_request.is_accessible_by(self.user))
self.assertFalse(review_request.is_accessible_by(self.anonymous))
def test_review_request_with_invite_only_group_and_target_user(self):
"""Testing access to a review request with specific target user and
invite-only group
"""
group = Group(name='test-group', invite_only=True)
group.save()
review_request = self.create_review_request(publish=True)
review_request.target_groups.add(group)
review_request.target_people.add(self.user)
self.assertTrue(review_request.is_accessible_by(self.user))
self.assertFalse(review_request.is_accessible_by(self.anonymous))
@add_fixtures(['test_scmtools'])
def test_review_request_with_private_repository(self):
"""Testing no access to a review request with a private repository"""
Group.objects.create(name='test-group', invite_only=True)
review_request = self.create_review_request(create_repository=True,
publish=True)
review_request.repository.public = False
review_request.repository.save()
self.assertFalse(review_request.is_accessible_by(self.user))
self.assertFalse(review_request.is_accessible_by(self.anonymous))
@add_fixtures(['test_scmtools'])
def test_review_request_with_private_repository_allowed_by_user(self):
"""Testing access to a review request with a private repository with
user added
"""
Group.objects.create(name='test-group', invite_only=True)
review_request = self.create_review_request(create_repository=True,
publish=True)
review_request.repository.public = False
review_request.repository.users.add(self.user)
review_request.repository.save()
self.assertTrue(review_request.is_accessible_by(self.user))
self.assertFalse(review_request.is_accessible_by(self.anonymous))
@add_fixtures(['test_scmtools'])
def test_review_request_with_private_repository_allowed_by_review_group(self):
"""Testing access to a review request with a private repository with
review group added
"""
group = Group.objects.create(name='test-group', invite_only=True)
group.users.add(self.user)
review_request = self.create_review_request(create_repository=True,
publish=True)
review_request.repository.public = False
review_request.repository.review_groups.add(group)
review_request.repository.save()
self.assertTrue(review_request.is_accessible_by(self.user))
self.assertFalse(review_request.is_accessible_by(self.anonymous))
class UserInfoboxTests(TestCase):
def test_unicode(self):
"""Testing user_infobox with a user with non-ascii characters"""
user = User.objects.create_user('test', '[email protected]')
user.first_name = 'Test\u21b9'
user.last_name = 'User\u2729'
user.save()
self.client.get(local_site_reverse('user-infobox', args=['test']))
class MarkdownUtilsTests(TestCase):
UNESCAPED_TEXT = r'\`*_{}[]()>#+-.!'
ESCAPED_TEXT = r'\\\`\*\_\{\}\[\]\(\)\>#+-.\!'
def test_markdown_escape(self):
"""Testing markdown_escape"""
self.assertEqual(markdown_escape(self.UNESCAPED_TEXT),
self.ESCAPED_TEXT)
def test_markdown_escape_periods(self):
"""Testing markdown_escape with '.' placement"""
self.assertEqual(
markdown_escape('Line. 1.\n'
'1. Line. 2.\n'
'1.2. Line. 3.\n'
' 1. Line. 4.'),
('Line. 1.\n'
'1\\. Line. 2.\n'
'1\\.2\\. Line. 3.\n'
' 1\\. Line. 4.'))
def test_markdown_escape_atx_headers(self):
"""Testing markdown_escape with '#' placement"""
self.assertEqual(
markdown_escape('### Header\n'
' ## Header ##\n'
'Not # a header'),
('\\#\\#\\# Header\n'
' \\#\\# Header ##\n'
'Not # a header'))
def test_markdown_escape_hyphens(self):
"""Testing markdown_escape with '-' placement"""
self.assertEqual(
markdown_escape('Header\n'
'------\n'
'\n'
'- List item\n'
' - List item\n'
'Just hyp-henated'),
('Header\n'
'\\-\\-\\-\\-\\-\\-\n'
'\n'
'\\- List item\n'
' \\- List item\n'
'Just hyp-henated'))
def test_markdown_escape_plusses(self):
"""Testing markdown_escape with '+' placement"""
self.assertEqual(
markdown_escape('+ List item\n'
'a + b'),
('\\+ List item\n'
'a + b'))
def test_markdown_escape_underscores(self):
"""Testing markdown_escape with '_' placement"""
self.assertEqual(markdown_escape('_foo_'), r'\_foo\_')
self.assertEqual(markdown_escape('__foo__'), r'\_\_foo\_\_')
self.assertEqual(markdown_escape(' _foo_ '), r' \_foo\_ ')
self.assertEqual(markdown_escape('f_o_o'), r'f\_o\_o')
self.assertEqual(markdown_escape('f_o_o_'), r'f\_o\_o\_')
self.assertEqual(markdown_escape('foo_ _bar'), r'foo\_ \_bar')
self.assertEqual(markdown_escape('foo__bar'), r'foo\_\_bar')
self.assertEqual(markdown_escape('foo\n_bar'), 'foo\n\\_bar')
self.assertEqual(markdown_escape('(_foo_)'), r'(\_foo\_)')
def test_markdown_escape_asterisks(self):
"""Testing markdown_escape with '*' placement"""
self.assertEqual(markdown_escape('*foo*'), r'\*foo\*')
self.assertEqual(markdown_escape('**foo**'), r'\*\*foo\*\*')
self.assertEqual(markdown_escape(' *foo* '), r' \*foo\* ')
self.assertEqual(markdown_escape('f*o*o'), r'f*o*o')
self.assertEqual(markdown_escape('f*o*o*'), r'f*o*o\*')
self.assertEqual(markdown_escape('foo* *bar'), r'foo\* \*bar')
self.assertEqual(markdown_escape('foo**bar'), r'foo**bar')
self.assertEqual(markdown_escape('foo\n*bar'), 'foo\n\\*bar')
def test_markdown_escape_parens(self):
"""Testing markdown_escape with '(' and ')' placement"""
self.assertEqual(markdown_escape('[name](link)'), r'\[name\]\(link\)')
self.assertEqual(markdown_escape('(link)'), r'(link)')
self.assertEqual(markdown_escape('](link)'), r'\](link)')
self.assertEqual(markdown_escape('[foo] ](link)'),
r'\[foo\] \](link)')
def test_markdown_unescape(self):
"""Testing markdown_unescape"""
self.assertEqual(markdown_unescape(self.ESCAPED_TEXT),
self.UNESCAPED_TEXT)
| 1tush/reviewboard | reviewboard/reviews/tests.py | Python | mit | 108,697 | 0.000037 |
import avango
import avango.script
import avango.gua.skelanim
from examples_common.GuaVE import GuaVE
import examples_common.navigator
from avango.gua.skelanim.CharacterControl import CharacterControl
from avango.gua.skelanim.AnimationControl import AnimationConfig
### CAMERA CONTROL VIA XBOX CONTROLLER:
def camera_control_xbox_connect(camera_control, device_sensor):
# optional / additional xbox controller settings:
camera_control.XBOX_X.connect_from(device_sensor.Value2)
camera_control.XBOX_Y.connect_from(device_sensor.Value3)
camera_control.XBOX_LZ.connect_from(device_sensor.Value4)
camera_control.XBOX_RZ.connect_from(device_sensor.Value5)
def camera_control_xbox_disconnect(camera_control, device_sensor):
# optional / additional xbox controller settings:
camera_control.XBOX_X.disconnect()
camera_control.XBOX_Y.disconnect()
camera_control.XBOX_LZ.disconnect()
camera_control.XBOX_RZ.disconnect()
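# Example wiring (illustrative sketch; assumes the avango daemon exposes the
# pad as station "device-xbox-1", as in start() below):
#   device_sensor = avango.daemon.nodes.DeviceSensor(
#       DeviceService=avango.daemon.DeviceService())
#   device_sensor.Station.value = "device-xbox-1"
#   camera_control_xbox_connect(camera_control, device_sensor)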
def start():
# setup scenegraph
graph = avango.gua.nodes.SceneGraph(Name="scenegraph")
#environment:
tri_mesh_loader = avango.gua.nodes.TriMeshLoader()
path = "/opt/project_animation/demo/data/objects/cube2.obj"
flags = avango.gua.LoaderFlags.NORMALIZE_POSITION \
| avango.gua.LoaderFlags.NORMALIZE_SCALE \
| avango.gua.LoaderFlags.OPTIMIZE_GEOMETRY \
| avango.gua.LoaderFlags.MAKE_PICKABLE \
| avango.gua.LoaderFlags.LOAD_MATERIALS
plane = \
tri_mesh_loader.create_geometry_from_file("cube",
path,
flags)
plane.Transform.value *= \
avango.gua.make_scale_mat(10.0, 0.01, 10.0) *\
avango.gua.make_trans_mat(0, -3, 0)
path2 = "/opt/project_animation/demo/data/objects/glass_2_3_nm.TGA"
plane.Material.value.set_uniform("NormalMap",
path2)
sunlight = avango.gua.nodes.LightNode(
Type=avango.gua.LightType.SUN,
Name="light",
Color=avango.gua.Color(245.0/255.0, 246.0/255.0, 178.0/255.0),
Brightness=5.0,
Transform=(avango.gua.make_rot_mat(119.5, 0.0, 1.0, 0.0) *
avango.gua.make_rot_mat(-10, 1.0, 0.0, 0.0))
)
#view setup:
size = avango.gua.Vec2ui(2560, 1440)
window = avango.gua.nodes.GlfwWindow(
Size=size,
LeftResolution=size
)
window.CursorMode.value = 2
window.EnableFullscreen.value = True
cam = avango.gua.nodes.CameraNode(LeftScreenPath="/screen",
SceneGraph="scenegraph",
Resolution=size,
OutputWindowName="window")
#render pipeline
pipeline_description = avango.gua.nodes.PipelineDescription(
Passes=[
avango.gua.nodes.TriMeshPassDescription(),
avango.gua.nodes.LightVisibilityPassDescription(),
avango.gua.skelanim.nodes.SkeletalAnimationPassDescription(),
avango.gua.nodes.ResolvePassDescription(),
avango.gua.nodes.SSAAPassDescription(),
])
pipeline_description.Passes.value[3].EnableSSAO.value = True
pipeline_description.Passes.value[3].SSAORadius.value = 2.0
pipeline_description.Passes.value[3].SSAOIntensity.value = 2.0
pipeline_description.Passes.value[3].BackgroundMode.value = 1
pipeline_description.Passes.value[3].BackgroundTexture.value = \
"/opt/avango/master/examples/picking/data/textures/skymap.jpg"
pipeline_description.Passes.value[3].ToneMappingMode.value = \
avango.gua.ToneMappingMode.LINEAR
#pipeline_description.EnableABuffer.value = True
cam.PipelineDescription.value = pipeline_description
cam.Transform.value = avango.gua.make_trans_mat(0.0, 0.0, 0.4)
cam.FarClip.value = 300
cam.NearClip.value = 0.01
screen = avango.gua.nodes.ScreenNode(Name="screen", Width=0.8, Height=0.45)
screen.Children.value = [cam]
screen.Transform.value = avango.gua.make_trans_mat(0, 0.1, -2)
graph.Root.value.Children.value = [screen, plane, sunlight]
avango.gua.register_window("window", window)
#setup viewer
viewer = avango.gua.nodes.Viewer()
viewer.SceneGraphs.value = [graph]
viewer.Windows.value = [window]
window.CursorMode.value = avango.gua.CursorMode.DISABLED
#navigation
navigator = examples_common.navigator.Navigator()
#navigator.StartLocation.value = screen.Transform.value.get_translate()
navigator.StartRotation.value = avango.gua.Vec2(0, 790)
navigator.OutTransform.connect_from(screen.Transform)
navigator.RotationSpeed.value = 0.2
navigator.MotionSpeed.value = 0.004
# enable navigation
screen.Transform.connect_from(navigator.OutTransform)
#XBOX Controller
device_sensor = avango.daemon.nodes.DeviceSensor(
DeviceService=avango.daemon.DeviceService())
device_sensor.Station.value = "device-xbox-1"
#unreal tournament characters
skel_mesh_loader = avango.gua.skelanim.nodes.SkeletalAnimationLoader()
#LOOP MODE DEMO BOB
path_bob = "/opt/project_animation/Assets/UnrealTournament/Characters/" +\
"Necris_Male/necris_male_ut4_SKELMESH.FBX"
flags_bob = avango.gua.LoaderFlags.LOAD_MATERIALS \
| avango.gua.LoaderFlags.NORMALIZE_SCALE
bob_loop = \
skel_mesh_loader.create_geometry_from_file("bob_loop",
path_bob,
flags_bob)
bob_loop.Transform.value = avango.gua.make_trans_mat(0.0, -0.01, 0.0) *\
bob_loop.Transform.value *\
avango.gua.make_rot_mat(-90.0, 1.0, 0.0, 0.0) *\
avango.gua.make_scale_mat(0.2, 0.2, 0.2)
graph.Root.value.Children.value.append(bob_loop)
#load animations
path_idle = "/opt/project_animation/Assets/" + \
"UnrealTournament/UniversalAnimations/Idle_Ready_Rif.FBX"
path_run = "/opt/project_animation/Assets/" + \
"UnrealTournament/UniversalAnimations/Run_Fwd_Rif.FBX"
bob_loop.load_animation(path_idle, "idle")
bob_loop.load_animation(path_idle, "idle2")
bob_loop.load_animation(path_run, "run_fwd")
bob_loop.load_animation(path_run, "run_fwd2")
#character control
character_control_loop = CharacterControl()
character_control_loop.my_constructor(bob_loop, bob_loop,
AnimationConfig("idle"), window)
character_control_loop.on_animation_end("idle",
AnimationConfig("run_fwd",
loop=True,
speed=1.0,
duration=3.0))
character_control_loop.on_animation_end("run_fwd",
AnimationConfig("idle2",
loop=False,
speed=1.0,
duration=3.0))
character_control_loop.on_animation_end("idle2",
AnimationConfig("run_fwd2",
loop=False,
speed=1.0,
duration=3.0))
character_control_loop.on_animation_end("run_fwd2",
AnimationConfig("idle",
loop=True,
speed=1.0,
duration=3.0))
#SPEED DEMO BOB
bob_speed = skel_mesh_loader.create_geometry_from_file("bob_speed",
path_bob,
flags_bob)
bob_speed.Transform.value = avango.gua.make_trans_mat(0.1, -0.01, 0.0) * \
bob_speed.Transform.value * \
avango.gua.make_rot_mat(-90.0, 1.0, 0.0, 0.0) * \
avango.gua.make_scale_mat(0.2, 0.2, 0.2)
graph.Root.value.Children.value.append(bob_speed)
#load animations
bob_speed.load_animation(path_idle, "idle")
bob_speed.load_animation(path_idle, "idle2")
bob_speed.load_animation(path_run, "run_fwd")
bob_speed.load_animation(path_run, "run_fwd2")
#character control
character_control_speed = CharacterControl()
character_control_speed.my_constructor(bob_speed, bob_speed,
AnimationConfig("idle"), window)
character_control_speed.on_animation_end("idle",
AnimationConfig("run_fwd",
loop=True,
speed=1.0,
duration=3.0))
character_control_speed.on_animation_end("run_fwd",
AnimationConfig("idle2",
loop=True,
speed=1.5,
duration=3.0))
character_control_speed.on_animation_end("idle2",
AnimationConfig("run_fwd2",
loop=True,
speed=1.5,
duration=3.0))
character_control_speed.on_animation_end("run_fwd2",
AnimationConfig("idle",
loop=True,
speed=1.0,
duration=3.0))
#DURATION DEMO BOB
bob_duration = skel_mesh_loader.create_geometry_from_file("bob_duration",
path_bob,
flags_bob)
bob_duration.Transform.value = \
avango.gua.make_trans_mat(0.2, -0.01, 0.0) * \
bob_duration.Transform.value * \
avango.gua.make_rot_mat(-90.0, 1.0, 0.0, 0.0) * \
avango.gua.make_scale_mat(0.2, 0.2, 0.2)
graph.Root.value.Children.value.append(bob_duration)
#load animations
bob_duration.load_animation(path_idle, "idle")
bob_duration.load_animation(path_idle, "idle2")
bob_duration.load_animation(path_run, "run_fwd")
bob_duration.load_animation(path_run, "run_fwd2")
#character control
character_control_duration = CharacterControl()
character_control_duration.my_constructor(bob_duration, bob_duration,
AnimationConfig("idle"), window)
character_control_duration.on_animation_end("idle",
AnimationConfig("run_fwd",
loop=True,
speed=1.0,
duration=1.0))
character_control_duration.on_animation_end("run_fwd",
AnimationConfig("idle2",
loop=True,
speed=1.0,
duration=2.0))
character_control_duration.on_animation_end("idle2",
AnimationConfig("run_fwd2",
loop=True,
speed=1.0,
duration=2.0))
character_control_duration.on_animation_end("run_fwd2",
AnimationConfig("idle",
loop=True,
speed=1.0,
duration=1.0))
#start animation sequence
character_control_loop.switch_animation(AnimationConfig("idle",
loop=False))
character_control_speed.switch_animation(AnimationConfig("idle",
loop=False))
character_control_duration.switch_animation(AnimationConfig("idle",
loop=False))
guaVE = GuaVE()
guaVE.start(locals(), globals())
viewer.run()
if __name__ == '__main__':
start()
| yaroslav-tarasov/avango | avango-skelanim/examples/skeletal_character_control/main_animation_config.py | Python | lgpl-3.0 | 13,239 | 0.001586 |
__author__ = 'oier'
import json
from flask import Flask, make_response
app = Flask(__name__)
import seaborn as sns
import numpy as np
import pandas as pd
import os
from datetime import datetime
import matplotlib.pyplot as plt
import sys
from matplotlib.figure import Figure
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from io import StringIO
from sklearn import linear_model
from models import InputForm, ValueSelector
from flask import Flask, render_template, request
from compute import compute, load_data, line_plot
@app.route('/')
def index():
return 'Hello World!'
def form_values(request):
data = load_data()
form = ValueSelector(request)
form.value.choices = [(k,i) for k,i in enumerate(data.columns)]
return(form)
@app.route('/blood', methods=['GET', 'POST'])
def blood():
form = form_values(request.form)
    if request.method == 'POST':  # and form.validate():
result = line_plot(form.value.data)
else:
print("False")
result = None
return render_template('plot.html',
form=form, result=result)
@app.route('/vib1', methods=['GET', 'POST'])
def vib1():
    form = InputForm(request.form)
if request.method == 'POST' and form.validate():
result = compute(form.A.data, form.b.data,
form.w.data, form.T.data)
else:
result = None
return render_template('view_plain.html',
form=form, result=result)
if __name__ == '__main__':
app.run() | oiertwo/vampyr | flask/index.py | Python | mit | 1,592 | 0.014447 |
"""
SQL support for multilingual models
"""
| ziima/django-multilingual-ds9 | multilingual/models/sql/__init__.py | Python | mit | 44 | 0 |
# Copyright 2018 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Change nullable value of path_id
Revision ID: 13ecc2dd6f7f
Revises: 4747cc26b9c6
Create Date: 2018-07-24 16:47:01.378226
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '13ecc2dd6f7f'
down_revision = '4747cc26b9c6'
from alembic import op
import sqlalchemy as sa
def upgrade(active_plugins=None, options=None):
op.alter_column('vnffgchains', 'path_id',
existing_type=sa.String(length=255),
nullable=True)
op.alter_column('vnffgnfps', 'path_id',
existing_type=sa.String(length=255),
nullable=True)
| openstack/tacker | tacker/db/migration/alembic_migrations/versions/13ecc2dd6f7f_change_nullable_value_of_path_id.py | Python | apache-2.0 | 1,218 | 0.004926 |
import argparse
from collections import Counter
from itertools import chain
from numpy import save, array
from os import listdir
from os import path
import re
import sys
import dataproc as dp
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--dir', type=str, help='Source directory to conll-data.')
parser.add_argument('-n', '--name', type=str, help='Specifies the corpus name.')
parser.add_argument('-u', '--maxlength', type=int, default=sys.maxsize, help='Max sentence length to avoid memory errors.')
parser.add_argument('-l', '--minlength', type=int, default=0, help='Minimal sentence length, default is 0.')
parser.add_argument('-r', '--removeunknown', type=int, default=0, help='Determines if sentences containing rare words should be dropped. Any number != 0 drops them; 0 keeps them, mapping rare words to <UNKNOWN>.')
parser.add_argument('-w', '--wordfreq', type=int, default=1, help='Minimum frequency of words; words with lower'
                                                                 +' frequency will be replaced by <UNKNOWN>. Default=1')
parser.add_argument('-c', '--case', type=int, default=1, help='Determines if the vocabulary should be case sensitive. It is on by default; 0 makes it case-insensitive.')
parser.add_argument('-p', '--punctuation', type=int, default=1, help='0 - delete punctuation; any other number - keep punctuation')
args = parser.parse_args()
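# Example invocation (hypothetical corpus directory and name):
#   python conll2hdf5.py -d ./corpora/tiger-conll -n tiger -w 5 -c 0 -u 50
# reads all *.conll files in the directory, lowercases tokens, replaces words
# seen fewer than 5 times with <UNKNOWN>, drops sentences longer than 50
# tokens, and writes ./data/tiger_data.hdf5 plus ./data/tiger_ix2tok.npy.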
target_file = './data/'+args.name+'_data.hdf5'
alphabet_file = './data/'+args.name+'_ix2tok.npy'
len_upper_limit = args.maxlength
len_lower_limit = args.minlength
word_freq_limit = args.wordfreq
EOS = '<EOS>'
if args.punctuation:
def filter_punc(s):
return s
else:
def filter_punc(s):
        return re.sub(r'\.|\?|!|;|:|,', '', s)
if args.case:
def transform(s):
return s
else:
def transform(s):
return s.lower()
seqs = []
drop_seqs = []
files = filter(lambda f: f.endswith('.conll'), listdir(args.dir))
for fname in files:
with open(path.join(args.dir, fname)) as f:
data = f.read()
for sentence in data.split('\n\n'):
seq = []
for line in sentence.split('\n'):
if line.strip():
word = filter_punc(transform(line.split('\t')[1]))
if word: seq.append(word)
if len(seq) <= len_upper_limit and len(seq) >= len_lower_limit:
seq.append(EOS)
seqs.append(seq)
else:
drop_seqs.append(seq)
counter = Counter(list(chain(*seqs)))
ix_seq = []
ix_seqs = []
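# Index 0 is reserved for <UNKNOWN> only when rare words are kept (not
# dropped) and the frequency threshold can actually mark any word as rare.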
tok2ix = {} if args.removeunknown or args.wordfreq == 1 else {'<UNKNOWN>': 0}
ix = len(tok2ix)
for seq in seqs:
for tok in seq:
if counter[tok] < word_freq_limit:
if args.removeunknown:
ix_seq = []
break
else:
ix_seq.append(0)
else:
if tok in tok2ix:
ix_seq.append(tok2ix[tok])
else:
tok2ix[tok] = ix
ix_seq.append(ix)
ix += 1
if ix_seq:
ix_seqs.append(ix_seq)
else:
drop_seqs.append(ix_seq)
ix_seq = []
seq_arr = array(ix_seqs)
print('Dropping', len(drop_seqs), 'sentences containing', len(list(chain(*drop_seqs))), 'tokens.')
print(len(ix_seqs), 'sentences with', len(list(chain(*ix_seqs))), 'tokens remaining.')
print('Vocabulary size:', len(tok2ix))
# save sentences
split_n = int(.9*seq_arr.shape[0])
dp.split_hdf5_file(target_file, seq_arr[:split_n], seq_arr[split_n:], varlen=True)
# save vocab indexing
ix2tok = {v: k for k, v in tok2ix.items()}
save(alphabet_file, array(ix2tok)) | Copper-Head/the-three-stooges | conll2hdf5.py | Python | mit | 3,576 | 0.004754 |
from .user_permission import *
from .application_permission import *
from .application_permission_relation import *
from .user_group_permission import *
| skyoo/jumpserver | apps/perms/api/application/__init__.py | Python | gpl-2.0 | 153 | 0 |
## Open a serial connection with Arduino.
import time
import serial
ser = serial.Serial("COM9", 9600) # Open serial port that Arduino is using
time.sleep(3) # Wait 3 seconds for Arduino to reset
print ser # Print serial config
print "Sending serial command to OPEN the dust cover."
ser.write("O")
print "Closing serial connection."
ser.close()
# Reminder to close the connection when finished
if(ser.isOpen()):
print "Serial connection is still open."
| kellogg76/ArduinoTelescopeDustCover | open.py | Python | mit | 507 | 0.003945 |
"""
Parser and evaluator for FormulaResponse and NumericalResponse
Uses pyparsing for parsing. The main entry point is evaluator().
"""
import math
import numbers
import operator
import numpy
import scipy.constants
from pyparsing import (
CaselessLiteral,
Combine,
Forward,
Group,
Literal,
MatchFirst,
Optional,
ParseResults,
Suppress,
Word,
ZeroOrMore,
alphanums,
alphas,
nums,
stringEnd
)
import functions
# Functions available by default
# We use scimath variants which give complex results when needed. For example:
# np.sqrt(-4+0j) = 2j
# np.sqrt(-4) = nan, but
# np.lib.scimath.sqrt(-4) = 2j
DEFAULT_FUNCTIONS = {
'sin': numpy.sin,
'cos': numpy.cos,
'tan': numpy.tan,
'sec': functions.sec,
'csc': functions.csc,
'cot': functions.cot,
'sqrt': numpy.lib.scimath.sqrt,
'log10': numpy.lib.scimath.log10,
'log2': numpy.lib.scimath.log2,
'ln': numpy.lib.scimath.log,
'exp': numpy.exp,
'arccos': numpy.lib.scimath.arccos,
'arcsin': numpy.lib.scimath.arcsin,
'arctan': numpy.arctan,
'arcsec': functions.arcsec,
'arccsc': functions.arccsc,
'arccot': functions.arccot,
'abs': numpy.abs,
'fact': math.factorial,
'factorial': math.factorial,
'sinh': numpy.sinh,
'cosh': numpy.cosh,
'tanh': numpy.tanh,
'sech': functions.sech,
'csch': functions.csch,
'coth': functions.coth,
'arcsinh': numpy.arcsinh,
'arccosh': numpy.arccosh,
'arctanh': numpy.lib.scimath.arctanh,
'arcsech': functions.arcsech,
'arccsch': functions.arccsch,
'arccoth': functions.arccoth
}
DEFAULT_VARIABLES = {
'i': numpy.complex(0, 1),
'j': numpy.complex(0, 1),
'e': numpy.e,
'pi': numpy.pi,
'k': scipy.constants.k, # Boltzmann: 1.3806488e-23 (Joules/Kelvin)
'c': scipy.constants.c, # Light Speed: 2.998e8 (m/s)
'T': 298.15, # Typical room temperature: 298.15 (Kelvin), same as 25C/77F
'q': scipy.constants.e # Fund. Charge: 1.602176565e-19 (Coulombs)
}
# We eliminated the following extreme suffixes:
# P (1e15), E (1e18), Z (1e21), Y (1e24),
# f (1e-15), a (1e-18), z (1e-21), y (1e-24)
# since they're rarely used, and potentially confusing.
# They may also conflict with variables if we ever allow e.g.
# 5R instead of 5*R
SUFFIXES = {
'%': 0.01, 'k': 1e3, 'M': 1e6, 'G': 1e9, 'T': 1e12,
'c': 1e-2, 'm': 1e-3, 'u': 1e-6, 'n': 1e-9, 'p': 1e-12
}
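# e.g. "5k" -> 5000.0 and "2.5%" -> 0.025, once handled by super_float() below.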
class UndefinedVariable(Exception):
"""
Indicate when a student inputs a variable which was not expected.
"""
pass
def lower_dict(input_dict):
"""
Convert all keys in a dictionary to lowercase; keep their original values.
Keep in mind that it is possible (but not useful?) to define different
variables that have the same lowercase representation. It would be hard to
tell which is used in the final dict and which isn't.
"""
return {k.lower(): v for k, v in input_dict.iteritems()}
# The following few functions define evaluation actions, which are run on lists
# of results from each parse component. They convert the strings and (previously
# calculated) numbers into the number that component represents.
def super_float(text):
"""
Like float, but with SI extensions. 1k goes to 1000.
"""
if text[-1] in SUFFIXES:
return float(text[:-1]) * SUFFIXES[text[-1]]
else:
return float(text)
def eval_number(parse_result):
"""
Create a float out of its string parts.
e.g. [ '7.13', 'e', '3' ] -> 7130
Calls super_float above.
"""
return super_float("".join(parse_result))
def eval_atom(parse_result):
"""
Return the value wrapped by the atom.
In the case of parenthesis, ignore them.
"""
# Find first number in the list
result = next(k for k in parse_result if isinstance(k, numbers.Number))
return result
def eval_power(parse_result):
"""
Take a list of numbers and exponentiate them, right to left.
e.g. [ 2, 3, 2 ] -> 2^3^2 = 2^(3^2) -> 512
    (not to be interpreted as (2^3)^2 = 64)
"""
# `reduce` will go from left to right; reverse the list.
parse_result = reversed(
[k for k in parse_result
if isinstance(k, numbers.Number)] # Ignore the '^' marks.
)
# Having reversed it, raise `b` to the power of `a`.
power = reduce(lambda a, b: b ** a, parse_result)
return power
def eval_parallel(parse_result):
"""
Compute numbers according to the parallel resistors operator.
BTW it is commutative. Its formula is given by
out = 1 / (1/in1 + 1/in2 + ...)
e.g. [ 1, 2 ] -> 2/3
Return NaN if there is a zero among the inputs.
"""
if len(parse_result) == 1:
return parse_result[0]
if 0 in parse_result:
return float('nan')
reciprocals = [1. / e for e in parse_result
if isinstance(e, numbers.Number)]
return 1. / sum(reciprocals)
def eval_sum(parse_result):
"""
Add the inputs, keeping in mind their sign.
[ 1, '+', 2, '-', 3 ] -> 0
Allow a leading + or -.
"""
total = 0.0
current_op = operator.add
for token in parse_result:
if token == '+':
current_op = operator.add
elif token == '-':
current_op = operator.sub
else:
total = current_op(total, token)
return total
def eval_product(parse_result):
"""
Multiply the inputs.
[ 1, '*', 2, '/', 3 ] -> 0.66
"""
prod = 1.0
current_op = operator.mul
for token in parse_result:
if token == '*':
current_op = operator.mul
elif token == '/':
current_op = operator.truediv
else:
prod = current_op(prod, token)
return prod
def add_defaults(variables, functions, case_sensitive):
"""
    Create dictionaries with both the default and user-defined variables
    and functions.
"""
all_variables = dict(DEFAULT_VARIABLES)
all_functions = dict(DEFAULT_FUNCTIONS)
all_variables.update(variables)
all_functions.update(functions)
if not case_sensitive:
all_variables = lower_dict(all_variables)
all_functions = lower_dict(all_functions)
return (all_variables, all_functions)
def evaluator(variables, functions, math_expr, case_sensitive=False):
"""
Evaluate an expression; that is, take a string of math and return a float.
-Variables are passed as a dictionary from string to value. They must be
python numbers.
-Unary functions are passed as a dictionary from string to function.
"""
# No need to go further.
if math_expr.strip() == "":
return float('nan')
# Parse the tree.
math_interpreter = ParseAugmenter(math_expr, case_sensitive)
math_interpreter.parse_algebra()
# Get our variables together.
all_variables, all_functions = add_defaults(variables, functions, case_sensitive)
# ...and check them
math_interpreter.check_variables(all_variables, all_functions)
# Create a recursion to evaluate the tree.
if case_sensitive:
casify = lambda x: x
else:
casify = lambda x: x.lower() # Lowercase for case insens.
evaluate_actions = {
'number': eval_number,
'variable': lambda x: all_variables[casify(x[0])],
'function': lambda x: all_functions[casify(x[0])](x[1]),
'atom': eval_atom,
'power': eval_power,
'parallel': eval_parallel,
'product': eval_product,
'sum': eval_sum
}
return math_interpreter.reduce_tree(evaluate_actions)
class ParseAugmenter(object):
"""
Holds the data for a particular parse.
Retains the `math_expr` and `case_sensitive` so they needn't be passed
around method to method.
Eventually holds the parse tree and sets of variables as well.
"""
def __init__(self, math_expr, case_sensitive=False):
"""
Create the ParseAugmenter for a given math expression string.
Do the parsing later, when called like `OBJ.parse_algebra()`.
"""
self.case_sensitive = case_sensitive
self.math_expr = math_expr
self.tree = None
self.variables_used = set()
self.functions_used = set()
def vpa(tokens):
"""
When a variable is recognized, store it in `variables_used`.
"""
varname = tokens[0][0]
self.variables_used.add(varname)
def fpa(tokens):
"""
When a function is recognized, store it in `functions_used`.
"""
varname = tokens[0][0]
self.functions_used.add(varname)
self.variable_parse_action = vpa
self.function_parse_action = fpa
def parse_algebra(self):
"""
Parse an algebraic expression into a tree.
Store a `pyparsing.ParseResult` in `self.tree` with proper groupings to
reflect parenthesis and order of operations. Leave all operators in the
tree and do not parse any strings of numbers into their float versions.
Adding the groups and result names makes the `repr()` of the result
really gross. For debugging, use something like
print OBJ.tree.asXML()
"""
# 0.33 or 7 or .34 or 16.
number_part = Word(nums)
inner_number = (number_part + Optional("." + Optional(number_part))) | ("." + number_part)
# pyparsing allows spaces between tokens--`Combine` prevents that.
inner_number = Combine(inner_number)
# SI suffixes and percent.
number_suffix = MatchFirst(Literal(k) for k in SUFFIXES.keys())
# 0.33k or 17
plus_minus = Literal('+') | Literal('-')
number = Group(
Optional(plus_minus) +
inner_number +
Optional(CaselessLiteral("E") + Optional(plus_minus) + number_part) +
Optional(number_suffix)
)
number = number("number")
# Predefine recursive variables.
expr = Forward()
# Handle variables passed in. They must start with letters/underscores
# and may contain numbers afterward.
inner_varname = Word(alphas + "_", alphanums + "_")
varname = Group(inner_varname)("variable")
varname.setParseAction(self.variable_parse_action)
# Same thing for functions.
function = Group(inner_varname + Suppress("(") + expr + Suppress(")"))("function")
function.setParseAction(self.function_parse_action)
atom = number | function | varname | "(" + expr + ")"
atom = Group(atom)("atom")
# Do the following in the correct order to preserve order of operation.
pow_term = atom + ZeroOrMore("^" + atom)
pow_term = Group(pow_term)("power")
par_term = pow_term + ZeroOrMore('||' + pow_term) # 5k || 4k
par_term = Group(par_term)("parallel")
prod_term = par_term + ZeroOrMore((Literal('*') | Literal('/')) + par_term) # 7 * 5 / 4
prod_term = Group(prod_term)("product")
sum_term = Optional(plus_minus) + prod_term + ZeroOrMore(plus_minus + prod_term) # -5 + 4 - 3
sum_term = Group(sum_term)("sum")
# Finish the recursion.
expr << sum_term # pylint: disable=pointless-statement
self.tree = (expr + stringEnd).parseString(self.math_expr)[0]
def reduce_tree(self, handle_actions, terminal_converter=None):
"""
Call `handle_actions` recursively on `self.tree` and return result.
`handle_actions` is a dictionary of node names (e.g. 'product', 'sum',
etc&) to functions. These functions are of the following form:
-input: a list of processed child nodes. If it includes any terminal
nodes in the list, they will be given as their processed forms also.
-output: whatever to be passed to the level higher, and what to
return for the final node.
`terminal_converter` is a function that takes in a token and returns a
processed form. The default of `None` just leaves them as strings.
"""
def handle_node(node):
"""
Return the result representing the node, using recursion.
Call the appropriate `handle_action` for this node. As its inputs,
feed it the output of `handle_node` for each child node.
"""
if not isinstance(node, ParseResults):
# Then treat it as a terminal node.
if terminal_converter is None:
return node
else:
return terminal_converter(node)
node_name = node.getName()
if node_name not in handle_actions: # pragma: no cover
raise Exception(u"Unknown branch name '{}'".format(node_name))
action = handle_actions[node_name]
handled_kids = [handle_node(k) for k in node]
return action(handled_kids)
# Find the value of the entire tree.
return handle_node(self.tree)
def check_variables(self, valid_variables, valid_functions):
"""
Confirm that all the variables used in the tree are valid/defined.
Otherwise, raise an UndefinedVariable containing all bad variables.
"""
if self.case_sensitive:
casify = lambda x: x
else:
casify = lambda x: x.lower() # Lowercase for case insens.
# Test if casify(X) is valid, but return the actual bad input (i.e. X)
bad_vars = set(var for var in self.variables_used
if casify(var) not in valid_variables)
bad_vars.update(func for func in self.functions_used
if casify(func) not in valid_functions)
if bad_vars:
raise UndefinedVariable(' '.join(sorted(bad_vars)))
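
# Example shape of a `reduce_tree` call (a minimal sketch; `parser` is an
# instance of this class and the handle_* functions are hypothetical -- real
# handlers must also deal with the operator tokens interleaved in their child
# lists, e.g. '+', '*', '^', '||'):
#   value = parser.reduce_tree(
#       {'number': handle_number, 'variable': handle_variable,
#        'function': handle_function, 'atom': handle_atom,
#        'power': handle_power, 'parallel': handle_parallel,
#        'product': handle_product, 'sum': handle_sum},
#       terminal_converter=None)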
| proversity-org/edx-platform | common/lib/calc/calc/calc.py | Python | agpl-3.0 | 13,906 | 0.000791 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
################################################################################
# Euler 25
# 1000-digit Fibonacci number
# Author: Eugene Kolo - 2014
# Contact: www.eugenekolo.com
# The Fibonacci sequence is defined by the recurrence relation:
# Fn = Fn−1 + Fn−2, where F1 = 1 and F2 = 1.
# Hence the first 12 terms will be: F1 = 1 F2 = 1 F3 = 2 F4 = 3 F5 = 5 F6 = 8 F7 = 13
# F8 = 21 F9 = 34 F10 = 55 F11 = 89 F12 = 144
# The 12th term, F12, is the first term to contain three digits.
# What is the first term in the Fibonacci sequence to contain 1000 digits?
################################################################################
def solve():
from eulerlib import fib
n = 0
    while len(str(fib(n))) < 1000:
        n += 1
return n
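
# `eulerlib.fib` above is assumed to return the n-th Fibonacci number with
# F(1) = F(2) = 1; an equivalent iterative sketch, should eulerlib be
# unavailable:
def _fib(n):
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a  # _fib(12) == 144, matching the problem statement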
if __name__ == '__main__':
print(solve())
| eugenekolo/project-euler | euler025.py | Python | mit | 856 | 0.008216 |
# -*- coding: utf-8 -*-
"""
Contains any static and global variables for MythTV Python Bindings
"""
OWN_VERSION = (0,23,1,0)
SCHEMA_VERSION = 1254
MVSCHEMA_VERSION = 1032
NVSCHEMA_VERSION = 1004
PROTO_VERSION = 23056
PROGRAM_FIELDS = 47
BACKEND_SEP = '[]:[]'
| ekristen/mythboxee | mythtv/MythStatic.py | Python | mit | 261 | 0.011494 |
import re
import spotlight
from requests import ConnectionError, HTTPError
class EnglishQuestionParser():
"""
Poor-man's english question parser. Not even close to conclusive, but
appears to construct some decent w|a queries and responses.
__author__ = 'seanfitz'
"""
def __init__(self):
self.regexes = [
#re.compile(
# ".*(?P<QuestionWord>are) "
# "(?P<Query>.*)"),
re.compile(
".*(?P<QuestionWord>who|what|when|where|why|which|whose) "
"(?P<Query1>.*) (?P<QuestionVerb>is|are|was|were) "
"(?P<Query2>.*)"),
re.compile(
".*(?P<QuestionWord>are|is) "
"(?P<Query1>.*) (?P<QuestionVerb>an|a|an example off|an instance off) "
"(?P<Query2>.*)"),
re.compile(
"(?P<Query1>.*) (?P<QuestionVerb>and) "
"(?P<Query2>.*) (?P<QuestionWord>in common)"),
re.compile(
".*(?P<QuestionWord>talk|rant|think) "
"(?P<QuestionVerb>\w+) (?P<Query>.*)"),
re.compile(
".*(?P<QuestionWord>who|what|when|where|why|which|how|example|examples) "
"(?P<QuestionVerb>\w+) (?P<Query>.*)")
]
def _normalize(self, groupdict):
if 'Query' in groupdict:
return groupdict
        elif 'Query1' in groupdict and 'Query2' in groupdict:
return {
'QuestionWord': groupdict.get('QuestionWord'),
'QuestionVerb': groupdict.get('QuestionVerb'),
'Query': ' '.join([groupdict.get('Query1'), groupdict.get(
'Query2')])
}
def parse(self, utterance):
for regex in self.regexes:
match = regex.match(utterance)
if match:
return self._normalize(match.groupdict())
return None
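
# Example (a minimal sketch of the parser's output):
#   EnglishQuestionParser().parse("who is the president of france")
#   # -> {'QuestionWord': 'who', 'QuestionVerb': 'is',
#   #     'Query': 'the president of france'}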
class LILACSQuestionParser():
def __init__(self, host="http://spotlight.sztaki.hu:2222/rest/annotate"):
# 222 2en 8pt 5fr
self.parser = EnglishQuestionParser()
self.host = host
def process_entitys(self, text):
subjects, parents, synonims = self.tag_from_dbpedia(text)
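        # 666 is a sentinel offset, assumed larger than any real annotation offset.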
center = 666
center_node = ""
for node in subjects:
if subjects[node] < center:
center = subjects[node]
center_node = node
target = 666
        # TODO: better target-selection mechanism
target_node = ""
for node in subjects:
if subjects[node] < target and node != center_node:
target = subjects[node]
target_node = node
parse = self.poor_parse(text)
try:
question = parse["QuestionWord"]
        except (TypeError, KeyError):
question = "unknown"
middle = [node for node in subjects if node != center_node and node != target_node]
return center_node, target_node, parents, synonims, middle, question
def poor_parse(self, text):
return self.parser.parse(text)
def tag_from_dbpedia(self, text):
text = text.lower()
subjects = {}
parents = {}
synonims = {}
try:
annotations = spotlight.annotate(self.host, text, spotter='Default')
for annotation in annotations:
# how sure we are this is about this dbpedia entry
score = annotation["similarityScore"]
# entry we are talking about
subject = annotation["surfaceForm"].lower()
# smaller is closer to be main topic of sentence
offset = annotation["offset"]
# TODO tweak this value and make configuable
if float(score) < 0.4:
continue
subjects.setdefault(subject, offset)
# categorie of this <- linked nodes <- parsing for dbpedia search
if annotation["types"]:
p = []
types = annotation["types"].split(",")
for type in types:
type = type.replace("DBpedia:", "").replace("Schema:", "").replace("Http://xmlns.com/foaf/0.1/", "").lower()
if type not in p:
p.append(type)
parents.setdefault(subject, p)
# dbpedia link
url = annotation["URI"]
#print "link: " + url
dbpedia_name = url.replace("http://dbpedia.org/resource/", "").replace("_", " ")
if dbpedia_name.lower() not in subject:
synonims.setdefault(subject, dbpedia_name.lower())
except ConnectionError as e:
# TODO use logger
print e
except HTTPError as e:
print e
return subjects, parents, synonims
def test_qp():
parser = LILACSQuestionParser()
questions = ["how to kill animals ( a cow ) and make meat", "what is a living being", "why are humans living beings", "give examples of animals"]
for text in questions:
center_node, target_node, parents, synonims, midle, question = parser.process_entitys(text)
print "\nQuestion: " + text
print "question_type: " + question
print "center_node: " + center_node
print "target_node: " + target_node
print "parents: " + str(parents)
print "relevant_nodes: " + str(midle)
print "synonims: " + str(synonims)
| JarbasAI/jarbas-core | mycroft/jarbas-skills/LILACS_core/question_parser.py | Python | gpl-3.0 | 5,535 | 0.003433 |
#
# $Id: PDG.py,v 1.5 2009-01-26 03:05:43 ssnyder Exp $
# File: PDG.py
# Created: sss, Mar 2005
# Purpose: Define PDG ID codes.
#
"""
This module contains names for the various PDG particle ID codes.
The names are the same as in EventKernel/PdtPdg.h.
This module also contains a dictionary pdgid_names mapping ID codes
back to printable strings, and a function id_to_name to do this
conversion. Similarly, root_names and id_to_root_name translate to
strings with root markup.
"""
from __future__ import absolute_import
from ROOT import TDatabasePDG
from pkg_resources import resource_filename
import os
db = TDatabasePDG()
db.ReadPDGTable(resource_filename('rootpy', 'etc/pdg_table.txt'))
def GetParticle(id):
return db.GetParticle(id)
# Table to translate from PDG IDs to printable strings.
pdgid_names = {}
# Table to translate from PDG IDs to strings with root markup.
root_names = {}
def id_to_name(id):
"""
Convert a PDG ID to a printable string.
"""
name = pdgid_names.get(id)
if not name:
name = repr(id)
return name
def id_to_root_name(id):
"""
Convert a PDG ID to a string with root markup.
"""
name = root_names.get(id)
if not name:
name = repr(id)
return name
#
# Table of PDG IDs, associating the ID codes with up to several names.
# This is formatted as one big string to make it easier to maintain
# (don't need to quote everything individually).
# The format of each line is like this:
#
# mname = id pname rname
#
# An attribute mname will be added to this module with a value of id.
# These names are intended to match those in PdgPdt.h.
# pname is a printable name for the entry, and rname is a name
# with root-style markup. These names will be put into the pdgid_names
# and root_names dictionaries, respectively. They can be left as `!'
# if no name is available. pname and rname should not contain spaces.
# Blank lines or those starting with `#' will be ignored.
#
_pdgtable = \
"""
d = 1 D d
anti_d = -1 DBAR #bar{d}
u = 2 U u
anti_u = -2 UBAR #bar{u}
s = 3 S s
anti_s = -3 SBAR #bar{s}
c = 4 C c
anti_c = -4 CBAR #bar{c}
b = 5 B b
anti_b = -5 BBAR #bar{b}
t = 6 T t
anti_t = -6 TBAR #bar{t}
l = 7 LPRIME !
anti_l = -7 LPRIMEBAR !
h = 8 ! !
anti_h = -8 ! !
g = 21 GLUE g
e_minus = 11 E- e^{-}
e_plus = -11 E+ e^{+}
nu_e = 12 NUE #nu_{e}
anti_nu_e = -12 ANUE #bar{#nu}_{e}
mu_minus = 13 MU- #mu^{-}
mu_plus = -13 MU+ #mu^{+}
nu_mu = 14 NUM #nu_{#mu}
anti_nu_mu = -14 ANUM #bar{#nu}_{#mu}
tau_minus = 15 TAU- #tau^{-}
tau_plus = -15 TAU+ #tau^{+}
nu_tau = 16 NUT #nu_{#tau}
anti_nu_tau = -16 ANUT #bar{nu}_{#tau}
L_minus = 17 ! !
L_plus = -17 ! !
nu_L = 18 ! !
anti_nu_L = -18 ! !
gamma = 22 PHOT #gamma
Z0 = 23 Z0 Z
W_plus = 24 W+ W^{+}
W_minus = -24 W- W^{-}
Higgs0 = 25 H0 h^{0}
reggeon = 28 ! !
pomeron = 29 ! !
Z_prime0 = 32 ! !
Z_prime_prime0 = 33 ! !
W_prime_plus = 34 ! !
W_prime_minus = -34 ! !
Higgs_prime0 = 35 ! !
A0 = 36 ! !
Higgs_plus = 37 ! !
Higgs_minus = -37 ! !
R0 = 40 ! !
anti_R0 = -40 ! !
specflav = 81 ! !
rndmflav = 82 ! !
anti_rndmflav = -82 ! !
phasespa = 83 ! !
c_minushadron = 84 ! !
anti_c_minushadron = -84 ! !
b_minushadron = 85 ! !
anti_b_minushadron = -85 ! !
t_minushadron = 86 ! !
anti_t_minushadron = -86 ! !
Wvirt_plus = 89 ! !
Wvirt_minus = -89 ! !
diquark = 90 ! !
anti_diquark = -90 ! !
cluster = 91 CLUSTER cluster
string = 92 ! !
indep = 93 ! !
CMshower = 94 ! !
SPHEaxis = 95 ! !
THRUaxis = 96 ! !
CLUSjet = 97 ! !
CELLjet = 98 ! !
table = 99 ! !
pi0 = 111 PI0 #pi^{0}
pi_plus = 211 PI+ #pi^{+}
pi_minus = -211 PI- #pi^{-}
pi_diffr_plus = 210 ! !
pi_diffr_minus = -210 ! !
pi_2S0 = 20111 ! !
pi_2S_plus = 20211 ! !
pi_2S_minus = -20211 ! !
eta = 221 ETA #eta
eta_2S = 20221 ! !
eta_prime = 331 ! !
rho0 = 113 ! #rho^{0}
rho_plus = 213 RHO+ #rho^{+}
rho_minus = -213 RHO- #rho^{-}
rho_2S0 = 30113 ! !
rho_2S_plus = 30213 ! !
rho_2S_minus = -30213 ! !
rho_3S0 = 40113 ! !
rho_3S_plus = 40213 ! !
rho_3S_minus = -40213 ! !
omega = 223 ! !
omega_2S = 30223 ! !
phi = 333 PHI #phi
a_00 = 10111 ! !
a_0_plus = 10211 ! !
a_0_minus = -10211 ! !
f_0 = 10221 ! !
f_prime_0 = 10331 ! !
b_10 = 10113 ! !
b_1_plus = 10213 ! !
b_1_minus = -10213 ! !
h_1 = 10223 h_1 h_{1}
h_prime_1 = 10333 ! !
a_10 = 20113 ! !
a_1_plus = 20213 ! !
a_1_minus = -20213 ! !
f_1 = 20223 ! !
f_prime_1 = 20333 ! !
a_20 = 115 ! !
a_2_plus = 215 a_2+ a_{2}^{+}
a_2_minus = -215 a_2- a_{2}^{-}
f_2 = 225 ! !
f_prime_2 = 335 ! !
K0 = 311 K0 K^{0}
anti_K0 = -311 K0BAR #bar{K}^0
K_S0 = 310 K_S0 K_{S}^{0}
K_L0 = 130 K_L0 K_{L}^{0}
K_plus = 321 K+ K^{+}
K_minus = -321 K- K^{-}
K_star0 = 313 K* K^{*}
anti_K_star0 = -313 K*BAR #bar{K}^{*}
K_star_plus = 323 ! !
K_star_minus = -323 ! !
K_0_star0 = 10311 ! !
anti_K_0_star0 = -10311 ! !
K_0_star_plus = 10321 ! !
K_0_star_minus = -10321 ! !
K_10 = 10313 ! !
anti_K_10 = -10313 ! !
K_1_plus = 10323 ! !
K_1_minus = -10323 ! !
K_2_star0 = 315 ! !
anti_K_2_star0 = -315 ! !
K_2_star_plus = 325 K_2*+ K_{2}^{*+}
K_2_star_minus = -325 K_2*- K_{2}^{*-}
K_prime_10 = 20313 ! !
anti_K_prime_10 = -20313 ! !
K_prime_1_plus = 20323 ! !
K_prime_1_minus = -20323 ! !
D_plus = 411 D+ D^{+}
D_minus = -411 D- D^{-}
D0 = 421 D0 D^{0}
anti_D0 = -421 D0BAR #bar{D}^{0}
D_star_plus = 413 ! !
D_star_minus = -413 ! !
D_star0 = 423 ! !
anti_D_star0 = -423 ! !
D_0_star_plus = 10411 ! !
D_0_star_minus = -10411 ! !
D_0_star0 = 10421 ! !
anti_D_0_star0 = -10421 ! !
D_1_plus = 10413 ! !
D_1_minus = -10413 ! !
D_10 = 10423 ! !
anti_D_10 = -10423 ! !
D_2_star_plus = 415 ! !
D_2_star_minus = -415 ! !
D_2_star0 = 425 ! !
anti_D_2_star0 = -425 ! !
D_prime_1_plus = 20413 ! !
D_prime_1_minus = -20413 ! !
D_prime_10 = 20423 ! !
anti_D_prime_10 = -20423 ! !
D_s_plus = 431 D_S+ D_{s}^{+}
D_s_minus = -431 D_S- D_{s}^{-}
D_s_star_plus = 433 ! !
D_s_star_minus = -433 ! !
D_s0_star_plus = 10431 ! !
D_s0_star_minus = -10431 ! !
D_s1_plus = 10433 ! !
D_s1_minus = -10433 ! !
D_s2_star_plus = 435 ! !
D_s2_star_minus = -435 ! !
D_prime_s1_plus = 20433 ! !
D_prime_s1_minus = -20433 ! !
B0 = 511 B0 B^{0}
anti_B0 = -511 B0BAR #bar{B}^{0}
B_plus = 521 B+ B^{+}
B_minus = -521 B- B^{-}
B_star0 = 513 ! !
anti_B_star0 = -513 ! !
B_star_plus = 523 ! !
B_star_minus = -523 ! !
B_0_star0 = 10511 ! !
anti_B_0_star0 = -10511 ! !
B_0_star_plus = 10521 ! !
B_0_star_minus = -10521 ! !
B_10 = 10513 ! !
anti_B_10 = -10513 ! !
B_1_plus = 10523 ! !
B_1_minus = -10523 ! !
B_2_star0 = 515 ! !
anti_B_2_star0 = -515 ! !
B_2_star_plus = 525 ! !
B_2_star_minus = -525 ! !
B_prime_10 = 20513 ! !
anti_B_prime_10 = -20513 ! !
B_prime_1_plus = 20523 ! !
B_prime_1_minus = -20523 ! !
B_s0 = 531 B_S0 B_{s}^{0}
anti_B_s0 = -531 B_S0BAR #bar{B}_{s}^{0}
B_s_star0 = 533 ! !
anti_B_s_star0 = -533 ! !
B_s0_star0 = 10531 ! !
anti_B_s0_star0 = -10531 ! !
B_s10 = 10533 ! !
anti_B_s10 = -10533 ! !
B_s2_star0 = 535 ! !
anti_B_s2_star0 = -535 ! !
B_prime_s10 = 20533 ! !
anti_B_prime_s10 = -20533 ! !
B_c_plus = 541 BC+ B_{c}^{+}
B_c_minus = -541 BC- B_{c}^{-}
B_c_star_plus = 543 BC*+ B_{c}^{*+}
B_c_star_minus = -543 BC*- B_{c}^{*-}
B_c0_star_plus = 10541 ! !
B_c0_star_minus = -10541 ! !
B_c1_plus = 10543 ! !
B_c1_minus = -10543 ! !
B_c2_star_plus = 545 ! !
B_c2_star_minus = -545 ! !
B_prime_c1_plus = 20543 ! !
B_prime_c1_minus = -20543 ! !
eta_c = 441 ! !
eta_c_2S = 20441 ! !
J_psi = 443 JPSI J/#psi
psi_2S = 20443 ! !
chi_c0 = 10441 ! !
chi_c1 = 10443 ! !
chi_c2 = 445 ! !
eta_b_2S = 20551 ! !
eta_b_3S = 40551 ! !
Upsilon = 553 ! !
Upsilon_2S = 20553 ! !
Upsilon_3S = 60553 ! !
Upsilon_4S = 70553 ! !
Upsilon_5S = 80553 ! !
h_b = 10553 ! !
h_b_2P = 40553 ! !
h_b_3P = 100553 ! !
chi_b0 = 551 ! !
chi_b1 = 20553 ! !
chi_b2 = 555 ! !
chi_b0_2P = 30551 ! !
chi_b1_2P = 50553 ! !
chi_b2_2P = 10555 ! !
chi_b0_3P = 50551 ! !
chi_b1_3P = 110553 ! !
chi_b2_3P = 20555 ! !
eta_b2_1D = 40555 ! !
eta_b2_2D = 60555 ! !
Upsilon_1_1D = 120553 ! !
Upsilon_2_1D = 30555 ! !
Upsilon_3_1D = 557 ! !
Upsilon_1_2D = 130553 ! !
Upsilon_2_2D = 50555 ! !
Upsilon_3_2D = 10557 ! !
Delta_minus = 1114 DELTA- #Delta^{-}
anti_Delta_plus = -1114 DELTA+ #Delta^{+}
n_diffr = 2110 ! !
anti_n_diffr = -2110 ! !
n0 = 2112 N n
anti_n0 = -2112 NBAR #bar{n}
Delta0 = 2114 ! !
anti_Delta0 = -2114 ! !
p_diffr_plus = 2210 ! !
anti_p_diffr_minus = -2210 ! !
p_plus = 2212 P+ p^{+}
anti_p_minus = -2212 P- p^{-}
Delta_plus = 2214 ! !
anti_Delta_minus = -2214 ! !
Delta_plus_plus = 2224 ! !
anti_Delta_minus_minus = -2224 ! !
Sigma_minus = 3112 SIGMA- #Sigma^{-}
anti_Sigma_plus = -3112 SIGMABAR+ #bar{#Sigma}^{+}
Sigma_star_minus = 3114 ! !
anti_Sigma_star_plus = -3114 ! !
Lambda0 = 3122 LAMBDA_D0 #Lambda^{0}
anti_Lambda0 = -3122 LAMBDABAR_D0 #bar{#Lambda}^{0}
Sigma0 = 3212 ! !
anti_Sigma0 = -3212 ! !
Sigma_star0 = 3214 ! !
anti_Sigma_star0 = -3214 ! !
Sigma_plus = 3222 SIGMA+ #Sigma^{+}
anti_Sigma_minus = -3222 SIGMABAR- #bar{#Sigma}^{-}
Sigma_star_plus = 3224 ! !
anti_Sigma_star_minus = -3224 ! !
Xi_minus = 3312 XI- #Xi^{-}
anti_Xi_plus = -3312 XI+ #Xi^{+}
Xi_star_minus = 3314 ! !
anti_Xi_star_plus = -3314 ! !
Xi0 = 3322 XI0 #Xi^{0}
anti_Xi0 = -3322 XIBAR0 #bar{Xi}^{0}
Xi_star0 = 3324 ! !
anti_Xi_star0 = -3324 ! !
Omega_minus = 3334 ! !
anti_Omega_plus = -3334 ! !
Sigma_c0 = 4112 ! !
anti_Sigma_c0 = -4112 ! !
Sigma_c_star0 = 4114 SIGMA_C0* #Sigma_{c}^{*0}
anti_Sigma_c_star0 = -4114 SIGMABAR_C0* #bar{#Sigma}_{c}^{*0}
Lambda_c_plus = 4122 LAMBDA_C+ #Lambda_{c}^{+}
anti_Lambda_c_minus = -4122 LAMBDA_C- #Lambda_{c}^{-}
Xi_c0 = 4132 XI_C0 #Xi_{c}^{0}
anti_Xi_c0 = -4132 XIBAR_C0 #bar{#Xi}_{c}^{0}
Sigma_c_plus = 4212 SIGMA_C+ #Sigma_{c}^{+}
anti_Sigma_c_minus = -4212 SIGMA_C- #Sigma_{c}^{-}
Sigma_c_star_plus = 4214 SIGMA_C+* #Sigma_{c}^{*+}
anti_Sigma_c_star_minus = -4214 SIGMA_C-* #Sigma_{c}^{*-}
Sigma_c_plus_plus = 4222 SIGMA_C++ #Sigma_{c}^{++}
anti_Sigma_c_minus_minus = -4222 SIGMA_C-- #Sigma_{c}^{--}
Sigma_c_star_plus_plus = 4224 SIGMA_C++* #Sigma_{c}^{*++}
anti_Sigma_c_star_minus_minus = -4224 SIGMA_C--* #Sigma_{c}^{*--}
Xi_c_plus = 4322 XI_C+ #Xi_{c}^{+}
anti_Xi_c_minus = -4322 XI_C- #Xi_{c}^{-}
Xi_prime_c0 = 4312 XI'_C0 #Xi\'_{c}^{0}
Xi_primeanti__c0 = -4312 XIBAR'_C0 #bar{#Xi}\'_{c}^{0}
Xi_c_star0 = 4314 XI_C0* #Xi_{c}^{*0}
anti_Xi_c_star0 = -4314 XIBAR_C0* #bar{#Xi}_{c}^{*0}
Xi_prime_c_plus = 4232 XI'_C+ #Xi\'_{c}^{+}
Xi_primeanti__c_minus = -4232 XIBAR'_C- #Xi\'_{c}^{-}
Xi_c_star_plus = 4324 XI_C+* #Xi_{c}^{*+}
anti_Xi_c_star_minus = -4324 XI_C-* #Xi_{c}^{*-}
Omega_c0 = 4332 OMEGA_C0 #Omega_{c}^{0}
anti_Omega_c0 = -4332 OMEGABAR_C0 #bar{#Omega}_{c}^{0}
Omega_c_star0 = 4334 OMEGA_C0* #Omega_{c}^{*0}
anti_Omega_c_star0 = -4334 OMEGA_C0* #bar{#Omega}_{c}^{*0}
Sigma_b_minus = 5112 SIGMA_B- #Sigma_{b}^{-}
anti_Sigma_b_plus = -5112 SIGMA_B+ #Sigma_{b}^{+}
Sigma_b_star_minus = 5114 ! !
anti_Sigma_b_star_plus = -5114 ! !
Lambda_b0 = 5122 LAMBDA_B0 #Lambda_{b}^{0}
anti_Lambda_b0 = -5122 LAMBDA_B0BAR #bar{#Lambda}_{b}^0
Xi_b_minus = 5132 ! !
anti_Xi_b_plus = -5132 ! !
Sigma_b0 = 5212 SIGMA_B0 #Sigma_{b}^{0}
anti_Sigma_b0 = -5212 SIGMABAR_B0 #bar{#Sigma}_{b}^{0}
Sigma_b_star0 = 5214 ! !
anti_Sigma_b_star0 = -5214 ! !
Sigma_b_plus = 5222 ! !
anti_Sigma_b_minus = -5222 ! !
Sigma_star_ = 5224 ! !
anti_Sigma_b_star_minus = -5224 ! !
Xi_b0 = 5232 XI_B0 #Xi_b^{0}
anti_Xi_b0 = -5232 XIBAR_B0 #bar{#Xi}_b^{0}
Xi_prime_b_minus = 5312 ! !
anti_Xi_prime_b_plus = -5312 ! !
Xi_b_star_minus = 5314 ! !
anti_Xi_b_star_plus = -5314 ! !
Xi_prime_b0 = 5322 ! !
anti_Xi_prime_b0 = -5322 ! !
Xi_b_star0 = 5324 ! !
anti_Xi_b_star0 = -5324 ! !
Omega_b_minus = 5332 ! !
anti_Omega_b_plus = -5332 ! !
Omega_b_star_minus = 5334 ! !
anti_Omega_b_star_plus = -5334 ! !
dd_0 = 1101 ! !
anti_dd_0 = -1101 ! !
ud_0 = 2101 UD0 !
anti_ud_0 = -2101 UD0BAR !
uu_0 = 2201 ! !
anti_uu_0 = -2201 ! !
sd_0 = 3101 ! !
anti_sd_0 = -3101 ! !
su_0 = 3201 ! !
anti_su_0 = -3201 ! !
ss_0 = 3301 ! !
anti_ss_0 = -3301 ! !
cd_0 = 4101 ! !
anti_cd_0 = -4101 ! !
cu_0 = 4201 ! !
anti_cu_0 = -4201 ! !
cs_0 = 4301 ! !
anti_cs_0 = -4301 ! !
cc_0 = 4401 ! !
anti_cc_0 = -4401 ! !
bd_0 = 5101 ! !
anti_bd_0 = -5101 ! !
bu_0 = 5201 ! !
anti_bu_0 = -5201 ! !
bs_0 = 5301 ! !
anti_bs_0 = -5301 ! !
bc_0 = 5401 ! !
anti_bc_0 = -5401 ! !
bb_0 = 5501 ! !
anti_bb_0 = -5501 ! !
dd_1 = 1103 ! !
anti_dd_1 = -1103 ! !
ud_1 = 2103 ! !
anti_ud_1 = -2103 ! !
uu_1 = 2203 ! !
anti_uu_1 = -2203 ! !
sd_1 = 3103 ! !
anti_sd_1 = -3103 ! !
su_1 = 3203 ! !
anti_su_1 = -3203 ! !
ss_1 = 3303 ! !
anti_ss_1 = -3303 ! !
cd_1 = 4103 ! !
anti_cd_1 = -4103 ! !
cu_1 = 4203 ! !
anti_cu_1 = -4203 ! !
cs_1 = 4303 ! !
anti_cs_1 = -4303 ! !
cc_1 = 4403 ! !
anti_cc_1 = -4403 ! !
bd_1 = 5103 ! !
anti_bd_1 = -5103 ! !
bu_1 = 5203 ! !
anti_bu_1 = -5203 ! !
bs_1 = 5303 ! !
anti_bs_1 = -5303 ! !
bc_1 = 5403 ! !
anti_bc_1 = -5403 ! !
bb_1 = 5503 ! !
anti_bb_1 = -5503 ! !
# SUSY Particles names modified from /Control/AthenaCommon/PDGTABLE.MeV
# naming convention change
# '~' to 's_'
# '(' to '_'
# ')' to nothing
# '+' to 'plus'
# '' to '_'
# for the negatively charged particles so I add "minus" to the name and a corresponding "plus" entry with -pdg code
# for the neutrals I add a corresponding "anti" entry with -pdg code
# for the particles with positive charge entries I add a corresponding "minus" entry with -pdg code
# ************ (the above is not consistent with the convention that minus=particle plus=anti-particle
#
# Next remove Majorana particles and rename L-R stau to mass eigenstates.
#
# This is all ugly but sort of consistent with previous naming convention
s_e_minus_L =1000011 ! !
s_e_plus_L =-1000011 ! !
s_nu_e_L =1000012 ! !
s_anti_nu_e_L =-1000012 ! !
s_mu_minus_L =1000013 ! !
s_mu_plus_L =-1000013 ! !
s_nu_mu_L =1000014 ! !
s_anti_nu_mu_L =-1000014 ! !
# s_tau_minus_L =1000015
# s_tau_plus_L =-1000015
# L-R mixing significant use _1 and _2 for names instead
s_tau_minus_1 =1000015 ! !
s_tau_plus_1 =-1000015 ! !
s_nu_tau_L =1000016 ! !
s_anti_nu_tau_L=-1000016 ! !
s_e_minus_R =2000011 ! !
s_e_plus_R =-2000011 ! !
s_mu_minus_R =2000013 ! !
s_mu_plus_R =-2000013 ! !
s_tau_minus_2 =2000015 ! !
s_tau_plus_2 =-2000015 ! !
s_g =1000021 ! !
# s_anti_g =-1000021 # Majorana
s_chi_0_1 =1000022 ! !
# s_anti_chi_0_1 =-1000022 # Majorana
s_chi_0_2 =1000023 ! !
# s_anti_chi_0_2 =-1000023 # Majorana
s_chi_plus_1 =1000024 ! !
# Majorana
s_chi_minus_1 =-1000024 ! !
s_chi_0_3 =1000025 ! !
# s_anti_chi_0_3 =-1000025 # Majorana
s_chi_0_4 =1000035 ! !
# s_anti_chi_0_4 =-1000035 # Majorana
s_chi_plus_2 =1000037 ! !
s_chi_minus_2 =-1000037 ! !
s_G =1000039 ! !
# s_anti_G =-1000039 # Majorana
# note mismatch with PDGTable and pre-existing PdtPdg.h
#M 999 0.E+00 +0.0E+00 -0.0E+00 Geantino 0
#W 999 0.E+00 +0.0E+00 -0.0E+00 Geantino 0
# doubly charged Higgs
Higgs_plus_plus_L = 9900041 ! !
Higgs_minus_minus_L = -9900041 ! !
Higgs_plus_plus_R = 9900042 ! !
Higgs_minus_minus_R = -9900042 ! !
# Null particles
deuteron = 0 ! !
tritium = 0 ! !
alpha = 0 ! !
geantino = 0 ! !
He3 = 0 ! !
Cerenkov = 0 ! !
null = 0 ! !
# Some extra particles that weren't in PdgPdt.h
Xi_cc_plus = 4412 XI_CC+ #Xi_{cc}^{+}
anti_Xi_cc_minus = -4412 XI_CC- #Xi_{cc}^{-}
Xi_cc_plus_plus = 4422 XI_CC++ #Xi_{cc}^{++}
anti_Xi_cc_minus_minus = -4422 XI_CC-- #Xi_{cc}^{--}
Xi_cc_star_plus = 4414 XI_CC+* #Xi_{cc}^{*+}
anti_Xi_cc_star_minus = -4414 XI_CC-* #Xi_{cc}^{*-}
Xi_cc_star_plus_plus = 4424 XI_CC++* #Xi_{cc}^{*++}
anti_Xi_cc_star_minus_minus = -4424 XI_CC--* #Xi_{cc}^{*--}
Omega_cc_plus = 4432 OMEGA_CC+ #Omega_{cc}^{+}
anti_Omega_cc_minus = -4432 OMEGA_CC- #Omega_{cc}^{-}
Omega_cc_star_plus = 4434 OMEGA_CC+* #Omega_{cc}^{*+}
anti_Omega_cc_star_minus = -4434 OMEGA_CC-* #Omega_{cc}^{*-}
Omega_ccc_plus_plus = 4444 OMEGA_CCC++ #Omega_{ccc}^{++}
anti_Omega_ccc_minus_minus = -4444 OMEGA_CCC-- #Omega_{ccc}^{--}
# A couple extra synonyms that weren't in PdgPdt.h.
e = e_minus ! !
mu = mu_minus ! !
tau = tau_minus ! !
W = W_plus ! !
"""
# Parse _pdgtable and fill in dictionaries.
def _fill_dicts():
pdgid_names.clear()
root_names.clear()
for line in _pdgtable.split ('\n'):
line = line.strip()
if len(line) == 0 or line[0] == '#': continue
ll = line.split('=', 1)
if len(ll) < 2:
print('bad line: {0}'.format(line))
continue
        mname = ll[0].strip()
ll = ll[1].split()
if len(ll) < 1:
print('bad line: {0}'.format(line))
continue
id = ll[0]
pname = None
if len(ll) >= 2 and ll[1] != '!':
pname = ll[1]
rname = None
if len(ll) >= 3 and ll[2] != '!':
rname = ll[2]
        try:
            id = int(id)
        except ValueError:
            id = globals().get(id)
            if id is None:
                print('bad line: {0}'.format(line))
                continue
        if pname is None:
            pname = mname
        if rname is None:
            rname = pname
        globals()[mname] = id
        if id not in pdgid_names:
            pdgid_names[id] = pname
        if id not in root_names:
            root_names[id] = rname
return
# Fill the dictionaries.
_fill_dicts()
# Kill these now to save memory.
del _pdgtable
del _fill_dicts
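
# Example (a minimal sketch; values taken from the table above):
#   id_to_name(13)       -> 'MU-'
#   id_to_root_name(13)  -> '#mu^{-}'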
| mverzett/rootpy | rootpy/extern/hep/pdg.py | Python | gpl-3.0 | 31,391 | 0.00051 |
"""
This module contains classes for data and graph visualization.
For that purpose, there are classes for the creation of simple graph images,
images depicting paths (critical, shortest, strongest), and options that give
a user the chance to customize images and the way nodes and edges are drawn.
Apart from this, there are also classes for the creation of frequency diagrams
(for node measures such as closeness centrality and clustering coefficient),
and diagrams of the evolution of average degree and average shortest path
length over time.
"""
__author__ = 'Thodoris Sotiropoulos'
from mvc.controller.analysis import Community
from mvc.controller.analysis import Path
import matplotlib
matplotlib.use('AGG')
import StringIO
import pylab as plt
import networkx as nx
import copy
import math
from random import random
class GraphImage:
"""
This class represents an image of graph and how graph's nodes and edges
are depicted.
For example, nodes of graph are depicted with red and edges are depicted
with black.
"""
def __init__(self, image_style, graphfile):
"""
Initialize image of graph according to what should be depicted.
For example style of image is defined such as size of nodes, edge color,
node shape, node color, edge width, edge style.
Moreover, an encoded string of image based on base64 encoding is created,
without any depiction of any path, community, etc.
:param image_style Style of image.
:param graphfile Graph object which is going to depicted.
"""
self.url = None
self.communities = None
self.communities_image = False
self.communities_color = {}
self.level = 1
self.path_image = False
self.paths = None
self.ranking = None
self.ranking_image = False
self.graph = graphfile.graph
self.image_style = image_style
self.simple_image()
def get_node_pos(self):
"""
Gets layout of graph's nodes.
:return Position of nodes.
"""
pos = nx.get_node_attributes(self.graph.graph, 'position')
return pos
def draw_edge_weights(self, pos):
"""
Draws edge weights.
For undirected graphs, weight label is positioned at the center of edge.
For directed graphs, weight label is positioned at the side of target node.
For example, is there is an edge between nodes A and B as the following
A --> B with weight C, label C is going to be depicted at the side of node
B.
:param pos Position of nodes.
"""
if self.graph.graphtype == 'Undirected':
return self.draw_edge_weights_undirected(pos)
edge_list = []
for u, v in self.graph.graph.edges():
edge_labels = {}
e1 = (u, v)
edge_labels[tuple(e1)] = self.graph.graph.edge[u][v]['weight']
if edge_list.count(str(u + v)) == 0 and self.graph.graphtype == 'Directed':
                nx.draw_networkx_edge_labels(self.graph.graph, pos,
                                             edge_labels=edge_labels,
                                             font_size=9, label_pos=0.2)
if self.graph.graph.has_edge(v, u):
edge_lab = {}
e2 = (v, u)
edge_list.append(str(v + u))
edge_lab[tuple(e2)] = self.graph.graph.edge[v][u]['weight']
                    nx.draw_networkx_edge_labels(self.graph.graph, pos,
                                                 edge_labels=edge_lab,
                                                 font_size=9, label_pos=0.2)
def draw_edge_weights_undirected(self, pos):
"""
Draws edge weights.
For undirected graphs, weight label is positioned at the center of edge.
:param pos Position of nodes.
"""
edge_labels = {}
for u, v in self.graph.graph.edges():
e = (u, v)
edge_labels[tuple(e)] = self.graph.graph.edge[u][v]['weight']
        nx.draw_networkx_edge_labels(self.graph.graph, pos,
                                     edge_labels=edge_labels, font_size=9)
def create_image_url(self):
"""
Creates an encoded string of PNG image of graph based on base64 encoding.
"""
plt.axis("off")
try:
rv = StringIO.StringIO()
plt.savefig(rv, format="png")
self.url = "data:image/png;base64,%s" % rv.getvalue().encode("base64").strip()
finally:
plt.clf()
plt.close()
def simple_image(self):
"""
Creates a simple image of graph visualization without any depiction of
path between two nodes, or communities, or defining size and color of
nodes according to their values in a measure (closeness centrality,
clustering coefficient, etc.)
"""
pos = self.get_node_pos()
self.draw_nodes(pos)
self.draw_edges(pos)
self.create_image_url()
def draw_nodes(self, pos):
"""
Draws nodes of graphs according to their style.
Node style is defined by the node size, node color, node shape.
:param pos Position of nodes.
"""
nodes = self.graph.graph.nodes()
nx.draw_networkx_nodes(self.graph.graph, pos, nodelist=nodes,
node_size=self.image_style.node_size,
node_color=self.image_style.node_color,
node_shape=self.image_style.node_shape)
def create_path(self, path=None):
"""
Creates an image of graph with a depiction of path between two nodes.
Path can be the critical, shortest, strongest path between these two nodes.
        :param path Path object to depict; if None, the previously stored
        path is redrawn.
"""
self.path_image = True
self.communities_image = False
self.ranking_image = False
if path is not None:
self.paths = path
pos = self.get_node_pos()
self.draw_path_nodes(pos)
self.draw_path_edges(pos)
self.create_image_url()
def draw_path_nodes(self, pos):
"""
Draws nodes in an image which depicts a path between two nodes.
        Nodes which are included in this path are depicted in crimson and
        at a larger size than nodes which are not included in the path.
:param pos Position of nodes.
"""
for path in self.paths.path_sequence:
nx.draw_networkx_nodes(self.graph.graph, pos, nodelist=path,
node_size=self.image_style.node_size + 100,
node_color='crimson',
node_shape=self.image_style.node_shape)
rest_nodes = Path.get_nodes_which_are_not_in_path(self.graph.graph,
self.paths.path_sequence)
nx.draw_networkx_nodes(self.graph.graph, pos, nodelist=rest_nodes,
node_size=self.image_style.node_size,
node_color=self.image_style.node_color,
node_shape=self.image_style.node_shape)
def draw_path_edges(self, pos):
"""
Draws edges in an image which depicts a path between two nodes.
        Edges which are included in this path are depicted in black, with
        a dashed line and a greater width than edges which are not included
        in the path.
:param pos Position of nodes.
"""
all_vertices = []
for path in self.paths.path_sequence:
path_vertices = Path.get_path_edges(path)
all_vertices.append(path_vertices)
nx.draw_networkx_edges(self.graph.graph, pos, edgelist=path_vertices,
width=self.image_style.edge_width + 1,
edge_color="black", style="dashed")
rest_edges = Path.get_edges_which_are_not_in_paths(self.graph.graph,
all_vertices)
label = self.graph.get_node_label()
nx.draw_networkx_edges(self.graph.graph, pos, rest_edges,
width=self.image_style.edge_width,
edge_color=self.image_style.edge_color,
style=self.image_style.edge_style)
nx.draw_networkx_labels(self.graph.graph, pos, labels=label,
font_size=self.image_style.font_size,
font_color=self.image_style.font_color)
if self.graph.is_weighted and self.image_style.edge_label:
self.draw_edge_weights(pos)
def draw_communities(self, pos):
"""
Draws communities of graph. Each community is depicted with different
color. Each community is consisted of a list of nodes.
:param pos Position of nodes.
"""
g = nx.Graph(self.graph.graph)
self.communities = Community(g)
counter = 0
for community in self.communities.communities[self.level - 1]:
if not self.communities_image:
color = (random(), random(), random())
self.communities_color[counter] = color
else:
try:
color = self.communities_color[counter]
except KeyError:
color = (random(), random(), random())
self.communities_color[counter] = color
nx.draw_networkx_nodes(g, pos, nodelist=community.nodes(),
node_size=self.image_style.node_size,
node_color=color,
node_shape=self.image_style.node_shape)
counter += 1
self.communities_image = True
def image_communities(self, level=None):
"""
Creates an image of graph with a depiction of communities which
are detected in graph.
        :param level Level of communities according to the Girvan-Newman
        algorithm. A higher value means more communities consisting of fewer
        nodes each, whereas a lower value means fewer communities consisting
        of more nodes.
"""
self.path_image = False
self.ranking_image = False
if level is not None:
self.level = level
pos = self.get_node_pos()
self.draw_communities(pos)
self.draw_edges(pos)
self.create_image_url()
def update_image(self):
""" Update image of graph according to a change by user."""
if self.communities_image:
self.image_communities()
elif self.path_image:
self.create_path()
elif self.ranking_image:
self.ranking_nodes_image()
else:
self.simple_image()
def draw_edges(self, pos):
"""
Draws edges of graphs according to their style.
Edge style is defined by the edge width, edge color, node style and
edge weight labels.
:param pos Position of nodes.
"""
label = self.graph.get_node_label()
nx.draw_networkx_edges(self.graph.graph, pos, self.graph.graph.edges(),
width=self.image_style.edge_width,
edge_color=self.image_style.edge_color,
style=self.image_style.edge_style)
nx.draw_networkx_labels(self.graph.graph, pos, labels=label,
font_size=self.image_style.font_size,
font_color=self.image_style.font_color)
if self.graph.is_weighted and self.image_style.edge_label:
self.draw_edge_weights(pos)
def rank_nodes_by_color(self, pos):
"""
Draws nodes of graph with the color of each node depending on
its value in a measure such as closeness centrality, clustering
coefficient.
:param pos Position of nodes.
"""
nx.draw_networkx_nodes(self.graph.graph, pos,
nodelist=self.graph.graph.nodes(),
node_size=self.image_style.node_size,
node_color=self.ranking.color_ranking,
node_shape=self.image_style.node_shape,
cmap=plt.get_cmap(self.ranking.cmap))
def rank_nodes_by_size(self, pos):
"""
Draws nodes of graph with the size of each node depending on
its value in a measure such as closeness centrality, clustering
coefficient.
:param pos Position of nodes.
"""
nx.draw_networkx_nodes(self.graph.graph,
pos, nodelist=self.ranking.size_ranking[1],
node_size=self.ranking.size_ranking[0],
node_color=self.image_style.node_color,
node_shape=self.image_style.node_shape)
def rank_nodes_by_color_and_size(self, pos):
"""
Draws nodes of graph with the color and size of each node depending on
its value in a measure such as closeness centrality, clustering
coefficient.
:param pos Position of nodes.
"""
nx.draw_networkx_nodes(self.graph.graph, pos,
nodelist=self.ranking.size_ranking[1],
node_size=self.ranking.size_ranking[0],
node_color=self.ranking.color_ranking,
node_shape=self.image_style.node_shape,
cmap=plt.get_cmap(self.ranking.cmap))
def ranking_nodes_image(self, ranking=None):
"""
Creates a simple image of graph visualization defining size and color of
nodes according to their values in a measure (closeness centrality,
clustering coefficient, etc.)
        :param ranking Ranking object defining node colors/sizes; if None,
        the previously stored ranking is reused.
"""
self.ranking_image = True
self.communities_image = False
self.path_image = False
if ranking is not None:
self.ranking = ranking
pos = self.get_node_pos()
if self.ranking.type == 'colorRanking':
self.rank_nodes_by_color(pos)
elif self.ranking.type == 'sizeRanking':
self.rank_nodes_by_size(pos)
else:
self.rank_nodes_by_color_and_size(pos)
self.draw_edges(pos)
self.create_image_url()
class ImageStyle:
"""
This class defines the style of graph's image.
More specifically, it defines the layout of nodes, the node size, the node
color, the edge width, the edge color, the node shape (circle, square, etc),
the edge style (dashed line, solid line, etc), the font size and color of
node labels.
Moreover it defines, if edge weights would be depicted or not (for weighted
graphs only.)
"""
def __init__(self, selection='random', n_size=500, edge_width=1.0,
ncolor='red', ecolor='red', estyle='solid', shape='o',
fsize=12, fcolor='black', weights='on'):
self.layout = selection
self.node_size = n_size
self.edge_width = edge_width
self.node_color = ncolor
self.edge_color = ecolor
self.edge_style = estyle
self.node_shape = shape
self.font_size = fsize
self.font_color = fcolor
self.edge_label = weights
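
# Example (a minimal sketch; `graphfile` is a hypothetical wrapper whose
# `.graph.graph` is a networkx graph with node 'position' attributes, as
# GraphImage.get_node_pos expects):
#   style = ImageStyle(n_size=300, ncolor='skyblue', estyle='dashed')
#   image = GraphImage(style, graphfile)
#   data_uri = image.url  # base64-encoded PNG, usable as an <img> src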
def get_x_values(measure, graphfile):
"""
Gets list of values of each node on a measure defined by
the first parameter.
This measure can be closeness centrality, clustering coefficient, etc.
:param measure Measure to get values of each node.
:param graphfile Graph object.
:return list of values.
"""
graph = graphfile.graph.graph
values = list(nx.get_node_attributes(graph, measure).values())
return values
class Diagram:
"""
This class defines a diagram which represents the data visualization of
nodes of a graph.
    This diagram can be a histogram which describes the distribution of a
    measure, such as closeness centrality or clustering coefficient, amongst
    the nodes of a graph.
    There is also one more type of diagram, available for growing networks
    only (such as Albert-Barabasi graphs), which describes the evolution of
    average degree and average shortest path length over time.
"""
def __init__(self, x_values_type, graphfile):
"""
Initializes a distribution frequency histogram of a graph's nodes
measure defined by the parameter.
:param x_values_type Values type of x-axis.
:param graphfile Graph object.
"""
self.x_values_type = x_values_type
self.classes_number = 0
self.class_width = 0.0
self.bar_frequencies = []
self.polygon_frequencies = []
self.central_values = []
self.initial_values = []
self.values = get_x_values(self.x_values_type, graphfile)
self.initialize_values()
self.create_histogram()
self.create_polygon()
self.url = Diagram.get_url()
def initialize_values(self):
"""
Initializes the required class attributes in order a histogram can
be created.
These class attributes refer to:
- The variance between the max value and min value of the measure.
- The number of classes which are required.
- Class width.
- The central values of each class.
- Polygon frequency values of each class.
- Histogram frequency values of each class.
"""
min_value = min(self.values)
max_value = max(self.values)
variance = max_value - min_value
self.classes_number = int(math.ceil(1 + 3.3 * math.log10(len(self.values)))) + 1
self.class_width = variance / float(self.classes_number)
self.central_values = [min_value - self.class_width / 2]
classes = [(min_value, min_value + self.class_width)]
self.central_values.append((min_value + min_value + self.class_width) / 2)
for i in range(1, self.classes_number + 1):
lower_limit = min_value + (i * self.class_width)
upper_limit = min_value + (i + 1) * self.class_width
classes.append((lower_limit, upper_limit))
self.central_values.append((lower_limit + upper_limit) / 2)
self.polygon_frequencies.append(0)
for diagram_class in classes:
counter = 0
for value in self.values:
if diagram_class[0] <= value < diagram_class[1]:
counter += 1
self.bar_frequencies.append(counter)
self.polygon_frequencies.append(counter)
self.initial_values.append(diagram_class[0])
def create_polygon(self):
""" Creates the polygon frequency diagram. """
plt.plot(self.central_values, self.polygon_frequencies)
def create_histogram(self):
""" Creates the histogram frequency diagram. """
plt.subplots()
opacity = 0.7
error_config = {'ecolor': '0.1'}
plt.bar(self.initial_values,
self.bar_frequencies,
self.class_width,
alpha=opacity,
color='green',
error_kw=error_config)
plt.xlabel(str.capitalize(self.x_values_type))
plt.ylabel('Number of Nodes')
plt.title(str.capitalize(self.x_values_type) + ' Distribution')
plt.legend()
plt.tight_layout()
@staticmethod
def graph_evolution_over_time(time, graphfile):
"""
        Creates a diagram which describes the evolution of average degree
        and average shortest path length over time.
        For growing networks only, such as Albert-Barabasi graphs.
:param time: The upper value of time.
:param graphfile: Graph object.
:return: One encoded string of image of average degree evolution and
one encoded string of image of average shortest path length evolution.
Both encoded strings are based on base64 encoding.
"""
graph_copy = copy.deepcopy(graphfile.graph)
values_to_analyze = graph_copy.calculate_evolution_over_time(time)
degree_values = values_to_analyze[0].values()
shortest_path_values = values_to_analyze[1].values()
x_values = values_to_analyze[0].keys()
plt.figure(1)
plt.plot(x_values, degree_values, 'ro-')
plt.xlabel('Time')
plt.ylabel('Degree')
plt.title('Degree over time')
url1 = Diagram.get_url()
plt.figure(2)
plt.plot(x_values, shortest_path_values, 'ro-')
plt.xlabel('Time')
plt.ylabel('Average shortest path length')
plt.title('Average shortest path length over time')
url2 = Diagram.get_url()
return url1, url2
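
    # Example (a minimal sketch; assumes the graph's nodes carry an attribute
    # named after the requested measure, e.g. 'closeness centrality'):
    #   diagram = Diagram('closeness centrality', graphfile)
    #   data_uri = diagram.url  # base64-encoded PNG frequency histogram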
@staticmethod
def get_url():
try:
rv = StringIO.StringIO()
plt.savefig(rv, format="png")
url = "data:image/png;base64,%s" % rv.getvalue().encode("base64").strip()
finally:
plt.clf()
plt.close()
        return url
| theosotr/netxanal | mvc/controller/visualization.py | Python | apache-2.0 | 21,545 | 0.001439 |
'''
Copyright (c) 2011-2014, Agora Games, LLC All rights reserved.
https://github.com/agoragames/haigha/blob/master/LICENSE.txt
'''
import struct
import sys
from collections import deque
from haigha.reader import Reader
class Frame(object):
'''
Base class for a frame.
'''
# Exceptions
class FrameError(Exception):
'''Base class for all frame errors'''
class FormatError(FrameError):
'''The frame was mal-formed.'''
class InvalidFrameType(FrameError):
'''The frame type is unknown.'''
# Class data
_frame_type_map = {}
# Class methods
@classmethod
def register(cls):
'''
Register a frame type.
'''
cls._frame_type_map[cls.type()] = cls
@classmethod
    def type(cls):
'''
Fetch the type of this frame. Should be an octet.
'''
raise NotImplementedError()
@classmethod
def read_frames(cls, reader):
'''
Read one or more frames from an IO stream. Buffer must support file
object interface.
After reading, caller will need to check if there are bytes remaining
in the stream. If there are, then that implies that there is one or
more incomplete frames and more data needs to be read. The position
of the cursor in the frame stream will mark the point at which the
last good frame was read. If the caller is expecting a sequence of
frames and only received a part of that sequence, they are responsible
for buffering those frames until the rest of the frames in the sequence
have arrived.
'''
rval = deque()
while True:
frame_start_pos = reader.tell()
try:
frame = Frame._read_frame(reader)
except Reader.BufferUnderflow:
# No more data in the stream
frame = None
except Reader.ReaderError as e:
# Some other format error
raise Frame.FormatError, str(e), sys.exc_info()[-1]
except struct.error as e:
raise Frame.FormatError, str(e), sys.exc_info()[-1]
if frame is None:
reader.seek(frame_start_pos)
break
rval.append(frame)
return rval
@classmethod
def _read_frame(cls, reader):
'''
Read a single frame from a Reader. Will return None if there is an
incomplete frame in the stream.
        Raises Frame.FormatError if there's a problem reading the footer byte.
'''
frame_type = reader.read_octet()
channel_id = reader.read_short()
size = reader.read_long()
payload = Reader(reader, reader.tell(), size)
# Seek to end of payload
reader.seek(size, 1)
ch = reader.read_octet() # footer
if ch != 0xce:
            raise Frame.FormatError(
                'Framing error, unexpected byte: %x. frame type %x. '
                'channel %d, payload size %d' %
                (ch, frame_type, channel_id, size))
frame_class = cls._frame_type_map.get(frame_type)
if not frame_class:
            raise Frame.InvalidFrameType("Unknown frame type %x" % frame_type)
return frame_class.parse(channel_id, payload)
# Instance methods
def __init__(self, channel_id=-1):
self._channel_id = channel_id
@classmethod
def parse(cls, channel_id, payload):
'''
Subclasses need to implement parsing of their frames. Should return
a new instance of their type.
'''
raise NotImplementedError()
@property
def channel_id(self):
return self._channel_id
def __str__(self):
return "%s[channel: %d]" % (self.__class__.__name__, self.channel_id)
def __repr__(self):
# Have to actually call the method rather than __repr__==__str__
# because subclasses overload __str__
return str(self)
def write_frame(self, stream):
'''
Write this frame.
'''
raise NotImplementedError()
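
# Example (a minimal sketch; assumes concrete frame subclasses have called
# `register()` and `reader` is a haigha Reader over received bytes):
#   for frame in Frame.read_frames(reader):
#       print frame  # "<SubclassName>[channel: N]", per __str__ above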
| TeamODrKnow/doctor-know | haigha/frames/frame.py | Python | mit | 4,107 | 0.000243 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import calendar
import logging
from flask import Flask, abort, jsonify, render_template, request
from flask.json import JSONEncoder
from flask_compress import Compress
from datetime import datetime
from s2sphere import LatLng
from pogom.utils import get_args
from datetime import timedelta
from collections import OrderedDict
from . import config
from .models import Pokemon, Gym, Pokestop, ScannedLocation, MainWorker, WorkerStatus
from .utils import now
log = logging.getLogger(__name__)
compress = Compress()
class Pogom(Flask):
def __init__(self, import_name, **kwargs):
super(Pogom, self).__init__(import_name, **kwargs)
compress.init_app(self)
self.json_encoder = CustomJSONEncoder
self.route("/", methods=['GET'])(self.fullmap)
self.route("/raw_data", methods=['GET'])(self.raw_data)
self.route("/loc", methods=['GET'])(self.loc)
self.route("/next_loc", methods=['POST'])(self.next_loc)
self.route("/mobile", methods=['GET'])(self.list_pokemon)
self.route("/search_control", methods=['GET'])(self.get_search_control)
self.route("/search_control", methods=['POST'])(self.post_search_control)
self.route("/stats", methods=['GET'])(self.get_stats)
self.route("/status", methods=['GET'])(self.get_status)
self.route("/status", methods=['POST'])(self.post_status)
self.route("/gym_data", methods=['GET'])(self.get_gymdata)
def set_search_control(self, control):
self.search_control = control
def set_heartbeat_control(self, heartb):
self.heartbeat = heartb
def set_location_queue(self, queue):
self.location_queue = queue
def set_current_location(self, location):
self.current_location = location
def get_search_control(self):
return jsonify({'status': not self.search_control.is_set()})
def post_search_control(self):
args = get_args()
if not args.search_control or args.on_demand_timeout > 0:
return 'Search control is disabled', 403
action = request.args.get('action', 'none')
if action == 'on':
self.search_control.clear()
log.info('Search thread resumed')
elif action == 'off':
self.search_control.set()
log.info('Search thread paused')
else:
return jsonify({'message': 'invalid use of api'})
return self.get_search_control()
def fullmap(self):
self.heartbeat[0] = now()
args = get_args()
if args.on_demand_timeout > 0:
self.search_control.clear()
fixed_display = "none" if args.fixed_location else "inline"
search_display = "inline" if args.search_control and args.on_demand_timeout <= 0 else "none"
scan_display = "none" if (args.only_server or args.fixed_location or args.spawnpoint_scanning) else "inline"
map_lat = self.current_location[0]
map_lng = self.current_location[1]
if request.args:
map_lat = request.args.get('lat') or self.current_location[0]
map_lng = request.args.get('lon') or self.current_location[1]
return render_template('map.html',
lat=map_lat,
lng=map_lng,
gmaps_key=config['GMAPS_KEY'],
lang=config['LOCALE'],
is_fixed=fixed_display,
search_control=search_display,
show_scan=scan_display
)
def raw_data(self):
self.heartbeat[0] = now()
args = get_args()
if args.on_demand_timeout > 0:
self.search_control.clear()
d = {}
# Request time of this request.
d['timestamp'] = datetime.utcnow()
# Request time of previous request.
if request.args.get('timestamp'):
timestamp = int(request.args.get('timestamp'))
timestamp -= 1000 # Overlap, for rounding errors.
else:
timestamp = 0
        swLat = request.args.get('swLat', type=float)
        swLng = request.args.get('swLng', type=float)
        neLat = request.args.get('neLat', type=float)
        neLng = request.args.get('neLng', type=float)
        oSwLat = request.args.get('oSwLat', type=float)
        oSwLng = request.args.get('oSwLng', type=float)
        oNeLat = request.args.get('oNeLat', type=float)
        oNeLng = request.args.get('oNeLng', type=float)
# Previous switch settings.
lastgyms = request.args.get('lastgyms')
lastpokestops = request.args.get('lastpokestops')
lastpokemon = request.args.get('lastpokemon')
lastslocs = request.args.get('lastslocs')
lastspawns = request.args.get('lastspawns')
if request.args.get('luredonly', 'true') == 'true':
luredonly = True
else:
luredonly = False
# Current switch settings saved for next request.
if request.args.get('gyms', 'true') == 'true':
d['lastgyms'] = request.args.get('gyms', 'true')
if request.args.get('pokestops', 'true') == 'true':
d['lastpokestops'] = request.args.get('pokestops', 'true')
if request.args.get('pokemon', 'true') == 'true':
d['lastpokemon'] = request.args.get('pokemon', 'true')
if request.args.get('scanned', 'true') == 'true':
d['lastslocs'] = request.args.get('scanned', 'true')
if request.args.get('spawnpoints', 'false') == 'true':
d['lastspawns'] = request.args.get('spawnpoints', 'false')
# If old coords are not equal to current coords we have moved/zoomed!
if oSwLng < swLng and oSwLat < swLat and oNeLat > neLat and oNeLng > neLng:
            newArea = False  # We zoomed in; no new area uncovered.
elif not (oSwLat == swLat and oSwLng == swLng and oNeLat == neLat and oNeLng == neLng):
newArea = True
else:
newArea = False
# Pass current coords as old coords.
d['oSwLat'] = swLat
d['oSwLng'] = swLng
d['oNeLat'] = neLat
d['oNeLng'] = neLng
if request.args.get('pokemon', 'true') == 'true':
if request.args.get('ids'):
ids = [int(x) for x in request.args.get('ids').split(',')]
d['pokemons'] = Pokemon.get_active_by_id(ids, swLat, swLng,
neLat, neLng)
elif lastpokemon != 'true':
# If this is first request since switch on, load all pokemon on screen.
d['pokemons'] = Pokemon.get_active(swLat, swLng, neLat, neLng)
else:
# If map is already populated only request modified Pokemon since last request time.
d['pokemons'] = Pokemon.get_active(swLat, swLng, neLat, neLng, timestamp=timestamp)
if newArea:
# If screen is moved add newly uncovered Pokemon to the ones that were modified since last request time.
d['pokemons'] = d['pokemons'] + (Pokemon.get_active(swLat, swLng, neLat, neLng, oSwLat=oSwLat, oSwLng=oSwLng, oNeLat=oNeLat, oNeLng=oNeLng))
if request.args.get('eids'):
# Exclude id's of pokemon that are hidden.
eids = [int(x) for x in request.args.get('eids').split(',')]
d['pokemons'] = [x for x in d['pokemons'] if x['pokemon_id'] not in eids]
if request.args.get('reids'):
reids = [int(x) for x in request.args.get('reids').split(',')]
d['pokemons'] = d['pokemons'] + (Pokemon.get_active_by_id(reids, swLat, swLng, neLat, neLng))
d['reids'] = reids
if request.args.get('pokestops', 'true') == 'true':
if lastpokestops != 'true':
d['pokestops'] = Pokestop.get_stops(swLat, swLng, neLat, neLng, lured=luredonly)
else:
d['pokestops'] = Pokestop.get_stops(swLat, swLng, neLat, neLng, timestamp=timestamp)
if newArea:
d['pokestops'] = d['pokestops'] + (Pokestop.get_stops(swLat, swLng, neLat, neLng, oSwLat=oSwLat, oSwLng=oSwLng, oNeLat=oNeLat, oNeLng=oNeLng, lured=luredonly))
if request.args.get('gyms', 'true') == 'true':
if lastgyms != 'true':
d['gyms'] = Gym.get_gyms(swLat, swLng, neLat, neLng)
else:
d['gyms'] = Gym.get_gyms(swLat, swLng, neLat, neLng, timestamp=timestamp)
if newArea:
d['gyms'].update(Gym.get_gyms(swLat, swLng, neLat, neLng, oSwLat=oSwLat, oSwLng=oSwLng, oNeLat=oNeLat, oNeLng=oNeLng))
if request.args.get('scanned', 'true') == 'true':
if lastslocs != 'true':
d['scanned'] = ScannedLocation.get_recent(swLat, swLng, neLat, neLng)
else:
d['scanned'] = ScannedLocation.get_recent(swLat, swLng, neLat, neLng, timestamp=timestamp)
if newArea:
d['scanned'] = d['scanned'] + (ScannedLocation.get_recent(swLat, swLng, neLat, neLng, oSwLat=oSwLat, oSwLng=oSwLng, oNeLat=oNeLat, oNeLng=oNeLng))
selected_duration = None
# for stats and changed nest points etc, limit pokemon queried.
for duration in self.get_valid_stat_input()["duration"]["items"].values():
if duration["selected"] == "SELECTED":
selected_duration = duration["value"]
break
if request.args.get('seen', 'false') == 'true':
d['seen'] = Pokemon.get_seen(selected_duration)
if request.args.get('appearances', 'false') == 'true':
d['appearances'] = Pokemon.get_appearances(request.args.get('pokemonid'), selected_duration)
if request.args.get('appearancesDetails', 'false') == 'true':
d['appearancesTimes'] = Pokemon.get_appearances_times_by_spawnpoint(request.args.get('pokemonid'),
request.args.get('spawnpoint_id'),
selected_duration)
if request.args.get('spawnpoints', 'false') == 'true':
if lastspawns != 'true':
d['spawnpoints'] = Pokemon.get_spawnpoints(swLat=swLat, swLng=swLng, neLat=neLat, neLng=neLng)
else:
d['spawnpoints'] = Pokemon.get_spawnpoints(swLat=swLat, swLng=swLng, neLat=neLat, neLng=neLng, timestamp=timestamp)
if newArea:
d['spawnpoints'] = d['spawnpoints'] + (Pokemon.get_spawnpoints(swLat, swLng, neLat, neLng, oSwLat=oSwLat, oSwLng=oSwLng, oNeLat=oNeLat, oNeLng=oNeLng))
if request.args.get('status', 'false') == 'true':
args = get_args()
d = {}
if args.status_page_password is None:
d['error'] = 'Access denied'
elif request.args.get('password', None) == args.status_page_password:
d['main_workers'] = MainWorker.get_all()
d['workers'] = WorkerStatus.get_all()
return jsonify(d)
def loc(self):
d = {}
d['lat'] = self.current_location[0]
d['lng'] = self.current_location[1]
return jsonify(d)
def next_loc(self):
args = get_args()
if args.fixed_location:
return 'Location changes are turned off', 403
# Part of query string.
if request.args:
lat = request.args.get('lat', type=float)
lon = request.args.get('lon', type=float)
# From post requests.
if request.form:
lat = request.form.get('lat', type=float)
lon = request.form.get('lon', type=float)
if not (lat and lon):
log.warning('Invalid next location: %s,%s', lat, lon)
return 'bad parameters', 400
else:
self.location_queue.put((lat, lon, 0))
self.set_current_location((lat, lon, 0))
log.info('Changing next location: %s,%s', lat, lon)
return self.loc()
def list_pokemon(self):
# todo: Check if client is Android/iOS/Desktop for geolink, currently
# only supports Android.
pokemon_list = []
# Allow client to specify location.
lat = request.args.get('lat', self.current_location[0], type=float)
lon = request.args.get('lon', self.current_location[1], type=float)
origin_point = LatLng.from_degrees(lat, lon)
for pokemon in Pokemon.get_active(None, None, None, None):
pokemon_point = LatLng.from_degrees(pokemon['latitude'],
pokemon['longitude'])
diff = pokemon_point - origin_point
diff_lat = diff.lat().degrees
diff_lng = diff.lng().degrees
direction = (('N' if diff_lat >= 0 else 'S')
if abs(diff_lat) > 1e-4 else '') +\
(('E' if diff_lng >= 0 else 'W')
if abs(diff_lng) > 1e-4 else '')
entry = {
'id': pokemon['pokemon_id'],
'name': pokemon['pokemon_name'],
'card_dir': direction,
'distance': int(origin_point.get_distance(
pokemon_point).radians * 6366468.241830914),
'time_to_disappear': '%d min %d sec' % (divmod((
pokemon['disappear_time'] - datetime.utcnow()).seconds, 60)),
'disappear_time': pokemon['disappear_time'],
'disappear_sec': (pokemon['disappear_time'] - datetime.utcnow()).seconds,
'latitude': pokemon['latitude'],
'longitude': pokemon['longitude']
}
pokemon_list.append((entry, entry['distance']))
pokemon_list = [y[0] for y in sorted(pokemon_list, key=lambda x: x[1])]
return render_template('mobile_list.html',
pokemon_list=pokemon_list,
origin_lat=lat,
origin_lng=lon)
def get_valid_stat_input(self):
duration = request.args.get("duration", type=str)
sort = request.args.get("sort", type=str)
order = request.args.get("order", type=str)
valid_durations = OrderedDict()
valid_durations["1h"] = {"display": "Last Hour", "value": timedelta(hours=1), "selected": ("SELECTED" if duration == "1h" else "")}
valid_durations["3h"] = {"display": "Last 3 Hours", "value": timedelta(hours=3), "selected": ("SELECTED" if duration == "3h" else "")}
valid_durations["6h"] = {"display": "Last 6 Hours", "value": timedelta(hours=6), "selected": ("SELECTED" if duration == "6h" else "")}
valid_durations["12h"] = {"display": "Last 12 Hours", "value": timedelta(hours=12), "selected": ("SELECTED" if duration == "12h" else "")}
valid_durations["1d"] = {"display": "Last Day", "value": timedelta(days=1), "selected": ("SELECTED" if duration == "1d" else "")}
valid_durations["7d"] = {"display": "Last 7 Days", "value": timedelta(days=7), "selected": ("SELECTED" if duration == "7d" else "")}
valid_durations["14d"] = {"display": "Last 14 Days", "value": timedelta(days=14), "selected": ("SELECTED" if duration == "14d" else "")}
valid_durations["1m"] = {"display": "Last Month", "value": timedelta(days=365 / 12), "selected": ("SELECTED" if duration == "1m" else "")}
valid_durations["3m"] = {"display": "Last 3 Months", "value": timedelta(days=3 * 365 / 12), "selected": ("SELECTED" if duration == "3m" else "")}
valid_durations["6m"] = {"display": "Last 6 Months", "value": timedelta(days=6 * 365 / 12), "selected": ("SELECTED" if duration == "6m" else "")}
valid_durations["1y"] = {"display": "Last Year", "value": timedelta(days=365), "selected": ("SELECTED" if duration == "1y" else "")}
valid_durations["all"] = {"display": "Map Lifetime", "value": 0, "selected": ("SELECTED" if duration == "all" else "")}
if duration not in valid_durations:
valid_durations["1d"]["selected"] = "SELECTED"
valid_sort = OrderedDict()
valid_sort["count"] = {"display": "Count", "selected": ("SELECTED" if sort == "count" else "")}
valid_sort["id"] = {"display": "Pokedex Number", "selected": ("SELECTED" if sort == "id" else "")}
valid_sort["name"] = {"display": "Pokemon Name", "selected": ("SELECTED" if sort == "name" else "")}
if sort not in valid_sort:
valid_sort["count"]["selected"] = "SELECTED"
valid_order = OrderedDict()
valid_order["asc"] = {"display": "Ascending", "selected": ("SELECTED" if order == "asc" else "")}
valid_order["desc"] = {"display": "Descending", "selected": ("SELECTED" if order == "desc" else "")}
if order not in valid_order:
valid_order["desc"]["selected"] = "SELECTED"
valid_input = OrderedDict()
valid_input["duration"] = {"display": "Duration", "items": valid_durations}
valid_input["sort"] = {"display": "Sort", "items": valid_sort}
valid_input["order"] = {"display": "Order", "items": valid_order}
return valid_input
def get_stats(self):
return render_template('statistics.html',
lat=self.current_location[0],
lng=self.current_location[1],
gmaps_key=config['GMAPS_KEY'],
valid_input=self.get_valid_stat_input()
)
def get_gymdata(self):
gym_id = request.args.get('id')
gym = Gym.get_gym(gym_id)
return jsonify(gym)
def get_status(self):
args = get_args()
if args.status_page_password is None:
abort(404)
return render_template('status.html')
def post_status(self):
args = get_args()
d = {}
if args.status_page_password is None:
abort(404)
if request.form.get('password', None) == args.status_page_password:
d['login'] = 'ok'
d['main_workers'] = MainWorker.get_all()
d['workers'] = WorkerStatus.get_all()
else:
d['login'] = 'failed'
return jsonify(d)
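# Serializes datetimes as UTC epoch milliseconds and any iterable as a list;
# everything else falls through to the default JSON encoder.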
class CustomJSONEncoder(JSONEncoder):
def default(self, obj):
try:
if isinstance(obj, datetime):
if obj.utcoffset() is not None:
obj = obj - obj.utcoffset()
millis = int(
calendar.timegm(obj.timetuple()) * 1000 +
obj.microsecond / 1000
)
return millis
iterable = iter(obj)
except TypeError:
pass
else:
return list(iterable)
return JSONEncoder.default(self, obj)
| pretsell/PokemonGo-Map | pogom/app.py | Python | agpl-3.0 | 19,025 | 0.002628 |
from django.conf.urls import url
from cats.views.cat import (
CatList,
CatDetail
)
from cats.views.breed import (
BreedList,
BreedDetail
)
urlpatterns = [
    # Cat URLs
url(r'^cats/$', CatList.as_view(), name='list'),
url(r'^cats/(?P<pk>\d+)/$', CatDetail.as_view(), name='detail'),
    # Breed URLs
url(r'^breeds/$', BreedList.as_view(), name='list_breeds'),
url(r'^breeds/(?P<pk>\d+)/$', BreedDetail.as_view(), name='detail_breed'),
]
| OscaRoa/api-cats | cats/urls.py | Python | mit | 475 | 0 |
#!/usr/bin/env python
# -*- python -*-
# ex: set syntax=python:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import sys,socket,os,time,re
port=None
host=None
if os.environ.has_key("SHELLPORT"):
try:
port=int(os.environ.get("SHELLPORT"))
except:
print("error: parsing SHELLPORT")
if os.environ.has_key("SHELLSERVER"):
host=os.environ.get("SHELLSERVER")
if len(sys.argv)>1 and re.search('^--shellserver=',sys.argv[1]):
shellserver=sys.argv[1][14:]
if shellserver.find(':')>-1:
host=shellserver[0:shellserver.find(':')]
try:
port=int(shellserver[shellserver.find(':')+1:])
except:
      pass
sys.argv=sys.argv[1:]
if (host==None or port==None):
print("error: SHELLPORT and SHELLSERVER must be set")
sys.exit(1)
args=""
for item in sys.argv[1:]:
args+=item+" "
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host,port))
s.send("abc_android %s" % args)
result=''
timeout=300
starttime=time.time()
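# Read replies until the remote shell reports an exit code, asks for
# application arguments, shows a prompt, or the timeout expires.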
while True:
newdata=s.recv(1024)
# print("data: "+newdata)
result+=newdata
if re.search("EXITCODE=[0-9]+\s",result):
break
if result.find("-- application argument")>-1:
break
if result.find("\n$")>-1:
break
if time.time()-starttime>timeout:
print("error: timeout detected")
break
s.close()
if re.search("EXITCODE=[0-9]+\s",result):
exitcode=result[result.find("EXITCODE")+9:]
if exitcode.find("$")>-1:
exitcode=exitcode[0:exitcode.find("$")]
try:
exitcode=int(exitcode.strip())
except:
    pass
result=result[0:result.find("EXITCODE")]
else:
exitcode=0
print(result)
sys.exit(exitcode)
| adobe-flash/avmplus | build/buildbot/slaves/android/scripts/shell-client-android.py | Python | mpl-2.0 | 1,874 | 0.026147 |
import unittest
import pandas as pd
import nose.tools
from mia.features.blobs import detect_blobs
from mia.features.intensity import detect_intensity
from mia.utils import preprocess_image
from ..test_utils import get_file_path
class IntensityTests(unittest.TestCase):
@classmethod
    def setUpClass(cls):
img_path = get_file_path("mias/mdb154.png")
msk_path = get_file_path("mias/masks/mdb154_mask.png")
cls._img, cls._msk = preprocess_image(img_path, msk_path)
# def test_detect_intensity(self):
# blobs = detect_blobs(self._img, self._msk)
# intensity = detect_intensity(self._img, blobs)
#
# nose.tools.assert_true(isinstance(intensity, pd.DataFrame))
# nose.tools.assert_equal(intensity.shape[1], 10)
| samueljackson92/major-project | src/tests/regression_tests/intensity_regression_test.py | Python | mit | 780 | 0 |
a = {'b',] | idea4bsd/idea4bsd | python/testData/psi/NotClosedBraceSet.py | Python | apache-2.0 | 10 | 0.2 |
# Standard Library Imports
import configargparse
from datetime import datetime, timedelta
from glob import glob
import json
import logging
from math import radians, sin, cos, atan2, sqrt, degrees
import os
import sys
import re
# 3rd Party Imports
# Local Imports
from . import config
log = logging.getLogger('Utils')
################################################### SYSTEM UTILITIES ###################################################
# Checks if a line contains any substitutions located in args
def contains_arg(line, args):
for word in args:
if ('<' + word + '>') in line:
return True
return False
def get_path(path):
if not os.path.isabs(path): # If not absolute path
path = os.path.join(config['ROOT_PATH'], path)
return path
def parse_boolean(val):
b = str(val).lower()
if b in {'t', 'true', 'y', 'yes'}:
return True
if b in ('f', 'false', 'n', 'no'):
return False
return None
def parse_unicode(bytestring):
decoded_string = bytestring.decode(sys.getfilesystemencoding())
return decoded_string
# Used for lazy installs - installs required module with pip
def pip_install(module, version):
import subprocess
target = "{}=={}".format(module, version)
log.info("Attempting to pip install %s..." % target)
subprocess.call(['pip', 'install', target])
log.info("%s install complete." % target)
# Used to exit when leftover parameters are found
def reject_leftover_parameters(dict_, location):
if len(dict_) > 0:
log.error("Unknown parameters at {}: ".format(location))
log.error(dict_.keys())
log.error("Please consult the PokeAlarm documentation for accepted parameters.")
sys.exit(1)
# Load a key from the given dict, or throw an error if it isn't there
def require_and_remove_key(key, _dict, location):
if key in _dict:
return _dict.pop(key)
else:
log.error("The parameter '{}' is required for {}".format(key, location)
+ " Please check the PokeAlarm documentation for correct formatting.")
sys.exit(1)
########################################################################################################################
################################################## POKEMON UTILITIES ###################################################
# Returns the id corresponding with the pokemon name (use all locales for flexibility)
def get_pkmn_id(pokemon_name):
name = pokemon_name.lower()
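    # Lazily build and memoize a name -> id map on the function object,
    # merging the pokemon.json from every locale.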
if not hasattr(get_pkmn_id, 'ids'):
get_pkmn_id.ids = {}
files = glob(get_path('locales/*/pokemon.json'))
for file_ in files:
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
nm = j[id_].lower()
get_pkmn_id.ids[nm] = int(id_)
return get_pkmn_id.ids.get(name)
# Returns the id corresponding with the move (use all locales for flexibility)
def get_move_id(move_name):
name = move_name.lower()
if not hasattr(get_move_id, 'ids'):
get_move_id.ids = {}
files = glob(get_path('locales/*/moves.json'))
for file_ in files:
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
nm = j[id_].lower()
get_move_id.ids[nm] = int(id_)
return get_move_id.ids.get(name)
# Returns the id corresponding with the team name (use all locales for flexibility)
def get_team_id(team_name):
name = team_name.lower()
if not hasattr(get_team_id, 'ids'):
get_team_id.ids = {}
files = glob(get_path('locales/*/teams.json'))
for file_ in files:
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
nm = j[id_].lower()
get_team_id.ids[nm] = int(id_)
return get_team_id.ids.get(name)
# Returns the damage of a move when requested
def get_move_damage(move_id):
if not hasattr(get_move_damage, 'info'):
get_move_damage.info = {}
file_ = get_path('locales/move_info.json')
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
get_move_damage.info[int(id_)] = j[id_]['damage']
return get_move_damage.info.get(move_id, 'unkn')
# Returns the DPS of a move when requested
def get_move_dps(move_id):
if not hasattr(get_move_dps, 'info'):
get_move_dps.info = {}
file_ = get_path('locales/move_info.json')
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
get_move_dps.info[int(id_)] = j[id_]['dps']
return get_move_dps.info.get(move_id, 'unkn')
# Returns the duration of a move when requested
def get_move_duration(move_id):
if not hasattr(get_move_duration, 'info'):
get_move_duration.info = {}
file_ = get_path('locales/move_info.json')
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
get_move_duration.info[int(id_)] = j[id_]['duration']
return get_move_duration.info.get(move_id, 'unkn')
# Returns the energy cost of a move when requested
def get_move_energy(move_id):
if not hasattr(get_move_energy, 'info'):
get_move_energy.info = {}
file_ = get_path('locales/move_info.json')
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
get_move_energy.info[int(id_)] = j[id_]['energy']
return get_move_energy.info.get(move_id, 'unkn')
# Returns the base height for a pokemon
def get_base_height(pokemon_id):
if not hasattr(get_base_height, 'info'):
get_base_height.info = {}
file_ = get_path('locales/base_stats.json')
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
get_base_height.info[int(id_)] = j[id_].get('height')
return get_base_height.info.get(pokemon_id)
# Returns the base weight for a pokemon
def get_base_weight(pokemon_id):
if not hasattr(get_base_weight, 'info'):
get_base_weight.info = {}
file_ = get_path('locales/base_stats.json')
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
get_base_weight.info[int(id_)] = j[id_].get('weight')
return get_base_weight.info.get(pokemon_id)
# Returns the size ratio of a pokemon
def size_ratio(pokemon_id, height, weight):
height_ratio = height / get_base_height(pokemon_id)
weight_ratio = weight / get_base_weight(pokemon_id)
return height_ratio + weight_ratio
# Returns the (appraisal) size of a pokemon:
def get_pokemon_size(pokemon_id, height, weight):
size = size_ratio(pokemon_id, height, weight)
if size < 1.5:
return 'tiny'
elif size <= 1.75:
return 'small'
elif size < 2.25:
return 'normal'
elif size <= 2.5:
return 'large'
else:
return 'big'
# Returns the gender symbol of a pokemon:
def get_pokemon_gender(gender):
if gender == 1:
return u'\u2642' # male symbol
elif gender == 2:
return u'\u2640' # female symbol
elif gender == 3:
        return u'\u26b2'  # neutral symbol
return '?' # catch all
########################################################################################################################
################################################# GMAPS API UTILITIES ##################################################
# Returns a String link to Google Maps Pin at the location
def get_gmaps_link(lat, lng):
latlng = '{},{}'.format(repr(lat), repr(lng))
return 'http://maps.google.com/maps?q={}'.format(latlng)
# Returns a String link to Apple Maps Pin at the location
def get_applemaps_link(lat, lng):
latLon = '{},{}'.format(repr(lat), repr(lng))
return 'http://maps.apple.com/maps?daddr={}&z=10&t=s&dirflg=w'.format(latLon)
# Returns a static map url with <lat> and <lng> placeholders for dynamic substitution
def get_static_map_url(settings, api_key=None): # TODO: optimize formatting
if not parse_boolean(settings.get('enabled', 'True')):
return None
width = settings.get('width', '250')
height = settings.get('height', '125')
maptype = settings.get('maptype', 'roadmap')
zoom = settings.get('zoom', '15')
center = '{},{}'.format('<lat>', '<lng>')
query_center = 'center={}'.format(center)
query_markers = 'markers=color:red%7C{}'.format(center)
query_size = 'size={}x{}'.format(width, height)
query_zoom = 'zoom={}'.format(zoom)
query_maptype = 'maptype={}'.format(maptype)
map_ = ('https://maps.googleapis.com/maps/api/staticmap?' +
query_center + '&' + query_markers + '&' +
query_maptype + '&' + query_size + '&' + query_zoom)
if api_key is not None:
map_ += ('&key=%s' % api_key)
log.debug("API_KEY added to static map url.")
return map_
########################################################################################################################
################################################## GENERAL UTILITIES ###################################################
# Returns a cardinal direction (N/NW/W/SW, etc) of the pokemon from the origin point, if set
def get_cardinal_dir(pt_a, pt_b=None):
if pt_b is None:
return '?'
lat1, lng1, lat2, lng2 = map(radians, [pt_b[0], pt_b[1], pt_a[0], pt_a[1]])
directions = ["S", "SE", "E", "NE", "N", "NW", "W", "SW", "S"]
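    # Compute the bearing between the two points and bucket it into the
    # eight 45-degree compass sectors listed above.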
bearing = (degrees(atan2(cos(lat1) * sin(lat2) - sin(lat1) * cos(lat2) * cos(lng2 - lng1),
sin(lng2 - lng1) * cos(lat2))) + 450) % 360
return directions[int(round(bearing / 45))]
# Return the distance formatted correctly
def get_dist_as_str(dist):
if dist == 'unkn':
return 'unkn'
if config['UNITS'] == 'imperial':
if dist > 1760: # yards per mile
return "{:.1f}mi".format(dist / 1760.0)
else:
return "{:.1f}yd".format(dist)
else: # Metric
if dist > 1000: # meters per km
return "{:.1f}km".format(dist / 1000.0)
else:
return "{:.1f}m".format(dist)
# Returns the distance between A and B in meters (or yards for imperial units), or 'unkn' if B is unset
def get_earth_dist(pt_a, pt_b=None):
if type(pt_a) is str or pt_b is None:
return 'unkn' # No location set
log.debug("Calculating distance from {} to {}".format(pt_a, pt_b))
lat_a = radians(pt_a[0])
lng_a = radians(pt_a[1])
lat_b = radians(pt_b[0])
lng_b = radians(pt_b[1])
lat_delta = lat_b - lat_a
lng_delta = lng_b - lng_a
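    # Haversine formula: great-circle distance between the two coordinates.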
a = sin(lat_delta / 2) ** 2 + cos(lat_a) * cos(lat_b) * sin(lng_delta / 2) ** 2
c = 2 * atan2(sqrt(a), sqrt(1 - a))
radius = 6373000 # radius of earth in meters
if config['UNITS'] == 'imperial':
radius = 6975175 # radius of earth in yards
dist = c * radius
return dist
# Return the time as a string in different formats
def get_time_as_str(t, timezone=None):
if timezone is None:
timezone = config.get("TIMEZONE")
s = (t - datetime.utcnow()).total_seconds()
(m, s) = divmod(s, 60)
(h, m) = divmod(m, 60)
d = timedelta(hours=h, minutes=m, seconds=s)
if timezone is not None:
disappear_time = datetime.now(tz=timezone) + d
else:
disappear_time = datetime.now() + d
# Time remaining in minutes and seconds
time_left = "%dm %ds" % (m, s)
    # Disappear time in 12h format, e.g. "02:30:16pm"
time_12 = disappear_time.strftime("%I:%M:%S") + disappear_time.strftime("%p").lower()
    # Disappear time in 24h format including seconds, e.g. "14:30:16"
time_24 = disappear_time.strftime("%H:%M:%S")
return time_left, time_12, time_24
########################################################################################################################
| xc0ut/PokeAlarm | PokeAlarm/Utils.py | Python | agpl-3.0 | 11,971 | 0.00259 |
from asyncio import AbstractEventLoop
import aiomysql.sa
import asyncpg
from asyncio_extras import async_contextmanager
from cetus.types import (ConnectionType,
MySQLConnectionType,
PostgresConnectionType)
from sqlalchemy.engine.url import URL
DEFAULT_MYSQL_PORT = 3306
DEFAULT_POSTGRES_PORT = 5432
DEFAULT_MIN_CONNECTIONS_LIMIT = 10
DEFAULT_CONNECTION_TIMEOUT = 60
@async_contextmanager
async def get_connection_pool(
*, db_uri: URL,
is_mysql: bool,
timeout: float = DEFAULT_CONNECTION_TIMEOUT,
min_size: int = DEFAULT_MIN_CONNECTIONS_LIMIT,
max_size: int,
loop: AbstractEventLoop):
if is_mysql:
async with get_mysql_connection_pool(
db_uri,
timeout=timeout,
min_size=min_size,
max_size=max_size,
loop=loop) as connection_pool:
yield connection_pool
else:
async with get_postgres_connection_pool(
db_uri,
timeout=timeout,
min_size=min_size,
max_size=max_size,
loop=loop) as connection_pool:
yield connection_pool
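# Example usage (sketch; assumes a SQLAlchemy URL and a running event loop):
#     async with get_connection_pool(db_uri=url, is_mysql=False,
#                                    max_size=20, loop=loop) as pool:
#         ...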
@async_contextmanager
async def get_mysql_connection_pool(
db_uri: URL, *,
timeout: float = DEFAULT_CONNECTION_TIMEOUT,
min_size: int = DEFAULT_MIN_CONNECTIONS_LIMIT,
max_size: int,
loop: AbstractEventLoop):
# `None` port causes exceptions
port = db_uri.port or DEFAULT_MYSQL_PORT
# we use engine instead of plain connection pool
# because `aiomysql` has transactions API
# only for engine-based connections
async with aiomysql.sa.create_engine(
host=db_uri.host,
port=port,
user=db_uri.username,
password=db_uri.password,
db=db_uri.database,
charset='utf8',
connect_timeout=timeout,
# TODO: check if `asyncpg` connections
# are autocommit by default
autocommit=True,
minsize=min_size,
maxsize=max_size,
loop=loop) as engine:
yield engine
@async_contextmanager
async def get_postgres_connection_pool(
db_uri: URL, *,
timeout: float = DEFAULT_CONNECTION_TIMEOUT,
min_size: int = DEFAULT_MIN_CONNECTIONS_LIMIT,
max_size: int,
loop: AbstractEventLoop):
# for symmetry with MySQL case
port = db_uri.port or DEFAULT_POSTGRES_PORT
async with asyncpg.create_pool(
host=db_uri.host,
port=port,
user=db_uri.username,
password=db_uri.password,
database=db_uri.database,
timeout=timeout,
min_size=min_size,
max_size=max_size,
loop=loop) as pool:
yield pool
@async_contextmanager
async def begin_transaction(
*, connection: ConnectionType,
is_mysql: bool):
if is_mysql:
async with begin_mysql_transaction(connection):
yield
else:
async with begin_postgres_transaction(connection):
yield
@async_contextmanager
async def begin_mysql_transaction(
connection: MySQLConnectionType):
transaction = connection.begin()
async with transaction:
yield
@async_contextmanager
async def begin_postgres_transaction(
connection: PostgresConnectionType,
*, isolation: str = 'read_committed',
read_only: bool = False,
deferrable: bool = False):
transaction = connection.transaction(
isolation=isolation,
readonly=read_only,
deferrable=deferrable)
async with transaction:
yield
@async_contextmanager
async def get_connection(
*, db_uri: URL,
is_mysql: bool,
timeout: float = DEFAULT_CONNECTION_TIMEOUT,
loop: AbstractEventLoop):
if is_mysql:
async with get_mysql_connection(
db_uri,
timeout=timeout,
loop=loop) as connection:
yield connection
else:
async with get_postgres_connection(
db_uri,
timeout=timeout,
loop=loop) as connection:
yield connection
@async_contextmanager
async def get_mysql_connection(
db_uri: URL, *,
timeout: float = DEFAULT_CONNECTION_TIMEOUT,
loop: AbstractEventLoop):
# `None` port causes exceptions
port = db_uri.port or DEFAULT_MYSQL_PORT
# we use engine-based connection
# instead of plain connection
# because `aiomysql` has transactions API
# only for engine-based connections
async with aiomysql.sa.create_engine(
host=db_uri.host,
port=port,
user=db_uri.username,
password=db_uri.password,
db=db_uri.database,
charset='utf8',
connect_timeout=timeout,
# TODO: check if `asyncpg` connections
# are autocommit by default
autocommit=True,
minsize=1,
maxsize=1,
loop=loop) as engine:
async with engine.acquire() as connection:
yield connection
@async_contextmanager
async def get_postgres_connection(
db_uri: URL, *,
timeout: float = DEFAULT_CONNECTION_TIMEOUT,
loop: AbstractEventLoop):
# for symmetry with MySQL case
port = db_uri.port or DEFAULT_POSTGRES_PORT
connection = await asyncpg.connect(
host=db_uri.host,
port=port,
user=db_uri.username,
password=db_uri.password,
database=db_uri.database,
timeout=timeout,
loop=loop)
try:
yield connection
finally:
await connection.close()
| lycantropos/cetus | cetus/data_access/connectors.py | Python | mit | 5,800 | 0 |
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.orchestration import base
from tempest.lib import exceptions as lib_exc
from tempest import test
class TemplateYAMLNegativeTestJSON(base.BaseOrchestrationTest):
template = """
HeatTemplateFormatVersion: '2012-12-12'
Description: |
Template which creates only a new user
Resources:
CfnUser:
Type: AWS::IAM::User
"""
invalid_template_url = 'http://www.example.com/template.yaml'
@classmethod
def resource_setup(cls):
super(TemplateYAMLNegativeTestJSON, cls).resource_setup()
cls.parameters = {}
@test.attr(type=['negative'])
@test.idempotent_id('5586cbca-ddc4-4152-9db8-fa1ce5fc1876')
def test_validate_template_url(self):
"""Validating template passing url to it."""
self.assertRaises(lib_exc.BadRequest,
self.client.validate_template_url,
template_url=self.invalid_template_url,
parameters=self.parameters)
class TemplateAWSNegativeTestJSON(TemplateYAMLNegativeTestJSON):
template = """
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template which creates only a new user",
"Resources" : {
"CfnUser" : {
"Type" : "AWS::IAM::User"
}
}
}
"""
invalid_template_url = 'http://www.example.com/template.template'
| HybridF5/tempest_debug | tempest/api/orchestration/stacks/test_templates_negative.py | Python | apache-2.0 | 1,954 | 0 |
""" Test for too many branches. """
# pylint: disable=using-constant-test
def wrong(): # [too-many-branches]
""" Has too many branches. """
if 1:
pass
elif 1:
pass
elif 1:
pass
elif 1:
pass
elif 1:
pass
elif 1:
pass
try:
pass
finally:
pass
if 2:
pass
while True:
pass
if 1:
pass
elif 2:
pass
elif 3:
pass
def good():
""" Too many branches only if we take
into consideration the nested functions.
"""
def nested_1():
""" empty """
if 1:
pass
elif 2:
pass
elif 3:
pass
elif 4:
pass
nested_1()
try:
pass
finally:
pass
try:
pass
finally:
pass
if 1:
pass
elif 2:
pass
elif 3:
pass
elif 4:
pass
elif 5:
pass
elif 6:
pass
elif 7:
pass
| ruchee/vimrc | vimfiles/bundle/vim-python/submodules/pylint/tests/functional/t/too/too_many_branches.py | Python | mit | 1,099 | 0.00273 |
__author__ = 'noe'
from bhmm.hidden.impl_c.hidden import * | marscher/bhmm | bhmm/hidden/impl_c/__init__.py | Python | lgpl-3.0 | 59 | 0.016949 |
# -*- coding: utf-8 -*-
import access
import util
@auth.requires_login()
def index():
"""Produces a list of the feedback obtained for a given venue,
or for all venues."""
venue_id = request.args(0)
if venue_id == 'all':
q = (db.submission.user == get_user_email())
else:
q = ((db.submission.user == get_user_email())
& (db.submission.venue_id == venue_id))
db.submission.id.represent = lambda x, r: A(T('View'), _class='btn', _href=URL('submission', 'view_own_submission', args=['v', r.id]))
db.submission.id.label = T('Submission')
db.submission.id.readable = True
db.submission.venue_id.readable = True
grid = SQLFORM.grid(q,
fields=[db.submission.id, db.submission.venue_id,
db.submission.date_created, db.submission.date_updated, ],
csv=False, details=False, create=False, editable=False, deletable=False,
args=request.args[:1],
maxtextlength=24,
)
return dict(grid=grid)
@auth.requires_login()
def view_feedback():
"""Shows detailed feedback for a user in a venue.
This controller accepts various types of arguments:
* 's', submission_id
* 'u', venue_id, username
* 'v', venue_id (in which case, shows own submission to that venue)
"""
if len(request.args) == 0:
redirect(URL('default', 'index'))
if request.args(0) == 's':
# submission_id
n_args = 2
subm = db.submission(request.args(1)) or redirect(URL('default', 'index'))
c = db.venue(subm.venue_id) or redirect(URL('default', 'index'))
username = subm.user
elif request.args(0) == 'v':
# venue_id
n_args = 2
c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
username = get_user_email()
subm = db((db.submission.user == username) & (db.submission.venue_id == c.id)).select().first()
else:
# venue_id, username
n_args = 3
c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
username = request.args(2) or redirect(URL('default', 'index'))
subm = db((db.submission.user == username) & (db.submission.venue_id == c.id)).select().first()
# Checks permissions.
props = db(db.user_properties.user == get_user_email()).select().first()
    if props is None:
session.flash = T('Not authorized.')
redirect(URL('default', 'index'))
is_author = (username == get_user_email())
can_view_feedback = access.can_view_feedback(c, props) or is_author
if (not can_view_feedback):
session.flash = T('Not authorized.')
redirect(URL('default', 'index'))
if not (access.can_view_feedback(c, props) or datetime.utcnow() > c.rate_close_date):
session.flash = T('The ratings are not yet available.')
redirect(URL('feedback', 'index', args=['all']))
# Produces the link to edit the feedback.
edit_feedback_link = None
if subm is not None and access.can_observe(c, props):
edit_feedback_link = A(T('Edit feedback'), _class='btn',
_href=URL('submission', 'edit_feedback', args=[subm.id]))
# Produces the download link.
download_link = None
if subm is not None and c.allow_file_upload and subm.content is not None:
if is_author:
download_link = A(T('Download'), _class='btn',
_href=URL('submission', 'download_author', args=[subm.id, subm.content]))
else:
download_link = A(T('Download'), _class='btn',
_href=URL('submission', 'download_manager', args=[subm.id, subm.content]))
venue_link = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
# Submission link.
subm_link = None
if subm is not None and c.allow_link_submission:
subm_link = A(subm.link, _href=subm.link)
# Submission content and feedback.
subm_comment = None
subm_feedback = None
if subm is not None:
raw_subm_comment = keystore_read(subm.comment)
if raw_subm_comment is not None and len(raw_subm_comment) > 0:
subm_comment = MARKMIN(keystore_read(subm.comment))
raw_feedback = keystore_read(subm.feedback)
if raw_feedback is not None and len(raw_feedback) > 0:
subm_feedback = MARKMIN(raw_feedback)
# Display settings.
db.submission.percentile.readable = True
db.submission.comment.readable = True
db.submission.feedback.readable = True
if access.can_observe(c, props):
db.submission.quality.readable = True
db.submission.error.readable = True
# Reads the grade information.
submission_grade = submission_percentile = None
review_grade = review_percentile = user_reputation = None
final_grade = final_percentile = None
assigned_grade = None
if c.grades_released:
grade_info = db((db.grades.user == username) & (db.grades.venue_id == c.id)).select().first()
if grade_info is not None:
submission_grade = represent_quality(grade_info.submission_grade, None)
submission_percentile = represent_percentage(grade_info.submission_percentile, None)
review_grade = represent_quality_10(grade_info.accuracy, None)
review_percentile = represent_percentage(grade_info.accuracy_percentile, None)
user_reputation = represent_01_as_percentage(grade_info.reputation, None)
final_grade = represent_quality(grade_info.grade, None)
final_percentile = represent_percentage(grade_info.percentile, None)
assigned_grade = represent_quality(grade_info.assigned_grade, None)
# Makes a grid of comments.
db.task.submission_name.readable = False
db.task.assigned_date.readable = False
db.task.completed_date.readable = False
db.task.rejected.readable = True
db.task.helpfulness.readable = db.task.helpfulness.writable = True
# Prevent editing the comments; the only thing editable should be the "is bogus" field.
db.task.comments.writable = False
db.task.comments.readable = True
ranking_link = None
if access.can_observe(c, props):
db.task.user.readable = True
db.task.completed_date.readable = True
links = [
dict(header=T('Review details'), body= lambda r:
A(T('View'), _class='btn', _href=URL('ranking', 'view_comparison', args=[r.id]))),
]
details = False
if subm is not None:
ranking_link = A(T('details'), _href=URL('ranking', 'view_comparisons_given_submission', args=[subm.id]))
reviews_link = A(T('details'), _href=URL('ranking', 'view_comparisons_given_user', args=[username, c.id]))
db.task.user.represent = lambda v, r: A(v, _href=URL('ranking', 'view_comparisons_given_user',
args=[v, c.id], user_signature=True))
else:
user_reputation = None
links = [
dict(header=T('Review feedback'), body = lambda r:
A(T('Give feedback'), _class='btn',
_href=URL('feedback', 'reply_to_review', args=[r.id], user_signature=True))),
]
details = False
ranking_link = None
reviews_link = None
if subm is not None:
q = ((db.task.submission_id == subm.id) & (db.task.is_completed == True))
# q = (db.task.submission_id == subm.id)
else:
q = (db.task.id == -1)
grid = SQLFORM.grid(q,
fields=[db.task.id, db.task.user, db.task.rejected, db.task.comments, db.task.helpfulness, ],
details = details,
csv=False, create=False, editable=False, deletable=False, searchable=False,
links=links,
args=request.args[:n_args],
maxtextlength=24,
)
return dict(subm=subm, download_link=download_link, subm_link=subm_link, username=username,
subm_comment=subm_comment, subm_feedback=subm_feedback,
edit_feedback_link=edit_feedback_link,
is_admin=is_user_admin(),
submission_grade=submission_grade, submission_percentile=submission_percentile,
review_grade=review_grade, review_percentile=review_percentile,
user_reputation=user_reputation,
final_grade=final_grade, final_percentile=final_percentile,
assigned_grade=assigned_grade,
venue_link=venue_link, grid=grid, ranking_link=ranking_link,
reviews_link=reviews_link)
@auth.requires_signature()
def reply_to_review():
t = db.task(request.args(0)) or redirect(URL('default', 'index'))
db.task.submission_name.readable = False
db.task.assigned_date.readable = False
db.task.completed_date.readable = False
db.task.comments.readable = False
db.task.helpfulness.readable = db.task.helpfulness.writable = True
db.task.feedback.readable = db.task.feedback.writable = True
form = SQLFORM(db.task, record=t)
form.vars.feedback = keystore_read(t.feedback)
if form.process(onvalidation=validate_review_feedback(t)).accepted:
session.flash = T('Updated.')
redirect(URL('feedback', 'view_feedback', args=['s', t.submission_id]))
link_to_submission = A(T('View submission'), _href=URL('submission', 'view_own_submission', args=['v', t.submission_id]))
review_comments = MARKMIN(keystore_read(t.comments))
return dict(form=form, link_to_submission=link_to_submission, review_comments=review_comments)
def validate_review_feedback(t):
def f(form):
if not form.errors:
feedback_id = keystore_update(t.feedback, form.vars.feedback)
form.vars.feedback = feedback_id
return f
@auth.requires_login()
def view_my_reviews():
"""This controller displays the reviews a user has written for a venue, along with
the feedback they received."""
c = db.venue(request.args(0)) or redirect(URL('rating', 'review_index'))
link_to_venue = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
link_to_eval = A(T('My evaluation in this venue'), _class='btn',
_href=URL('feedback', 'view_feedback', args=['v', c.id]))
q = ((db.task.user == get_user_email()) & (db.task.venue_id == c.id))
db.task.rejected.readable = True
db.task.helpfulness.readable = True
db.task.comments.readable = True
db.task.feedback.readable = True
    # Prevent long submission names from being truncated in the grid.
db.task.submission_name.represent = represent_text_field
grid = SQLFORM.grid(q,
fields=[db.task.submission_name, db.task.rejected, db.task.helpfulness],
details=True,
editable=False, deletable=False, create=False, searchable=False,
csv=False,
args=request.args[:1],
maxtextlength=24,
)
return dict(grid=grid, link_to_venue=link_to_venue, link_to_eval=link_to_eval)
| lucadealfaro/crowdranker | controllers/feedback.py | Python | bsd-3-clause | 10,966 | 0.005471 |
#!/usr/bin/env python3
import unittest
import fileLists
class TestFileLists(unittest.TestCase):
def testOneEntry(self):
l=fileLists.listTableFiles([
'2014.3765.1.1.department.diff.csv',
])
self.assertEqual(len(l),1)
t=l[0]
self.assertEqual(t.stageYear,2014)
self.assertEqual(t.documentNumber,3765)
self.assertEqual(t.paragraphNumber,'1.1')
self.assertEqual(t.table,'department')
self.assertIsInstance(t.action,fileLists.DiffAction)
def testOneEntryWithDirectory(self):
l=fileLists.listTableFiles([
'tables\\2014.3574.3.department.set(2014).csv'
])
self.assertEqual(len(l),1)
def testSort(self):
l=fileLists.listTableFiles([
'2014.3765.7.1.department.diff.csv',
'2014.3765.10.4.department.diff.csv',
'2014.3765.1.1.department.diff.csv',
])
self.assertEqual(len(l),3)
self.assertEqual(l[0].paragraphNumber,'1.1')
self.assertEqual(l[1].paragraphNumber,'7.1')
self.assertEqual(l[2].paragraphNumber,'10.4')
def testSet(self):
l=fileLists.listTableFiles([
'2014.3765.1.1.department.set(2015,2016).csv',
])
self.assertEqual(len(l),1)
self.assertIsInstance(l[0].action,fileLists.SetAction)
self.assertEqual(l[0].action.fiscalYears,{2015,2016})
def testDiffset(self):
l=fileLists.listTableFiles([
'2014.3765.1.1.department.diffset(1234,2015,2016).csv',
])
self.assertEqual(len(l),1)
self.assertIsInstance(l[0].action,fileLists.DiffsetAction)
self.assertEqual(l[0].action.documentNumber,1234)
self.assertEqual(l[0].action.fiscalYears,{2015,2016})
if __name__=='__main__':
unittest.main()
| AntonKhorev/spb-budget-db | 3-db/testFileLists.py | Python | bsd-2-clause | 1,567 | 0.051053 |
"""
Classes to cache and read specific items from github issues in a uniform way
"""
from functools import partial as Partial
import datetime
import time
import shelve
# Requires PyGithub version >= 1.13 for access to raw_data attribute
import github
# Needed to not confuse cached 'None' objects
class Nothing(object):
raw_data = None
# Needed to signal list cache, not github object
class SearchResults(object):
def __init__(self, *stuff):
self.raw_data = stuff
class GithubCache(object):
"""
Auto-refreshing github.GithubObject.GithubObject from dict
"""
cache_hits = 0
cache_misses = 0
cache_lifetimes = {
'default': datetime.timedelta(hours=2),
github.GitCommit.GitCommit: datetime.timedelta(days=30),
github.Commit.Commit: datetime.timedelta(days=30),
github.Issue.Issue: datetime.timedelta(minutes=30),
github.PullRequest.PullRequest: datetime.timedelta(hours=1),
# Special case for github.Issue.Issue
'closed': datetime.timedelta(days=30),
SearchResults: datetime.timedelta(minutes=10),
github.NamedUser.NamedUser: datetime.timedelta(hours=2),
github.GitAuthor.GitAuthor: datetime.timedelta(days=9999),
'total_issues': datetime.timedelta(days=9999)
}
def __init__(self, github_obj, cache_get_partial, cache_set_partial,
cache_del_partial, pre_fetch_partial, fetch_partial):
self.github = github_obj
self.cache_get = cache_get_partial # Returns native dict
self.cache_set = cache_set_partial # called with value=dict
self.cache_del = cache_del_partial
self.pre_fetch = pre_fetch_partial # called with nothing
self.fetch = fetch_partial # Returns github.GithubObject.GithubObject
def __call__(self):
"""
Retrieve instance from fresh or cached data
"""
# microseconds aren't useful when fetch takes ~1 second
now = datetime.datetime.utcnow()
now = datetime.datetime(year=now.year, month=now.month,
day=now.day, hour=now.hour,
minute=now.minute, second=0, microsecond=0)
try:
data = self.cached_data()
if data['expires'] < now:
raise KeyError # refresh cache
self.cache_hits += 1
except KeyError:
data = self.fetched_data(now)
self.cache_set(value=data)
self.cache_misses += 1
# Any exceptions thrown during conversion should purge cache entry
try:
# Format data for consumption
if data['klass'] == github.PaginatedList.PaginatedList:
inside_klass = data['inside_klass']
result = []
for item in data['raw_data']:
result.append(
self.github.create_from_raw_data(inside_klass,
item))
return result
elif data['klass'] == Nothing:
return None # it's a None object
elif data['klass'] == SearchResults:
return data['raw_data'] # just the contents
else:
return self.github.create_from_raw_data(data['klass'],
data['raw_data'])
except:
try:
self.cache_del()
except KeyError:
pass # doesn't exist in cache, ignore
raise # original exception
@staticmethod
def format_data(klass, expires, raw_data, inside_klass=None):
"""
Enforce uniform data format for fetched data
"""
if inside_klass is None:
return {'klass': klass,
'fetched': datetime.datetime.utcnow(),
'expires': expires,
'raw_data': raw_data}
else:
return {'klass': klass,
'inside_klass': inside_klass,
'fetched': datetime.datetime.utcnow(),
'expires': expires,
'raw_data': raw_data}
def fetched_data(self, now):
"""
Return dictionary containing freshly fetched values
"""
try:
if callable(self.pre_fetch):
self.pre_fetch()
fetched_obj = self.fetch()
except github.GithubException, detail:
if detail.status == 404:
raise KeyError('Github item not-found error while calling %s '
'with args=%s and dargs=%s' % (self.fetch.func,
self.fetch.args,
self.fetch.keywords))
else:
raise
if fetched_obj is None:
fetched_obj = Nothing()
klass = fetched_obj.__class__
# github.PaginatedList.PaginatedList need special handling
if isinstance(fetched_obj, github.PaginatedList.PaginatedList):
raw_data = [item.raw_data for item in fetched_obj]
inside_klass = fetched_obj[0].__class__
expires = now + self.cache_lifetimes.get(inside_klass,
self.cache_lifetimes['default'])
return self.__class__.format_data(klass,
now + self.cache_lifetimes.get(
inside_klass,
self.cache_lifetimes[
'default']),
raw_data, inside_klass)
else:
expires = now + self.cache_lifetimes.get(klass,
# else default
self.cache_lifetimes['default'])
# closed issues/pull requests don't change much
if hasattr(fetched_obj, 'closed_at'):
if fetched_obj.closed_at is not None:
expires = now + self.cache_lifetimes['closed']
return self.__class__.format_data(klass, expires,
fetched_obj.raw_data)
def cached_data(self):
"""
Return dictionary containing cached values or raise KeyError
"""
try:
return self.cache_get() # maybe raise KeyError or TypeError
except KeyError:
raise
except:
# Try to delete the entry
self.cache_del()
raise
class GithubIssuesBase(list):
"""
Base class for cached list of github issues
"""
# Force static pickle protocol version
protocol = 2
# Class to use for cache management
cache_class = GithubCache
def __init__(self, github_obj, repo_full_name, cache_filename):
"""
Initialize cache and reference github repository issues
"""
self.github = github_obj
self.repo_full_name = repo_full_name
self.shelf = shelve.open(filename=cache_filename,
protocol=self.protocol,
writeback=True)
# Avoid exceeding rate-limit per hour
requests = self.github.rate_limiting[1] # requests per hour
period = 60.0 * 60.0 # one hour in seconds
sleeptime = period / requests
self.pre_fetch_partial = Partial(time.sleep, sleeptime)
# self.pre_fetch_partial = None # cheat-mode enable (no delays)
repo_cache_key = 'repo_%s' % self.repo_full_name
# get_repo called same way throughout instance life
cache_get_partial = Partial(self.shelf.__getitem__, repo_cache_key)
cache_set_partial = Partial(self.shelf.__setitem__, repo_cache_key)
cache_del_partial = Partial(self.shelf.__delitem__, repo_cache_key)
fetch_partial = Partial(self.github.get_repo,
self.repo_full_name)
# Callable instance retrieves cached or fetched value for key
self.get_repo = self.cache_class(self.github,
cache_get_partial,
cache_set_partial,
cache_del_partial,
self.pre_fetch_partial,
fetch_partial)
super(GithubIssuesBase, self).__init__()
def __del__(self):
"""
Make sure cache is saved
"""
try:
self.shelf.close()
except AttributeError:
pass # Open must have failed
def __len__(self):
"""
Binary search through issue numbers until largest identified
"""
increment = 1000
last_issue = 1
if not self.__contains__(last_issue):
return 0 # no issues
while increment > 0:
while self.__contains__(last_issue):
last_issue += increment
# Fall back to prior one
last_issue -= increment
# Chop increment in half
increment /= 2
return last_issue
def __contains__(self, key):
try:
# Must call this classes method specifically
GithubIssuesBase.__getitem__(self, key)
except KeyError:
return False
return True
def __iter__(self):
for key in self.keys():
yield self[key]
def __setitem__(self, key, value):
raise KeyError("Read only mapping while trying to set %s to %s"
% (str(key), str(value)))
def __delitem__(self, key):
raise KeyError(
"Read only mapping while trying to delete %s" % str(key))
def __getitem__(self, key):
"""
Return a standardized dict of github issue unless NoEnumerate=True
"""
repo = self.get_repo()
# Enforce uniform key string
cache_key = self.get_issue_cache_key(key)
fetch_partial = Partial(repo.get_issue, int(key))
item = self.get_gh_obj(cache_key, fetch_partial)
# No exception raised, update cache on disk
self.shelf.sync()
return item
def get_issue_cache_key(self, number):
return 'repo_%s_issue_%s' % (self.repo_full_name, str(int(number)))
def has_key(self, key):
return self.__contains__(key)
def items(self):
# Iterator comprehension
return (self[key] for key in self.keys())
def keys(self):
# Iterators are simply better
return xrange(1, self.__len__() + 1)
def values(self):
# Iterator comprehension
return (value for (key, value) in self.items())
class GithubIssues(GithubIssuesBase, object):
"""
Read-only List-like interface to cached github issues in standardized format
"""
# Marshal callables for key to github.Issue.Issue value
marshal_map = {
'number': lambda gh_obj: getattr(gh_obj, 'number'),
'summary': lambda gh_obj: getattr(gh_obj, 'title'),
'description': lambda gh_obj: getattr(gh_obj, 'body'),
'modified': lambda gh_obj: getattr(gh_obj, 'updated_at'),
'commits': NotImplementedError, # setup in __init__
'opened': lambda gh_obj: getattr(gh_obj, 'created_at'),
'closed': lambda gh_obj: getattr(gh_obj, 'closed_at'),
'assigned': lambda gh_obj: getattr(gh_obj, 'assignee'),
'author': lambda gh_obj: getattr(gh_obj, 'user').login,
'commit_authors': NotImplementedError, # setup in __init__
'comments': lambda gh_obj: getattr(gh_obj, 'comments'),
'comment_authors': NotImplementedError, # setup in __init__
'labels': lambda gh_obj: [label.name for label in gh_obj.labels],
'url': lambda gh_obj: getattr(gh_obj, 'html_url'),
'github_issue': lambda gh_obj: gh_obj
}
# Storage for property values
_cache_hits = 0 # Tracks temporary cache instances
_cache_misses = 0 # Tracks temporary cache instances
def __init__(self, github_obj, repo_full_name):
"""
Initialize cache and reference github repository issues
"""
cache_filename = self.__class__.__name__ + '.cache'
super(GithubIssues, self).__init__(github_obj,
repo_full_name,
cache_filename)
# These marshal functions require state
self.marshal_map['commits'] = self.gh_pr_commits
self.marshal_map['commit_authors'] = self.gh_pr_commit_authors
self.marshal_map['comment_authors'] = self.gh_issue_comment_authors
def __del__(self):
self.vacuum()
super(GithubIssues, self).__del__()
def vacuum(self):
"""Vacuum up all expired entries"""
# Can't modify list while iterating
keys_to_del = []
now = datetime.datetime.utcnow()
for key, value in self.shelf.items():
# no need to be precise
if value['expires'] <= now:
keys_to_del.append(key)
for key in keys_to_del:
del self.shelf[key]
@property
def cache_hits(self):
return self.get_repo.cache_hits + self._cache_hits
@property
def cache_misses(self):
return self.get_repo.cache_misses + self._cache_misses
def __getitem__(self, key):
"""
Return a standardized dict of github issue
"""
item = self.marshal_gh_obj(super(GithubIssues, self).__getitem__(key))
self.shelf.sync()
return item
def __len__(self):
"""
Return cached number of issues
"""
cache_key = 'repo_%s_total_issues' % self.repo_full_name
# seconds aren't useful when fetch takes > 1 minute
now = datetime.datetime.utcnow()
now = datetime.datetime(year=now.year, month=now.month,
day=now.day, hour=now.hour,
minute=now.minute, second=0, microsecond=0)
# Easier to do custom caching behavior here than fuss with GithubCache
try:
cache_data = self.shelf.__getitem__(cache_key)
if cache_data['expires'] < now:
raise KeyError
# Bypass search_result caching used in self.search()
searchresult = self.make_search_results(
{'since': cache_data['since']})
# about to change the number
cache_data['since'] = now
# total equal to old count plus new count since then
cache_data['raw_data'] += len(searchresult.raw_data)
except KeyError:
cache_data = {}
# doesn't expire ever
cache_data['expires'] = now + GithubCache.cache_lifetimes[
'total_issues']
cache_data['since'] = now
# This will take a while if issue cache is stale
cache_data['raw_data'] = super(GithubIssues, self).__len__()
self.shelf.__setitem__(cache_key, cache_data)
return cache_data['raw_data']
def get_gh_obj(self, cache_key, fetch_partial):
"""
Helper to get object possibly from cache and update counters
"""
cache_get_partial = Partial(self.shelf.__getitem__,
cache_key)
cache_set_partial = Partial(self.shelf.__setitem__,
cache_key)
cache_del_partial = Partial(self.shelf.__delitem__,
cache_key)
# Callable instance could change every time
get_obj = GithubCache(self.github,
cache_get_partial,
cache_set_partial,
cache_del_partial,
self.pre_fetch_partial,
fetch_partial)
result = get_obj()
self._cache_hits += get_obj.cache_hits
self._cache_misses += get_obj.cache_misses
return result # DOES NOT SYNC DATA!
def search(self, criteria):
"""
Return a list of issue-numbers that match a search criteria.
:param criteria: Dictionary of search terms
state - str - 'open', 'closed'
assignee - list of str (login), "none" or "*"
mentioned - str (login)
labels - list of str (label name)
sort - str - 'created', 'updated', 'comments'
direction - str - 'asc', 'desc'
since - datetime.datetime
"""
valid_criteria = {}
# use search dictionary to form hash for cached results
search_cache_key = 'issue_search'
# Validate & transform criteria
if criteria.has_key('state'):
state = str(criteria['state'])
if state not in ('open', 'closed'):
raise ValueError("'state' criteria must be 'open' or 'closed'")
valid_criteria['state'] = state
search_cache_key = '%s_%s' % (search_cache_key, state)
if criteria.has_key('assignee'):
assignee = str(criteria['assignee'])
search_cache_key = '%s_%s' % (search_cache_key, assignee)
if assignee in ('none', '*'):
valid_criteria['assignee'] = assignee
else:
# returns github.NamedUser.NamedUser
valid_criteria['assignee'] = self.get_gh_user(assignee)
if criteria.has_key('mentioned'):
            mentioned = str(criteria['mentioned'])
search_cache_key = '%s_%s' % (search_cache_key, mentioned)
if mentioned in ('none', '*'):
valid_criteria['mentioned'] = mentioned
else:
# returns github.NamedUser.NamedUser
valid_criteria['mentioned'] = self.get_gh_user(mentioned)
if criteria.has_key('labels'):
labels = criteria['labels']
if not isinstance(labels, list):
raise ValueError("'lables' criteria must be a list")
valid_criteria['labels'] = []
for name in labels:
search_cache_key = '%s_%s' % (search_cache_key, labels)
valid_criteria['labels'].append(self.get_gh_label(str(name)))
if criteria.has_key('sort'):
sort = str(criteria['sort'])
if sort not in ('created', 'updated', 'comments'):
raise ValueError("'sort' criteria must be 'created', 'updated'"
", 'comments'")
valid_criteria['sort'] = sort
search_cache_key = '%s_%s' % (search_cache_key, sort)
if criteria.has_key('direction'):
direction = str(criteria['direction'])
if direction not in ('asc', 'desc'):
raise ValueError("'direction' criteria must be 'asc', 'desc'")
valid_criteria['direction'] = direction
search_cache_key = '%s_%s' % (search_cache_key, direction)
if criteria.has_key('since'):
since = criteria['since']
if not isinstance(since, datetime.datetime):
raise ValueError("'since' criteria must be a "
"datetime.datetime")
# second and milisecond not useful to search or cache
since = datetime.datetime(year=since.year,
month=since.month,
day=since.day,
hour=since.hour,
minute=since.minute,
second=0,
microsecond=0)
search_cache_key = '%s_%s' % (search_cache_key, since.isoformat())
valid_criteria['since'] = since
# Do not perform search operation unless no cached results
# or cached results have expired
fetch_partial = Partial(self.make_search_results, valid_criteria)
# This could take an arbitrarily LONG time
return self.get_gh_obj(search_cache_key, fetch_partial)
def make_search_results(self, valid_criteria):
"""
Return a SearchResults instance from issue numbers found by search
"""
repo = self.get_repo()
result = repo.get_issues(**valid_criteria)
return SearchResults(*[issue.number for issue in result])
def clean_cache_entry(self, key):
"""
Remove an entry from cache, ignoring any KeyErrors
"""
try:
del self.shelf[key]
except KeyError:
pass
def get_gh_user(self, login):
cache_key = 'github_user_%s' % login
fetch_partial = Partial(self.github.get_user, login)
try:
return self.get_gh_obj(cache_key, fetch_partial)
except KeyError:
raise ValueError('login %s is not a valid github user' % login)
def get_gh_label(self, name):
repo = self.get_repo()
cache_key = str('repo_%s_label_%s' % (self.repo_full_name, name))
fetch_partial = Partial(repo.get_label, name)
try:
return self.get_gh_obj(cache_key, fetch_partial)
except KeyError:
raise ValueError('label %s is not valid for repo %s' % (name,
self.repo_full_name))
def marshal_gh_obj(self, gh_issue):
"""
Translate a github issue object into dictionary w/ fixed keys
"""
mkeys = self.marshal_map.keys()
return dict([(key, self.marshal_map[key](gh_issue)) for key in mkeys])
@staticmethod
def gh_issue_is_pull(gh_issue):
"""
Return True/False if gh_issue is a pull request or not
"""
pullreq = gh_issue.pull_request
if pullreq is not None:
if (pullreq.diff_url is None and
pullreq.html_url is None and
pullreq.patch_url is None):
return False
else:
return False
# pullreq not None but pullreq attributes are not None
return True
# marshal_map method
def gh_issue_comment_authors(self, gh_issue):
"""
Retrieve a list of comment author e-mail addresses
"""
if gh_issue.comments > 0:
num = gh_issue.number
cache_key = ('repo_%s_issue_%s_comments'
% (self.repo_full_name, num))
fetch_partial = Partial(gh_issue.get_comments)
authors = set()
for comment in self.get_gh_obj(cache_key, fetch_partial):
# Referencing user attribute requires a request, so cache it
user_cache_key = cache_key + '_%s_user' % comment.id
user_fetch_partial = Partial(getattr, comment, 'user')
try:
user = self.get_gh_obj(user_cache_key, user_fetch_partial)
except:
# Also clean up comments cache
self.clean_cache_entry(cache_key)
raise # original exception
authors.add(user.email)
return authors
else:
return None
# marshal_map method
def gh_pr_commit_authors(self, gh_issue):
"""
Return list of commit author e-mail addresses for a pull-request
"""
if GithubIssues.gh_issue_is_pull(gh_issue):
num = gh_issue.number
repo = self.get_repo()
cache_key = 'repo_%s_pull_%s' % (self.repo_full_name, str(num))
fetch_partial = Partial(repo.get_pull, num)
pull = self.get_gh_obj(cache_key, fetch_partial)
if pull.commits is None or pull.commits < 1:
return None # No commits == no commit authors
cache_key = 'repo_%s_pull_%s_commits' % (self.repo_full_name,
str(num))
fetch_partial = Partial(pull.get_commits)
authors = set()
for commit in self.get_gh_obj(cache_key, fetch_partial):
# Referencing commit author requires a request, cache it.
author_cache_key = cache_key + '_%s_author' % str(commit.sha)
author_fetch_partial = Partial(getattr, commit, 'author')
try:
author_obj = self.get_gh_obj(author_cache_key,
author_fetch_partial)
except:
# clean up commit list cache entry also
self.clean_cache_entry(cache_key)
raise # original exception
# Retrieve e-mail from git commit object
if author_obj is None:
# Referencing git commit requires a request, cache it
gitcommit_cache_key = (cache_key + '_%s_gitcommit'
% str(commit.sha))
gitcommit_fetch_partial = Partial(getattr, commit,
'commit') # git commit
try:
gitcommit = self.get_gh_obj(gitcommit_cache_key,
gitcommit_fetch_partial)
except:
# Need to clean commit and gitcommit entries
self.clean_cache_entry(cache_key)
self.clean_cache_entry(gitcommit_cache_key)
raise
authors.add(gitcommit.author.email)
else: # Author is a github user
authors.add(author_obj.login)
return authors
return None # not a pull request
# marshal_map method
def gh_pr_commits(self, gh_issue):
"""
Retrieves the number of commits on a pull-request, None if not a pull.
"""
if GithubIssues.gh_issue_is_pull(gh_issue):
num = gh_issue.number
repo = self.get_repo()
cache_key = 'repo_%s_pull_%s' % (self.repo_full_name, str(num))
fetch_partial = Partial(repo.get_pull, num)
pull = self.get_gh_obj(cache_key, fetch_partial)
return pull.commits
return None # not a pull request
class MutateError(KeyError):
def __init__(self, key, number):
super(MutateError, self).__init__("Unable to modify %s on issue %d"
% (str(key), number))
class MutableIssue(dict):
"""Allow modification of some issue values"""
def __init__(self, github_issues, issue_number):
if not isinstance(github_issues, GithubIssues):
raise ValueError("github_issues %s is not a GithubIssues, it's a %s"
% (str(github_issues), str(type(github_issues))))
# make sure issue_number is valid and cached
junk = github_issues[issue_number]
del junk
# Private for private _github_issue property access
self._github_issues = github_issues
self._issue_number = issue_number
super(MutableIssue, self).__init__()
@property
def _github_issue(self):
return self._github_issues[self._issue_number]
@property
def _issue_cache_key(self):
        return self._github_issues.get_issue_cache_key(self._issue_number)
def _setdelitem(self, opr, key, value):
if key not in self._github_issues.marshal_map.keys():
raise MutateError(key, self._issue_number)
methodname = '%s_%s' % (opr, key)
if callable(getattr(self, methodname)):
method = getattr(self, methodname)
if opr == 'set':
method(value)
else:
method()
else:
raise MutateError(key, self._issue_number)
def __getitem__(self, key):
# Guarantees fresh/cached data for every call
return self._github_issue[key]
def __setitem__(self, key, value):
self._setdelitem('set', key, value)
def __delitem__(self, key):
self._setdelitem('del', key, None)
def set_labels(self, value):
"""
        Merge a list of new labels into the existing label set
"""
new_labels = set(value)
old_labels = set(self._github_issue['labels'])
change_list = list(new_labels | old_labels)
get_gh_label = self._github_issues.get_gh_label # save typing
# Raise exception if any label name is bad
gh_labels = [get_gh_label(label) for label in change_list]
# Access PyGithub object to change labels
self._github_issue['github_issue'].set_labels(*gh_labels)
# Force retrieval of changed item
self._github_issues.clean_cache_entry(self._issue_cache_key)
def del_labels(self):
"""
        Remove all labels from an issue
"""
self._github_issue['github_issue'].delete_labels()
# Force retrieval of changed item
self._github_issues.clean_cache_entry(self._issue_cache_key)
# TODO: Write get_*(), set_*(), del_*() for other dictionary keys
| autotest/virt-test | tools/github/github_issues.py | Python | gpl-2.0 | 29,542 | 0.000609 |
#!/usr/bin/env python
class AllPermutations(object):
def __init__(self, arr):
self.arr = arr
def all_permutations(self):
results = []
used = []
self._all_permutations(self.arr, used, results)
return results
def _all_permutations(self, to_use, used, results):
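        # Backtracking: an empty `to_use` means `used` holds a complete
        # permutation; otherwise branch on each remaining element.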
if len(to_use) == 0:
results.append(used)
for i, x in enumerate(to_use):
new_used = used + [x]
new_to_use = to_use[:i] + to_use[i+1:]
self._all_permutations(new_to_use, new_used, results)
def main():
arr = [1, 2, 3, 4]
ap = AllPermutations(arr)
results = ap.all_permutations()
for x in results:
print x
print len(results)
if __name__ == "__main__":
main() | davjohnst/fundamentals | fundamentals/backtracking/all_permutations.py | Python | apache-2.0 | 772 | 0.001295 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
import behave
import os
import parse
from common import *
@behave.step("I use the repository \"{repo}\"")
def step_repo_condition(context, repo):
if "repos" not in context.dnf:
context.dnf["repos"] = []
if repo not in context.dnf["repos"]:
context.dnf["repos"].append(repo)
@behave.step('I require client certificate verification with certificate "{client_cert}" and key "{client_key}"')
def step_impl(context, client_cert, client_key):
if "client_ssl" not in context.dnf:
context.dnf["client_ssl"] = dict()
context.dnf["client_ssl"]["certificate"] = os.path.join(context.dnf.fixturesdir,
client_cert)
context.dnf["client_ssl"]["key"] = os.path.join(context.dnf.fixturesdir,
client_key)
@parse.with_pattern(r"http|https")
def parse_repo_type(text):
if text in ("http", "https"):
return text
assert False
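# Register the custom type so step patterns can use "{rtype:repo_type}".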
behave.register_type(repo_type=parse_repo_type)
@behave.step("I use the {rtype:repo_type} repository based on \"{repo}\"")
def step_impl(context, rtype, repo):
assert hasattr(context, 'httpd'), 'Httpd fixture not set. Use @fixture.httpd tag.'
if rtype == "http":
host, port = context.httpd.new_http_server(context.dnf.repos_location)
else:
cacert = os.path.join(context.dnf.fixturesdir,
'certificates/testcerts/ca/cert.pem')
cert = os.path.join(context.dnf.fixturesdir,
'certificates/testcerts/server/cert.pem')
key = os.path.join(context.dnf.fixturesdir,
'certificates/testcerts/server/key.pem')
client_ssl = context.dnf._get(context, "client_ssl")
if client_ssl:
client_cert = client_ssl["certificate"]
client_key = client_ssl["key"]
host, port = context.httpd.new_https_server(
context.dnf.repos_location, cacert, cert, key,
client_verification=bool(client_ssl))
http_reposdir = "/http.repos.d"
repo_id = '{}-{}'.format(rtype, repo)
repocfg = ("[{repo_id}]\n"
"name={repo_id}\n"
"baseurl={rtype}://{host}:{port}/{repo}/\n"
"enabled=1\n"
"gpgcheck=0\n"
)
if rtype == "https":
repocfg += "sslcacert={cacert}\n"
if client_ssl:
repocfg += "sslclientcert={client_cert}\n"
repocfg += "sslclientkey={client_key}\n"
# generate repo file based on "repo" in /http.repos.d
repos_path = os.path.join(context.dnf.installroot, http_reposdir.lstrip("/"))
ensure_directory_exists(repos_path)
repo_file_path = os.path.join(repos_path, '{}.repo'.format(repo_id))
create_file_with_contents(
repo_file_path,
repocfg.format(**locals()))
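    # For illustration, with hypothetical values repo="base", rtype="http",
    # host="localhost" and port=8080, the rendered /http.repos.d/http-base.repo
    # would contain:
    #   [http-base]
    #   name=http-base
    #   baseurl=http://localhost:8080/base/
    #   enabled=1
    #   gpgcheck=0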
# add /http.repos.d to reposdir
current_reposdir = context.dnf._get(context, "reposdir")
    if repos_path not in current_reposdir:
context.dnf._set("reposdir", "{},{}".format(current_reposdir, repos_path))
# enable newly created http repo
context.execute_steps('Given I use the repository "{}"'.format(repo_id))
@behave.step("I disable the repository \"{repo}\"")
def step_repo_condition(context, repo):
if "repos" not in context.dnf:
context.dnf["repos"] = []
context.dnf["repos"].remove(repo)
@behave.given("There are no repositories")
def given_no_repos(context):
context.dnf["reposdir"] = "/dev/null"
| kkaarreell/ci-dnf-stack | dnf-behave-tests/features/steps/repo.py | Python | gpl-3.0 | 3,594 | 0.004174 |
import src
import random
class Shocker(src.items.Item):
"""
    in-game item used as a resource to build bombs and similar devices;
    it has a habit of exploding at inconvenient times
"""
type = "Shocker"
def __init__(self):
"""
set up internal state
"""
super().__init__(display="/\\")
def apply(self, character):
"""
Parameters:
character: the character trying to use the item
"""
compressorFound = None
for item in character.inventory:
if isinstance(item,src.items.itemMap["CrystalCompressor"]):
compressorFound = item
break
if compressorFound:
if self.container and isinstance(self.container,src.rooms.Room):
if hasattr(self.container,"electricalCharges"):
if self.container.electricalCharges < self.container.maxElectricalCharges:
self.container.electricalCharges += 1
character.addMessage("you activate the shocker and increase the rooms charges to %s"%(self.container.electricalCharges,))
character.inventory.remove(compressorFound)
else:
character.addMessage("this room is fully charged")
else:
character.addMessage("this room can't be charged")
else:
character.addMessage("no room found")
else:
character.addMessage("no crystal compressor found in inventory")
src.items.addType(Shocker)
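# Minimal usage sketch (the surrounding setup is hypothetical; in-game the
# container is assigned when the item is placed in a room):
#   shocker = Shocker()
#   shocker.container = room      # a room tracking electricalCharges
#   shocker.apply(character)      # character must carry a CrystalCompressor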
| MarxMustermann/OfMiceAndMechs | src/itemFolder/military/shocker.py | Python | gpl-3.0 | 1,603 | 0.004991 |
#!/usr/bin/python
import sys
# what is the command
command = sys.argv[1]
source = sys.argv[2]
print "Command: ", command
print "Source: ", source
from pythonforandroid.recipe import PythonRecipe
class SixRecipe(PythonRecipe):
version = '1.15.0'
url = 'https://pypi.python.org/packages/source/s/six/six-{version}.tar.gz'
depends = ['setuptools']
recipe = SixRecipe()
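# Note: python-for-android discovers recipes through this module-level
# `recipe` instance; for a pure-Python package like six, the version, url
# template and depends attributes above are the whole recipe.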
| PKRoma/python-for-android | pythonforandroid/recipes/six/__init__.py | Python | mit | 236 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('meals', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='wbw_list',
name='list_id',
field=models.CharField(max_length=40, unique=True),
),
migrations.AlterField(
model_name='participant',
name='wbw_id',
field=models.CharField(max_length=40, unique=True),
),
]
| joostrijneveld/eetvoudig | meals/migrations/0002_auto_20161006_1640.py | Python | cc0-1.0 | 576 | 0 |
import calendar
from optparse import make_option
import time
from urllib import urlencode
from django.core.management.base import BaseCommand
import jwt
import requests
class Command(BaseCommand):
help = 'Simulate a BlueVia postback to mark a payment as complete.'
option_list = BaseCommand.option_list + (
make_option('--trans-id', action='store',
help='BlueVia transaction ID', default='1234'),
make_option('--secret', action='store',
help='Marketplace secret for signature verification'),
make_option('--contrib', action='store',
help='Contribution UUID'),
make_option('--addon', action='store',
help='ID of addon that was purchased'),
make_option('--url', action='store',
help='Postback URL. Default: %default',
default='http://localhost:8001/services/bluevia/postback'),
)
def handle(self, *args, **options):
        # optparse always populates these keys (with None when omitted),
        # so check the values rather than key membership.
        assert options.get('contrib'), 'require --contrib'
        assert options.get('addon'), 'require --addon'
issued_at = calendar.timegm(time.gmtime())
prod_data = urlencode({'contrib_uuid': options['contrib'],
'addon_id': options['addon']})
purchase = {'iss': 'tu.com',
'aud': 'marketplace.mozilla.org',
'typ': 'tu.com/payments/inapp/v1',
'iat': issued_at,
'exp': issued_at + 3600, # expires in 1 hour
'request': {
'name': 'Simulated Product',
'description': 'Simulated Product Description',
'price': '0.99',
'currencyCode': 'USD',
'productData': prod_data},
'response': {
'transactionID': options['trans_id']
}}
purchase_jwt = jwt.encode(purchase, options['secret'])
print 'posting JWT to %s' % options['url']
res = requests.post(options['url'], purchase_jwt, timeout=5)
res.raise_for_status()
print 'OK'
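# Example invocation (argument values are hypothetical):
#   python manage.py post_bluevia_payment --contrib=<uuid> --addon=42 \
#       --secret=<marketplace-secret> --trans-id=bluevia:999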
| mozilla/zamboni | mkt/purchase/management/commands/post_bluevia_payment.py | Python | bsd-3-clause | 2,183 | 0 |
# THIS FILE IS GENERATED FROM NUMPY SETUP.PY
short_version = '1.7.2'
version = '1.7.2'
full_version = '1.7.2'
git_revision = 'f3ee0735c1c372dfb9e0efcaa6846bd05e53b836'
release = True
if not release:
version = full_version
| beiko-lab/gengis | bin/Lib/site-packages/numpy/version.py | Python | gpl-3.0 | 238 | 0 |
import unittest
from fam.tests.models.test01 import Dog, Cat, Person, JackRussell, Monarch
from fam.mapper import ClassMapper
class MapperTests(unittest.TestCase):
def setUp(self):
self.mapper = ClassMapper([Dog, Cat, Person, JackRussell, Monarch])
def tearDown(self):
pass
def test_sub_class_refs(self):
self.assertEqual(set(Monarch.fields.keys()), set(["name", "country", "cats", "dogs", "animals", "callbacks"]))
self.assertEqual(set(Monarch.cls_fields.keys()), {"country"})
| paulharter/fam | src/fam/tests/test_couchdb/test_mapping.py | Python | mit | 535 | 0.011215 |
#!/usr/bin/env python3
# The MIT License
# Copyright (c) 2016 Estonian Information System Authority (RIA), Population Register Centre (VRK)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Test case verifying that when the operational monitoring buffer size is set
# to zero, the operational monitoring data of X-Road requests is not stored by
# the operational monitoring daemon and cannot be queried.
# Expecting that the value of op-monitor-buffer.size has been set
# to 0 via run_tests.py.
import os
import sys
sys.path.append('..')
import python_common as common
def run(client_security_server_address, producer_security_server_address,
ssh_user, request_template_dir):
xroad_request_template_filename = os.path.join(
request_template_dir, "simple_xroad_query_template.xml")
query_data_client_template_filename = os.path.join(
request_template_dir, "query_operational_data_client_template.xml")
query_data_producer_template_filename = os.path.join(
request_template_dir, "query_operational_data_producer_template.xml")
client_timestamp_before_requests = common.get_remote_timestamp(
client_security_server_address, ssh_user)
producer_timestamp_before_requests = common.get_remote_timestamp(
producer_security_server_address, ssh_user)
xroad_message_id = common.generate_message_id()
print("\nGenerated message ID %s for X-Road request" % (xroad_message_id, ))
### Regular and operational data requests and the relevant checks
print("\n---- Sending an X-Road request to the client's security server ----\n")
request_contents = common.format_xroad_request_template(
xroad_request_template_filename, xroad_message_id)
print("Generated the following X-Road request: \n")
print(request_contents)
response = common.post_xml_request(
client_security_server_address, request_contents)
print("Received the following X-Road response: \n")
xml = common.parse_and_clean_xml(response.text)
print(xml.toprettyxml())
common.check_soap_fault(xml)
common.wait_for_operational_data()
client_timestamp_after_requests = common.get_remote_timestamp(
client_security_server_address, ssh_user)
producer_timestamp_after_requests = common.get_remote_timestamp(
producer_security_server_address, ssh_user)
# Now make operational data requests to both security servers and check the
# response payloads.
print("\n---- Sending an operational data request to the client's security server ----\n")
message_id = common.generate_message_id()
print("Generated message ID %s for query data request" % (message_id, ))
request_contents = common.format_query_operational_data_request_template(
query_data_client_template_filename, message_id,
client_timestamp_before_requests, client_timestamp_after_requests)
print("Generated the following query data request for the client's security server: \n")
print(request_contents)
response = common.post_xml_request(
client_security_server_address, request_contents,
get_raw_stream=True)
mime_parts, raw_response = common.parse_multipart_response(response)
if mime_parts:
soap_part, record_count = common.get_multipart_soap_and_record_count(mime_parts[0])
common.print_multipart_soap_and_record_count(soap_part, record_count)
# op-monitor-buffer.size=0 must result in an empty response
common.check_record_count(record_count, 0)
else:
common.parse_and_check_soap_response(raw_response)
message_id = common.generate_message_id()
print("\nGenerated message ID %s for operational data request" % (message_id, ))
request_contents = common.format_query_operational_data_request_template(
query_data_producer_template_filename, message_id,
producer_timestamp_before_requests, producer_timestamp_after_requests)
print("Generated the following operational data request for the producer's " \
"security server: \n")
print(request_contents)
response = common.post_xml_request(
producer_security_server_address, request_contents,
get_raw_stream=True)
mime_parts, raw_response = common.parse_multipart_response(response)
if mime_parts:
soap_part, record_count = common.get_multipart_soap_and_record_count(mime_parts[0])
common.print_multipart_soap_and_record_count(soap_part, record_count, is_client=False)
# op-monitor-buffer.size=0 must result in an empty response
common.check_record_count(record_count, 0)
else:
common.parse_and_check_soap_response(raw_response)
| ria-ee/X-Road | src/systemtest/op-monitoring/integration/testcases/test_zero_buffer_size.py | Python | mit | 5,787 | 0.003283 |
PLUGIN_INFO = {
"versions": [
{
"plugin_version": 1,
"supported_mod_versions": ["0.6"],
"download_url": "https://www.myteamspeak.com/addons/01a0f828-894c-45b7-a852-937b47ceb1ed"
}
]
}
| jhakonen/wot-teamspeak-mod | test/fute/test_helpers/constants.py | Python | lgpl-2.1 | 199 | 0.040201 |
"""
Main controller.
"""
import json
from Server.Importer import ImportFromModule
class ControllerMain(ImportFromModule("Server.ControllerBase", "ControllerBase")):
"""
Main controller.
"""
def ShowPage(self, uriParameters, postedParameters):
"""
Shows the home page.
"""
webPage = ImportFromModule("WebApplication.Views.PageView", "PageView")("Main")
self.SetOutput(webPage.GetContent())
def EchoText(self, uriParameters, postedParameters):
"""
        Echo the incoming text.
"""
self.SetOutput(json.dumps(uriParameters, indent=4))
| allembedded/python_web_framework | WebApplication/Controllers/ControllerMain.py | Python | gpl-3.0 | 630 | 0.007937 |
#!/usr/bin/python3
#
# Copyright © 2017 jared <jared@jared-devstation>
#
from pydub import AudioSegment, scipy_effects, effects
import os
import settings, util
# combine two audio samples with a crossfade
def combine_samples(acc, file2, CROSSFADE_DUR=100):
util.debug_print('combining ' + file2)
sample2 = AudioSegment.from_wav(file2)
output = acc.append(sample2, crossfade=CROSSFADE_DUR)
output = effects.normalize(output)
return output
# combine audio samples with crossfade, from within program
def combine_prog_samples(acc, nsamp, CROSSFADE_DUR=100):
output = acc.append(nsamp, crossfade=CROSSFADE_DUR)
return output
# split an audio file into low, mid, high bands
def split_file(fname):
curr_file = AudioSegment.from_file(fname)
low_seg = scipy_effects.low_pass_filter(curr_file, settings.LOW_FREQUENCY_LIM).export(fname + '_low.wav', 'wav')
mid_seg = scipy_effects.band_pass_filter(curr_file, settings.LOW_FREQUENCY_LIM, settings.HIGH_FREQUENCY_LIM).export(fname + '_mid.wav', 'wav')
high_seg = scipy_effects.high_pass_filter(curr_file, settings.HIGH_FREQUENCY_LIM).export(fname + '_high.wav', 'wav')
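# Usage sketch (file name hypothetical): splitting writes three band files
# next to the input, e.g. 'mix.wav_low.wav', 'mix.wav_mid.wav' and
# 'mix.wav_high.wav':
#   split_file('mix.wav')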
## add a sample to an existing wav
#def add_sample(fname, samplefile, CROSSFADE_DUR=100):
# new_file = combine_samples(fname, samplefile, CROSSFADE_DUR)[0]
# os.rename(fname, 'old_' + fname)
# os.rename(new_file, fname)
# return new_file[1]
| techlover10/StochasticSoundscape | src/audio.py | Python | mit | 1,411 | 0.007092 |
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Keypair management extension."""
import webob
import webob.exc
from nova.api.openstack.compute.schemas.v3 import keypairs
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api import validation
from nova.compute import api as compute_api
from nova import exception
from nova.i18n import _
ALIAS = 'os-keypairs'
authorize = extensions.extension_authorizer('compute', 'v3:' + ALIAS)
soft_authorize = extensions.soft_extension_authorizer('compute', 'v3:' + ALIAS)
class KeypairController(wsgi.Controller):
"""Keypair API controller for the OpenStack API."""
def __init__(self):
self.api = compute_api.KeypairAPI()
def _filter_keypair(self, keypair, **attrs):
clean = {
'name': keypair.name,
'public_key': keypair.public_key,
'fingerprint': keypair.fingerprint,
}
for attr in attrs:
clean[attr] = keypair[attr]
return clean
# TODO(oomichi): Here should be 201(Created) instead of 200 by v2.1
# +microversions because the keypair creation finishes when returning
# a response.
@extensions.expected_errors((400, 403, 409))
@validation.schema(keypairs.create)
def create(self, req, body):
"""Create or import keypair.
Sending name will generate a key and return private_key
and fingerprint.
You can send a public_key to add an existing ssh key
params: keypair object with:
name (required) - string
public_key (optional) - string
"""
context = req.environ['nova.context']
authorize(context, action='create')
params = body['keypair']
name = params['name']
try:
if 'public_key' in params:
keypair = self.api.import_key_pair(context,
context.user_id, name,
params['public_key'])
keypair = self._filter_keypair(keypair, user_id=True)
else:
keypair, private_key = self.api.create_key_pair(
context, context.user_id, name)
keypair = self._filter_keypair(keypair, user_id=True)
keypair['private_key'] = private_key
return {'keypair': keypair}
except exception.KeypairLimitExceeded:
msg = _("Quota exceeded, too many key pairs.")
raise webob.exc.HTTPForbidden(explanation=msg)
except exception.InvalidKeypair as exc:
raise webob.exc.HTTPBadRequest(explanation=exc.format_message())
except exception.KeyPairExists as exc:
raise webob.exc.HTTPConflict(explanation=exc.format_message())
# TODO(oomichi): Here should be 204(No Content) instead of 202 by v2.1
# +microversions because the resource keypair has been deleted completely
# when returning a response.
@wsgi.response(202)
@extensions.expected_errors(404)
def delete(self, req, id):
"""Delete a keypair with a given name."""
context = req.environ['nova.context']
authorize(context, action='delete')
try:
self.api.delete_key_pair(context, context.user_id, id)
except exception.KeypairNotFound as exc:
raise webob.exc.HTTPNotFound(explanation=exc.format_message())
@extensions.expected_errors(404)
def show(self, req, id):
"""Return data for the given key name."""
context = req.environ['nova.context']
authorize(context, action='show')
try:
# Since this method returns the whole object, functional test
# test_keypairs_get is failing, receiving an unexpected field
# 'type', which was added to the keypair object.
# TODO(claudiub): Revert the changes in the next commit, which will
# enable nova-api to return the keypair type.
keypair = self.api.get_key_pair(context, context.user_id, id)
keypair = self._filter_keypair(keypair, created_at=True,
deleted=True, deleted_at=True,
id=True, user_id=True,
updated_at=True)
except exception.KeypairNotFound as exc:
raise webob.exc.HTTPNotFound(explanation=exc.format_message())
# TODO(oomichi): It is necessary to filter a response of keypair with
# _filter_keypair() when v2.1+microversions for implementing consistent
# behaviors in this keypair resource.
return {'keypair': keypair}
@extensions.expected_errors(())
def index(self, req):
"""List of keypairs for a user."""
context = req.environ['nova.context']
authorize(context, action='index')
key_pairs = self.api.get_key_pairs(context, context.user_id)
rval = []
for key_pair in key_pairs:
rval.append({'keypair': self._filter_keypair(key_pair)})
return {'keypairs': rval}
class Controller(wsgi.Controller):
def _add_key_name(self, req, servers):
for server in servers:
db_server = req.get_db_instance(server['id'])
# server['id'] is guaranteed to be in the cache due to
# the core API adding it in its 'show'/'detail' methods.
server['key_name'] = db_server['key_name']
def _show(self, req, resp_obj):
if 'server' in resp_obj.obj:
server = resp_obj.obj['server']
self._add_key_name(req, [server])
@wsgi.extends
def show(self, req, resp_obj, id):
context = req.environ['nova.context']
if soft_authorize(context):
self._show(req, resp_obj)
@wsgi.extends
def detail(self, req, resp_obj):
context = req.environ['nova.context']
if 'servers' in resp_obj.obj and soft_authorize(context):
servers = resp_obj.obj['servers']
self._add_key_name(req, servers)
class Keypairs(extensions.V3APIExtensionBase):
"""Keypair Support."""
name = "Keypairs"
alias = ALIAS
version = 1
def get_resources(self):
resources = [
extensions.ResourceExtension(ALIAS,
KeypairController())]
return resources
def get_controller_extensions(self):
controller = Controller()
extension = extensions.ControllerExtension(self, 'servers', controller)
return [extension]
# use nova.api.extensions.server.extensions entry point to modify
# server create kwargs
# NOTE(gmann): This function is not supposed to use 'body_deprecated_param'
# parameter as this is placed to handle scheduler_hint extension for V2.1.
def server_create(self, server_dict, create_kwargs, body_deprecated_param):
create_kwargs['key_name'] = server_dict.get('key_name')
def get_server_create_schema(self):
return keypairs.server_create
| cloudbase/nova-virtualbox | nova/api/openstack/compute/plugins/v3/keypairs.py | Python | apache-2.0 | 7,665 | 0.000261 |
# -*- coding: utf-8 -*-
"""
a24tim.tests.test_models
************************
The model tests for the a24tim app.
:copyright: (c) 2012 by Carl Modén
:licence: AGPL3, see LICENSE for more details.
"""
from datetime import date
import math
import urllib2
from django.test import TestCase
import a24tim.models as models
import helpers
class TestPoints(TestCase):
"""Tests for the Point model."""
def setUp(self):
self.allegro = models.Point(number=580, name=u'Allegrogrund',
definition=u'Mellan två platser',
lat=59.5, lon=18.25, startpoint=True)
self.allegro.save()
def test_latstr(self):
"""Verifies that a latitude is correctly rendered as a string."""
self.assertEqual(self.allegro.latstr(), u'N 59° 30.00')
def test_latstr_south(self):
"""Move allegro to south half of earth and ches the coordinates there.
"""
self.allegro.lat = -self.allegro.lat
self.assertEqual(self.allegro.latstr(), u'S 59° 30.00')
def test_lonstr(self):
"""Verifies that a latitude is correctly rendered as a string."""
self.assertEqual(self.allegro.lonstr(), u'E 18° 15.00')
def test_lonstr_south(self):
"""Move allegro to south half of earth and ches the coordinates there.
"""
self.allegro.lon = -self.allegro.lon
self.assertEqual(self.allegro.lonstr(), u'W 18° 15.00')
def test_latstr_singledigit_minute(self):
"""Move allegro to someplace with a single digit minute and check the
coordinates there."""
self.allegro.lat = 59.1
self.assertEqual(self.allegro.latstr(), u'N 59° 06.00')
def test_unicode(self):
"""Verifies the unicode representation"""
self.assertEqual(
self.allegro.__unicode__(),
u"580, Allegrogrund, Mellan två platser, N 59° 30.00, E 18° 15.00, startpunkt")
def test_unicode_no_start_point(self):
"""Unicode representation where the point is not startpoint"""
self.allegro.startpoint = False
self.assertEqual(self.allegro.__unicode__(),
u"580, Allegrogrund, Mellan två platser, N 59° 30.00, E 18° 15.00")
def test_merging_of_related_points(self):
"""Checks that one can access all points distances from one query."""
self.stina = models.Point(number=579, name=u'Stinasgrund',
definition=u'En sten',
lat=59.7, lon=18.02)
self.stina.save()
self.linanas = models.Point(number=581, name=u'Linanäs',
definition=u'En prick', lat=59.8, lon=18.1)
self.linanas.save()
self.stina_allegro = models.Distance(from_point=self.stina,
to_point=self.allegro,
distance=5.8)
self.stina_allegro.save()
self.allegro_lina = models.Distance(from_point=self.allegro,
to_point=self.linanas,
distance=5.1)
self.allegro_lina.save()
distances = self.allegro.distances.all()
self.assertEqual(len(distances), 2)
class TestPointWithForecast(TestCase):
def setUp(self):
self.allegro = models.Point(number=580, name=u'Allegrogrund',
definition=u'Mellan två platser',
lat=59.5, lon=18.25, startpoint=True)
self.allegro.save()
self.patcher, self.mock_urlopen = helpers.patch_urlopen()
def tearDown(self):
self.patcher.stop()
def test_forecast_url_called(self):
"""Checks that the correct url is called"""
self.allegro.forecast()
self.mock_urlopen.assert_called_once_with('http://api.met.no/weatherapi/locationforecast/'
'1.8/?lat=59.5;lon=18.25;msl=10')
def test_forecast_url_not_found(self):
"""If the url cannot be loaded the we want handle som errors."""
self.mock_urlopen.side_effect = urllib2.URLError(
'A simulated problem with loading the url')
forecast = self.allegro.forecast()
self.assertIsNotNone(forecast[0])
    def test_forecast_new_protocol(self):
"""If the protocol is updated a status 203 will be sent."""
self.mock_urlopen.return_value.code = 203
forecast = self.allegro.forecast()
self.assertIsNotNone(forecast[0])
    def test_forecast_is_rendered(self):
        """Checks that a parsable forecast is rendered."""
forecast = self.allegro.forecast()
# Check some sample values
self.assertEqual(forecast[1]['wind_direction'], 'NE')
self.assertEqual(str(forecast[1]['pressure']), str(1012.4))
    def test_forecast_is_not_xml(self):
        """Checks what happens if the forecast is not valid XML."""
self.mock_urlopen.return_value.read.return_value = u"a\nstring\n"
forecast = self.allegro.forecast()
self.assertIsNotNone(forecast[0])
class TestSailings(TestCase):
def test_unicode(self):
"""Checking the unicode representation of a sailing"""
sail = models.Sailing(slug='2011h', finish_date=date(2011, 8, 3))
self.assertEqual(sail.__unicode__(), u'2011h, 2011-08-03')
class TestDistance(TestCase):
def setUp(self):
self.allegro = models.Point(number=580, name=u'Allegrogrund',
definition=u'Mellan två platser',
lat=59.0, lon=18.0, startpoint=True)
self.stina = models.Point(number=579, name=u'Stinasgrund',
definition=u'En sten', lat=59.1, lon=18.0)
self.allegro.save()
self.stina.save()
self.distance = models.Distance(from_point=self.stina,
to_point=self.allegro,
distance=4.2)
def test_unicode(self):
"""Checking the unicode representation of a sailing."""
representation = self.distance.__unicode__()
self.assertIn(u'580', representation)
self.assertIn(u'579', representation)
self.assertIn(u'4.2', representation)
def test_save_order(self):
"""Check that the order of from and to is in order."""
self.distance.save()
self.assertEqual(self.distance.from_point, self.stina)
self.assertEqual(self.distance.to_point, self.allegro)
def test_save_order_swapped(self):
"""Check that the order of from and to swaps to be in order."""
self.distance = models.Distance(from_point=self.allegro,
to_point=self.stina,
distance=4.2)
self.distance.save()
self.assertEqual(self.distance.from_point, self.stina)
self.assertEqual(self.distance.to_point, self.allegro)
def test_birds_distance(self):
"""Check approx forumla for north-south distances."""
self.assertAlmostEqual(self.distance.birds_distance(), 6, places=2)
def test_birds_distance_east_west(self):
"""Moves one point of the distance to check that distances in pure
east west direction also works."""
self.stina.lat = 59
self.stina.lon = 18.1
self.assertAlmostEqual(self.distance.birds_distance(),
6 * math.cos(math.radians(59)), places=2)
| carlmod/Analys24h | a24tim/tests/test_models.py | Python | agpl-3.0 | 7,591 | 0.001188 |
from django.conf.urls import url
from core.views import add_feedback
urlpatterns = [
url('^add/core/feedback', name='add_feedback', view=add_feedback),
] | bonnieblueag/farm_log | core/urls.py | Python | gpl-3.0 | 158 | 0.006329 |
import weldnumpy as wn
import numpy as np
def assert_correct(np_a, z):
'''
common part of the check.
'''
shape = []
for s in z.shape:
shape.append(s)
shape = tuple(shape)
np_a = np.reshape(np_a, shape)
    for i in range(z.shape[0]):
for j in range(z.shape[1]):
assert np_a[i][j] == z[i][j]
def test_view():
'''
Adding the iter code to a view.
In general, do we deal correctly with wn.array(view)??
'''
orig = np.random.rand(20,20)
a = orig[3:15:1,4:20:2]
# orig = np.random.rand(20,20,20)
# a = orig[3:15:3,:,:]
print(a.flags)
assert not a.flags.contiguous
a = wn.array(a)
z = np.copy(a)
shapes = a.weldobj.update(np.array(list(a.shape)))
strides = []
for s in a.strides:
strides.append(s/z.itemsize)
strides = a.weldobj.update(np.array(strides))
end = 1
for s in a.shape:
end = end*s
iter_code = 'result(for(nditer({arr}, 0L, {end}L, 1L, {shapes}, {strides}), appender, \
|b, i, e| merge(b,exp(e))))'.format(shapes=shapes, strides=strides, end=str(end), arr=a.name)
a.weldobj.weld_code = iter_code
z = np.exp(z)
# convert the data represented by weldarray 'a', to a multi-dimensional numpy array of shape as z,
# and then compare the values.
np_a = a._eval()
assert_correct(np_a, z)
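# Note on the Weld iterator used in these tests (inferred from the calls
# here, so treat the exact signature as an assumption): nditer(arr, start,
# end, step, shapes, strides) walks the flat data buffer from `start` to
# `end`, mapping indices through `shapes` and `strides`, with strides given
# in element units (byte strides divided by the itemsize).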
def test_start():
'''
Has a different start from the base array.
'''
orig = np.random.rand(20,20)
a = orig[0:20:1,0:20:1]
start = (wn.addr(a) - wn.addr(orig)) / a.itemsize
orig = wn.array(orig)
z = np.copy(a)
z = np.exp(z)
shapes = orig.weldobj.update(np.array(list(a.shape)))
strides = []
for s in a.strides:
strides.append(s/8)
strides = orig.weldobj.update(np.array(strides))
end = 1
for s in a.shape:
end = end*s
end = end + start
iter_code = 'result(for(nditer({arr}, {start}L, {end}L, 1L, {shapes}, {strides}), appender, \
|b, i, e| merge(b,exp(e))))'.format(shapes=shapes, strides=strides, end=str(end),
start=str(start), arr=orig.name)
orig.weldobj.weld_code = iter_code
np_a = orig._eval()
assert_correct(np_a, z)
def test_zip():
'''
    Zip two strided views that share the same offset from their base arrays.
'''
orig = np.random.rand(20,20)
orig2 = np.random.rand(20,20)
a = orig[5:20:1,3:20:2]
b = orig2[5:20:1,3:20:2]
start = (wn.addr(a) - wn.addr(orig)) / a.itemsize
orig = wn.array(orig)
# copying so we can test them later.
z = np.copy(a)
z2 = np.copy(b)
# added orig2 to orig's weldobject.
orig_2_name = orig.weldobj.update(orig2)
shapes = orig.weldobj.update(np.array(list(a.shape)))
strides = []
for s in a.strides:
strides.append(s/8)
strides = orig.weldobj.update(np.array(strides))
end = 1
for s in a.shape:
end = end*s
end = end + start
    iter_code = 'result(for(zip(nditer({arr}, {start}L, {end}L, 1L, {shapes}, {strides}), \
            nditer({arr2}, {start}L, {end}L, 1L, {shapes}, {strides})), \
appender, |b, i, e| merge(b,e.$0+e.$1)))'.format(shapes=shapes, strides=strides, end=str(end),
start=str(start), arr=orig.name, arr2=orig_2_name)
orig.weldobj.weld_code = iter_code
# gives us a numpy array after evaluating the nditer code above.
np_a = orig._eval()
# update the copied array.
z3 = z+z2;
# test values are equal.
assert_correct(np_a, z3)
# few different tests.
test_view()
test_start()
test_zip()
| rahulpalamuttam/weld | examples/python/nditer/nditer_test.py | Python | bsd-3-clause | 3,542 | 0.006494 |
# -*- Mode: Python; coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2011 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <[email protected]>
##
"""
stoq/gui/calendar.py:
Calendar application.
"""
import urllib
from dateutil.parser import parse
from dateutil.relativedelta import MO, relativedelta
from dateutil.tz import tzlocal, tzutc
import gtk
from stoqlib.api import api
from stoqlib.domain.person import Client
from stoqlib.gui.editors.callseditor import CallsEditor
from stoqlib.gui.editors.paymenteditor import (InPaymentEditor,
OutPaymentEditor)
from stoqlib.gui.editors.workordereditor import WorkOrderEditor
from stoqlib.gui.stockicons import (STOQ_CALENDAR_TODAY,
STOQ_CALENDAR_WEEK,
STOQ_CALENDAR_MONTH,
STOQ_CALENDAR_LIST)
from stoqlib.gui.utils.keybindings import get_accels
from stoqlib.gui.widgets.webview import WebView
from stoqlib.lib import dateutils
from stoqlib.lib.daemonutils import start_daemon
from stoqlib.lib.defaults import get_weekday_start
from stoqlib.lib.translation import stoqlib_gettext as _
from stoq.gui.shell.shellapp import ShellApp
def parse_javascript_date(jsdate):
dt = parse(jsdate, fuzzy=True)
dt = dt.replace(tzinfo=tzlocal())
date = dt.astimezone(tzutc())
date += relativedelta(months=-1)
return date
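# For example (illustrative input), a fullcalendar date string such as
# 'Tue Sep 08 2009 00:00:00 GMT-0300' is parsed fuzzily, converted from the
# local zone to UTC, and shifted back one month; the shift appears to
# compensate for JavaScript's zero-based month convention (an assumption
# based on this code, not on fullcalendar documentation).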
class CalendarView(WebView):
def __init__(self, app):
self._loaded = False
WebView.__init__(self)
self.app = app
self.get_view().connect(
'load-finished',
self._on_view__document_load_finished)
self._load_user_settings()
def _load_finished(self):
self._startup()
self._loaded = True
view = self.get_view()
view.connect('size-allocate', self._on_view__size_allocate)
x, y, width, height = view.get_allocation()
self._update_calendar_size(width, height)
def _startup(self):
options = {}
options['monthNames'] = dateutils.get_month_names()
options['monthNamesShort'] = dateutils.get_short_month_names()
options['dayNames'] = dateutils.get_day_names()
options['dayNamesShort'] = dateutils.get_short_day_names()
options['buttonText'] = {"today": _('today'),
"month": _('month'),
"week": _('week'),
"day": _('day')}
options['defaultView'] = api.user_settings.get(
'calendar-view', 'month')
# FIXME: This should not be tied to the language, rather be
# picked up from libc, but it's a bit of work to translate
# one into another so just take a shortcut
options['columnFormat'] = {
# month column format, eg "Mon", see:
# http://arshaw.com/fullcalendar/docs/text/columnFormat/
'month': _('ddd'),
# week column format: eg, "Mon 9/7", see:
# http://arshaw.com/fullcalendar/docs/text/columnFormat/
'week': _('ddd M/d'),
# day column format : eg "Monday 9/7", see:
# http://arshaw.com/fullcalendar/docs/text/columnFormat/
'day': _('dddd M/d'),
}
options['timeFormat'] = {
# for agendaWeek and agendaDay, eg "5:00 - 6:30", see:
# http://arshaw.com/fullcalendar/docs/text/timeFormat/
'agenda': _('h:mm{ - h:mm}'),
# for all other views, eg "7p", see:
# http://arshaw.com/fullcalendar/docs/text/timeFormat/
'': _('h(:mm)t'),
}
options['titleFormat'] = {
# month title, eg "September 2009", see:
# http://arshaw.com/fullcalendar/docs/text/titleFormat/
'month': _('MMMM yyyy'),
# week title, eg "Sep 7 - 13 2009" see:
# http://arshaw.com/fullcalendar/docs/text/titleFormat/
'week': _("MMM d[ yyyy]{ '—'[ MMM] d yyyy}"),
# day time, eg "Tuesday, Sep 8, 2009" see:
# http://arshaw.com/fullcalendar/docs/text/titleFormat/
'day': _('dddd, MMM d, yyyy'),
}
if get_weekday_start() == MO:
firstday = 1
else:
firstday = 0
options['firstDay'] = firstday
options['isRTL'] = (
gtk.widget_get_default_direction() == gtk.TEXT_DIR_RTL)
options['data'] = self._show_events
options['loading_msg'] = _('Loading calendar content, please wait...')
self.js_function_call('startup', options)
self._update_title()
def _calendar_run(self, name, *args):
if not self._loaded:
return
self.js_function_call("$('#calendar').fullCalendar", name, *args)
def _load_daemon_path(self, path):
uri = '%s/%s' % (self._daemon_uri, path)
self.load_uri(uri)
def _load_user_settings(self):
events = api.user_settings.get('calendar-events', {})
self._show_events = dict(
in_payments=events.get('in-payments', True),
out_payments=events.get('out-payments', True),
purchase_orders=events.get('purchase-orders', True),
client_calls=events.get('client-calls', True),
client_birthdays=events.get('client-birthdays', True),
work_orders=events.get('work-orders', True),
)
def _save_user_settings(self):
events = api.user_settings.get('calendar-events', {})
events['in-payments'] = self._show_events['in_payments']
events['out-payments'] = self._show_events['out_payments']
events['purchase-orders'] = self._show_events['purchase_orders']
events['client-calls'] = self._show_events['client_calls']
events['client-birthdays'] = self._show_events['client_birthdays']
events['work-orders'] = self._show_events['work_orders']
def _update_calendar_size(self, width, height):
self._calendar_run('option', 'aspectRatio', float(width) / height)
def _update_title(self):
# Workaround to get the current calendar date
view = self.get_view()
view.execute_script("document.title = $('.fc-header-title').text()")
title = view.get_property('title')
self.app.date_label.set_markup(
'<big><b>%s</b></big>' % api.escape(title))
#
# Callbacks
#
def _on_view__document_load_finished(self, view, frame):
self._load_finished()
def _on_view__size_allocate(self, widget, req):
x, y, width, height = req
self._update_calendar_size(width, height)
#
# WebView
#
def web_open_uri(self, kwargs):
if kwargs['method'] == 'changeView':
view = kwargs['view']
if view == 'basicDay':
self.app.ViewDay.set_active(True)
jsdate = urllib.unquote(kwargs['date'])
date = parse_javascript_date(jsdate)
self._calendar_run('gotoDate', date.year, date.month, date.day)
#
# Public API
#
def set_daemon_uri(self, uri):
self._daemon_uri = uri
def load(self):
self._load_daemon_path('web/static/calendar-app.html')
def go_prev(self):
self._calendar_run('prev')
self._update_title()
def show_today(self):
self._calendar_run('today')
self._update_title()
def go_next(self):
self._calendar_run('next')
self._update_title()
def change_view(self, view_name):
self._calendar_run('removeEvents')
self._calendar_run('changeView', view_name)
self._calendar_run('refetchEvents')
api.user_settings.set('calendar-view', view_name)
self._update_title()
def refresh(self):
self.load()
def get_events(self):
return self._show_events
def update_events(self, **events):
self._show_events.update(**events)
if not self._loaded:
return
self.js_function_call("update_options", self._show_events)
self._calendar_run('refetchEvents')
self._save_user_settings()
class CalendarApp(ShellApp):
app_title = _('Calendar')
gladefile = 'calendar'
def __init__(self, window, store=None):
# Create this here because CalendarView will update it.
# It will only be shown on create_ui though
self.date_label = gtk.Label('')
self._calendar = CalendarView(self)
ShellApp.__init__(self, window, store=store)
self._setup_daemon()
@api.async
def _setup_daemon(self):
daemon = yield start_daemon()
self._calendar.set_daemon_uri(daemon.base_uri)
proxy = daemon.get_client()
yield proxy.callRemote('start_webservice')
self._calendar.load()
#
# ShellApp overrides
#
def create_actions(self):
group = get_accels('app.calendar')
actions = [
# File
('NewClientCall', None, _("Client call"),
group.get('new_client_call'), _("Add a new client call")),
('NewPayable', None, _("Account payable"),
group.get('new_payable'), _("Add a new account payable")),
('NewReceivable', None, _("Account receivable"),
group.get('new_receivable'), _("Add a new account receivable")),
('NewWorkOrder', None, _("Work order"),
group.get('new_work_order'), _("Add a new work order")),
# View
('Back', gtk.STOCK_GO_BACK, _("Back"),
group.get('go_back'), _("Go back")),
('Forward', gtk.STOCK_GO_FORWARD, _("Forward"),
group.get('go_forward'), _("Go forward")),
('Today', STOQ_CALENDAR_TODAY, _("Show today"),
group.get('show_today'), _("Show today")),
('CalendarEvents', None, _("Calendar events")),
('CurrentView', None, _("Display view as")),
]
self.calendar_ui = self.add_ui_actions('', actions,
filename='calendar.xml')
self.set_help_section(_("Calendar help"), 'app-calendar')
toggle_actions = [
('AccountsPayableEvents', None, _("Accounts payable"),
None, _("Show accounts payable in the list")),
('AccountsReceivableEvents', None, _("Accounts receivable"),
None, _("Show accounts receivable in the list")),
('PurchaseEvents', None, _("Purchases"),
None, _("Show purchases in the list")),
('ClientCallEvents', None, _("Client Calls"),
None, _("Show client calls in the list")),
('ClientBirthdaysEvents', None, _("Client Birthdays"),
None, _("Show client birthdays in the list")),
('WorkOrderEvents', None, _("Work orders"),
None, _("Show work orders in the list")),
]
self.add_ui_actions('', toggle_actions, 'ToggleActions',
'toggle')
events_info = dict(
in_payments=(self.AccountsReceivableEvents, self.NewReceivable,
u'receivable'),
out_payments=(self.AccountsPayableEvents, self.NewPayable,
u'payable'),
purchase_orders=(self.PurchaseEvents, None, u'stock'),
client_calls=(self.ClientCallEvents, self.NewClientCall, u'sales'),
client_birthdays=(self.ClientBirthdaysEvents, None, u'sales'),
work_orders=(self.WorkOrderEvents, self.NewWorkOrder, u'services'),
)
user = api.get_current_user(self.store)
events = self._calendar.get_events()
for event_name, value in events_info.items():
view_action, new_action, app = value
view_action.props.active = events[event_name]
# Disable feature if user does not have acces to required
# application
if not user.profile.check_app_permission(app):
view_action.props.active = False
view_action.set_sensitive(False)
if new_action:
new_action.set_sensitive(False)
view_action.connect('notify::active', self._update_events)
self._update_events()
radio_actions = [
('ViewMonth', STOQ_CALENDAR_MONTH, _("View as month"),
'', _("Show one month")),
('ViewWeek', STOQ_CALENDAR_WEEK, _("View as week"),
'', _("Show one week")),
('ViewDay', STOQ_CALENDAR_LIST, _("View as day"),
'', _("Show one day")),
]
self.add_ui_actions('', radio_actions, 'RadioActions',
'radio')
self.ViewMonth.set_short_label(_("Month"))
self.ViewWeek.set_short_label(_("Week"))
self.ViewDay.set_short_label(_("Day"))
self.ViewMonth.props.is_important = True
self.ViewWeek.props.is_important = True
self.ViewDay.props.is_important = True
view = api.user_settings.get('calendar-view', 'month')
if view == 'month':
self.ViewMonth.props.active = True
elif view == 'basicWeek':
self.ViewWeek.props.active = True
else:
self.ViewDay.props.active = True
def create_ui(self):
self.window.add_new_items([self.NewClientCall,
self.NewPayable,
self.NewReceivable])
# Reparent the toolbar, to show the date next to it.
self.hbox = gtk.HBox()
toolbar = self.uimanager.get_widget('/toolbar')
toolbar.reparent(self.hbox)
# A label to show the current calendar date.
self.date_label.show()
self.hbox.pack_start(self.date_label, False, False, 6)
self.hbox.show()
self.main_vbox.pack_start(self.hbox, False, False)
self.main_vbox.pack_start(self._calendar)
self._calendar.show()
self.window.Print.set_tooltip(_("Print this calendar"))
def activate(self, refresh=True):
self.window.SearchToolItem.set_sensitive(False)
# FIXME: Are we 100% sure we can always print something?
# self.window.Print.set_sensitive(True)
def deactivate(self):
# Put the toolbar back at where it was
main_vbox = self.window.main_vbox
toolbar = self.uimanager.get_widget('/toolbar')
self.hbox.remove(toolbar)
main_vbox.pack_start(toolbar, False, False)
main_vbox.reorder_child(toolbar, 1)
self.uimanager.remove_ui(self.calendar_ui)
self.window.SearchToolItem.set_sensitive(True)
# Private
def _update_events(self, *args):
self._calendar.update_events(
out_payments=self.AccountsPayableEvents.get_active(),
in_payments=self.AccountsReceivableEvents.get_active(),
purchase_orders=self.PurchaseEvents.get_active(),
client_calls=self.ClientCallEvents.get_active(),
client_birthdays=self.ClientBirthdaysEvents.get_active(),
work_orders=self.WorkOrderEvents.get_active(),
)
def _new_client_call(self):
with api.new_store() as store:
self.run_dialog(CallsEditor, store, None, None, Client)
if store.committed:
self._update_events()
def _new_work_order(self):
with api.new_store() as store:
self.run_dialog(WorkOrderEditor, store)
if store.committed:
self._update_events()
def _new_payment(self, editor):
with api.new_store() as store:
self.run_dialog(editor, store)
if store.committed:
self._update_events()
#
# Kiwi callbacks
#
# Toolbar
def new_activate(self):
if not self.NewClientCall.get_sensitive():
return
self._new_client_call()
def print_activate(self):
self._calendar.print_()
def export_spreadsheet_activate(self):
pass
def on_NewClientCall__activate(self, action):
self._new_client_call()
def on_NewPayable__activate(self, action):
self._new_payment(OutPaymentEditor)
def on_NewReceivable__activate(self, action):
self._new_payment(InPaymentEditor)
def on_NewWorkOrder__activate(self, action):
self._new_work_order()
def on_Back__activate(self, action):
self._calendar.go_prev()
def on_Today__activate(self, action):
self._calendar.show_today()
def on_Forward__activate(self, action):
self._calendar.go_next()
def on_ViewMonth__activate(self, action):
self._calendar.change_view('month')
def on_ViewWeek__activate(self, action):
self._calendar.change_view('basicWeek')
def on_ViewDay__activate(self, action):
self._calendar.change_view('basicDay')
| andrebellafronte/stoq | stoq/gui/calendar.py | Python | gpl-2.0 | 17,713 | 0.000847 |
# -*- coding: utf-8 -*-
"""Models for database connection"""
import settings
| vprusso/us_patent_scraper | patent_spider/patent_spider/models.py | Python | gpl-2.0 | 80 | 0.0125 |
# -*- coding: utf-8 -*-
from sympy.matrices import Matrix
from tensor_analysis.arraypy import Arraypy, TensorArray, list2arraypy, list2tensor
from tensor_analysis.tensor_fields import df, grad, curl, diverg, lie_xy, dw, \
lie_w, inner_product, g_tensor, g_wedge
from sympy import symbols, cos, sin
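# Note on the output_type flags exercised below (inferred from the
# assertions in this module): 'l' requests a plain Python list, 'a' an
# Arraypy array and 't' a TensorArray; when omitted, the default varies
# by function.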
def test_df_varlist():
x1, x2, x3 = symbols('x1 x2 x3')
f = x1**2 * x2 + sin(x2 * x3 - x2)
var_list = [x1, x2, x3]
assert df(f, var_list) == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(df(f, var_list), list)
assert df(f, var_list, 'l') == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(df(f, var_list, 'l'), list)
assert df(f, var_list, 'a') == list2arraypy(
[2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)])
assert isinstance(df(f, var_list, 'a'), Arraypy)
assert df(f, var_list, 't') == list2tensor(
[2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)])
assert isinstance(df(f, var_list, 't'), TensorArray)
assert df(f, var_list, 't').type_pq == (0, 1)
def test_df_var_tnsr0():
x1, x2, x3 = symbols('x1 x2 x3')
f = x1**2 * x2 + sin(x2 * x3 - x2)
var_tnsr0 = TensorArray(Arraypy(3), (1))
var_tnsr0[0] = x1
var_tnsr0[1] = x2
var_tnsr0[2] = x3
assert df(f, var_tnsr0) == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(df(f, var_tnsr0), list)
assert df(f, var_tnsr0, 'l') == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(df(f, var_tnsr0, 'l'), list)
assert df(f, var_tnsr0, 'a') == list2arraypy(
[2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)])
assert isinstance(df(f, var_tnsr0, 'a'), Arraypy)
assert df(f, var_tnsr0, 't') == list2tensor(
[2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)])
assert isinstance(df(f, var_tnsr0, 't'), TensorArray)
assert df(f, var_tnsr0, 't').type_pq == (0, 1)
def test_df_var_tnsr1():
x1, x2, x3 = symbols('x1 x2 x3')
f = x1**2 * x2 + sin(x2 * x3 - x2)
var_tnsr1 = Arraypy([1, 3, 1]).to_tensor(1)
var_tnsr1[1] = x1
var_tnsr1[2] = x2
var_tnsr1[3] = x3
res_ar1 = Arraypy([1, 3, 1])
res_ar1[1] = 2 * x1 * x2
res_ar1[2] = x1**2 + (x3 - 1) * cos(x2 * x3 - x2)
res_ar1[3] = x2 * cos(x2 * x3 - x2)
res_ten1 = res_ar1.to_tensor(-1)
assert df(f, var_tnsr1) == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(df(f, var_tnsr1), list)
assert df(f, var_tnsr1, 'l') == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(df(f, var_tnsr1, 'l'), list)
assert df(f, var_tnsr1, 'a') == res_ar1
assert isinstance(df(f, var_tnsr1, 'a'), Arraypy)
assert df(f, var_tnsr1, 't') == res_ten1
assert isinstance(df(f, var_tnsr1, 't'), TensorArray)
assert df(f, var_tnsr1, 't').type_pq == (0, 1)
def test_diverg_var_x_list():
x1, x2, x3 = symbols('x1 x2 x3')
X = [x1 * x2**3, x2 - cos(x3), x3**3 - x1]
var = [x1, x2, x3]
g = Matrix([[2, 1, 0], [1, 3, 0], [0, 0, 1]])
ten = Arraypy([2, 3, 0]).to_tensor((-1, -1))
ten[0, 0] = 2
ten[0, 1] = 1
ten[0, 2] = 0
ten[1, 0] = 1
ten[1, 1] = 3
ten[1, 2] = 0
ten[2, 0] = 0
ten[2, 1] = 0
ten[2, 2] = 1
ten1 = Arraypy([2, 3, 1]).to_tensor((-1, -1))
ten1[1, 1] = 2
ten1[1, 2] = 1
ten1[1, 3] = 0
ten1[2, 1] = 1
ten1[2, 2] = 3
ten1[2, 3] = 0
ten1[3, 1] = 0
ten1[3, 2] = 0
ten1[3, 3] = 1
assert diverg(X, var) == x2**3 + 3 * x3**2 + 1
assert diverg(X, var, g) == x2**3 + 3 * x3**2 + 1
assert diverg(X, var, ten) == x2**3 + 3 * x3**2 + 1
assert diverg(X, var, ten1) == x2**3 + 3 * x3**2 + 1
def test_grad_varlist():
x1, x2, x3 = symbols('x1 x2 x3')
f = x1**2 * x2 + sin(x2 * x3 - x2)
var1 = [x1, x2, x3]
res_ar1 = Arraypy([1, 3, 0])
res_ar1[0] = 2 * x1 * x2
res_ar1[1] = x1**2 + (x3 - 1) * cos(x2 * x3 - x2)
res_ar1[2] = x2 * cos(x2 * x3 - x2)
res_ten1 = res_ar1.to_tensor(1)
res_ar = Arraypy([1, 3, 0])
res_ar[0] = -x1**2 / 5 + 6 * x1 * x2 / 5 - (x3 - 1) * cos(x2 * x3 - x2) / 5
res_ar[1] = 2 * x1**2 / 5 - 2 * x1 * x2 / \
5 + cos(x2 * x3 - x2) * 2 * (x3 - 1) / 5
res_ar[2] = x2 * cos(x2 * x3 - x2)
res_ten = res_ar.to_tensor(1)
g = Matrix([[2, 1, 0], [1, 3, 0], [0, 0, 1]])
assert grad(f, var1, output_type='l') == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(grad(f, var1, output_type='l'), list)
assert grad(f, var1) == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(grad(f, var1), list)
assert grad(f, var1, output_type='a') == res_ar1
assert isinstance(grad(f, var1, output_type='t'), Arraypy)
assert grad(f, var1, output_type='t') == res_ten1
assert isinstance(grad(f, var1, output_type='t'), TensorArray)
assert grad(f, var1, output_type='t').type_pq == (1, 0)
assert str(
grad(
f,
var1,
g,
output_type='l')) == '[-x1**2/5 + 6*x1*x2/5 - (x3 - 1)*cos(x2*x3 - x2)/5, 2*x1**2/5 - 2*x1*x2/5 + 2*(x3 - 1)*cos(x2*x3 - x2)/5, x2*cos(x2*x3 - x2)]'
assert isinstance(grad(f, var1, g, output_type='l'), list)
assert grad(f, var1, g, output_type='a') == res_ar
assert isinstance(grad(f, var1, g, output_type='a'), Arraypy)
assert grad(f, var1, g, output_type='t') == res_ten
assert isinstance(grad(f, var1, g, output_type='t'), TensorArray)
assert grad(f, var1, g, output_type='t').type_pq == (1, 0)
def test_grad_gtnsr():
x1, x2, x3 = symbols('x1 x2 x3')
f = x1**2 * x2 + sin(x2 * x3 - x2)
var1 = [x1, x2, x3]
k1 = Arraypy([1, 3, 0]).to_tensor(1)
k1[0] = x1
k1[1] = x2
k1[2] = x3
    # g is given as a tensor with indexing starting at 1, and var is a list
a = Arraypy([2, 3, 1])
b = a.to_tensor((-1, -1))
b[1, 1] = 2
b[1, 2] = 1
b[1, 3] = 0
b[2, 1] = 1
b[2, 2] = 3
b[2, 3] = 0
b[3, 1] = 0
b[3, 2] = 0
b[3, 3] = 1
res_ar = Arraypy([1, 3, 1])
res_ar[1] = -x1**2 / 5 + 6 * x1 * x2 / 5 - (x3 - 1) * cos(x2 * x3 - x2) / 5
res_ar[2] = 2 * x1**2 / 5 - 2 * x1 * x2 / \
5 + cos(x2 * x3 - x2) * 2 * (x3 - 1) / 5
res_ar[3] = x2 * cos(x2 * x3 - x2)
res_ten = res_ar.to_tensor(1)
res_ar1 = Arraypy([1, 3, 0])
res_ar1[0] = 2 * x1 * x2
res_ar1[1] = x1**2 + (x3 - 1) * cos(x2 * x3 - x2)
res_ar1[2] = x2 * cos(x2 * x3 - x2)
assert str(
grad(
f,
var1,
b,
'l')) == '[-x1**2/5 + 6*x1*x2/5 - (x3 - 1)*cos(x2*x3 - x2)/5, 2*x1**2/5 - 2*x1*x2/5 + 2*(x3 - 1)*cos(x2*x3 - x2)/5, x2*cos(x2*x3 - x2)]'
assert isinstance(grad(f, var1, b, 'l'), list)
assert grad(f, var1, b, 'a') == res_ar
assert isinstance(grad(f, var1, b, 'a'), Arraypy)
assert grad(f, k1, output_type='a') == res_ar1
assert isinstance(grad(f, k1, output_type='a'), Arraypy)
assert grad(f, var1, b, 't') == res_ten
assert isinstance(grad(f, var1, b, 't'), TensorArray)
assert grad(f, var1, b, 't').type_pq == (1, 0)
assert grad(f, var1, b) == res_ten
assert isinstance(grad(f, var1, b, 't'), TensorArray)
assert grad(f, var1, b, 't').type_pq == (1, 0)
def test_grad_gm_vl():
x1, x2, x3 = symbols('x1 x2 x3')
f = x1**2 * x2 + sin(x2 * x3 - x2)
var1 = [x1, x2, x3]
g = Matrix([[2, 1, 0], [1, 3, 0], [0, 0, 1]])
k0 = Arraypy([1, 3, 1]).to_tensor(1)
k0[1] = x1
k0[2] = x2
k0[3] = x3
res_ar = Arraypy([1, 3, 0])
res_ar[0] = -x1**2 / 5 + 6 * x1 * x2 / 5 - (x3 - 1) * cos(x2 * x3 - x2) / 5
res_ar[1] = 2 * x1**2 / 5 - 2 * x1 * x2 / \
5 + cos(x2 * x3 - x2) * 2 * (x3 - 1) / 5
res_ar[2] = x2 * cos(x2 * x3 - x2)
res_ten = res_ar.to_tensor(1)
assert str(
grad(
f,
k0,
g,
'l')) == '[-x1**2/5 + 6*x1*x2/5 - (x3 - 1)*cos(x2*x3 - x2)/5, 2*x1**2/5 - 2*x1*x2/5 + 2*(x3 - 1)*cos(x2*x3 - x2)/5, x2*cos(x2*x3 - x2)]'
assert isinstance(grad(f, k0, g, 'l'), list)
assert grad(f, k0, g, 'a') == res_ar
assert isinstance(grad(f, k0, g, 'a'), Arraypy)
assert grad(f, k0, g, 't') == res_ten
assert isinstance(grad(f, k0, g, 't'), TensorArray)
assert grad(f, k0, g, 't').type_pq == (1, 0)
def test_lie_xy():
x1, x2, x3, t, l, a = symbols('x1 x2 x3 t l a')
X = [x1 * x2**3, x2 - cos(x3), x3**3 - x1]
Y = [x1**3 * x2**3, x2 * x3 - sin(x1 * x3), x3**3 - x1**2]
arg = [x1, x2, x3]
res_ar = Arraypy([1, 3, 0])
res_ar[0] = 2 * x1**3 * x2**6 + 3 * x1**3 * x2**2 * \
(x2 - cos(x3)) - 3 * x1 * x2**2 * (x2 * x3 - sin(x1 * x3))
res_ar[1] = -x1 * x2**3 * x3 * cos(x1 * x3) - x2 * x3 + x3 * (x2 - cos(
x3)) + (-x1 + x3**3) * (-x1 * cos(x1 * x3) + x2) - (-x1**2 + x3**3) * \
sin(x3) + sin(x1 * x3)
res_ar[2] = x1**3 * x2**3 - 2 * x1**2 * x2**3 + 3 * \
x3**2 * (-x1 + x3**3) - 3 * x3**2 * (-x1**2 + x3**3)
res_ten = res_ar.to_tensor(1)
    assert lie_xy(X, Y, arg, 'l') == [
        2 * x1**3 * x2**6 + 3 * x1**3 * x2**2 * (x2 - cos(x3)) -
        3 * x1 * x2**2 * (x2 * x3 - sin(x1 * x3)),
        -x1 * x2**3 * x3 * cos(x1 * x3) - x2 * x3 + x3 * (x2 - cos(x3)) +
        (-x1 + x3**3) * (-x1 * cos(x1 * x3) + x2) -
        (-x1**2 + x3**3) * sin(x3) + sin(x1 * x3),
        x1**3 * x2**3 - 2 * x1**2 * x2**3 +
        3 * x3**2 * (-x1 + x3**3) - 3 * x3**2 * (-x1**2 + x3**3)]
assert isinstance(lie_xy(X, Y, arg, 'l'), list)
assert lie_xy(X, Y, arg, 'a') == res_ar
assert isinstance(lie_xy(X, Y, arg, 'a'), Arraypy)
assert lie_xy(X, Y, arg, 't') == res_ten
assert isinstance(lie_xy(X, Y, arg, 't'), TensorArray)
assert lie_xy(X, Y, arg, 't').type_pq == (1, 0)
def test_curl():
x1, x2, x3 = symbols('x1 x2 x3')
X = [x1 * x2**3, x2 - cos(x3), x3**3 - x1]
arg = [x1, x2, x3]
j = Arraypy(3)
k0 = TensorArray(j, (1))
k0[0] = x1 * x2**3
k0[1] = x2 - cos(x3)
k0[2] = x3**3 - x1
k1 = Arraypy([1, 3, 1]).to_tensor(1)
k1[1] = x1 * x2**3
k1[2] = x2 - cos(x3)
k1[3] = x3**3 - x1
v0 = TensorArray(j, (1))
v0[0] = x1
v0[1] = x2
v0[2] = x3
v1 = Arraypy([1, 3, 1]).to_tensor(1)
v1[1] = x1
v1[2] = x2
v1[3] = x3
s0 = TensorArray(j, (1))
s0[0] = -sin(x3)
s0[1] = 1
s0[2] = -3 * x1 * x2**2
s1 = Arraypy([1, 3, 1]).to_tensor(1)
s1[1] = -sin(x3)
s1[2] = 1
s1[3] = -3 * x1 * x2**2
assert curl(X, arg) == [-sin(x3), 1, -3 * x1 * x2**2]
assert isinstance(curl(X, arg), list)
assert curl(X, arg, 'a') == list2arraypy([-sin(x3), 1, -3 * x1 * x2**2])
assert isinstance(curl(X, arg, 'a'), Arraypy)
assert curl(X, arg, 't') == s0
assert isinstance(curl(X, arg, 't'), TensorArray)
assert curl(X, arg, 't').type_pq == (1, 0)
assert curl(k0, arg) == s0
assert isinstance(curl(k0, arg), TensorArray)
assert curl(X, arg, 't').type_pq == (1, 0)
assert curl(k0, arg, 'a') == list2arraypy([-sin(x3), 1, -3 * x1 * x2**2])
assert isinstance(curl(k0, arg, 'a'), Arraypy)
assert curl(k0, arg, 't') == s0
assert isinstance(curl(k0, arg, 't'), TensorArray)
assert curl(X, arg, 't').type_pq == (1, 0)
assert curl(k1, v1, 't') == s1
assert isinstance(curl(k1, v1, 't'), TensorArray)
assert curl(k1, v1, 't').type_pq == (1, 0)
def test_lie_w():
x1, x2, x3 = symbols('x1, x2, x3')
X = [x1 * x2**3, x2 - cos(x3), x3**3 - x1]
arr = Arraypy((3, 3))
y = TensorArray(arr, (-1, -1))
y1 = TensorArray(arr, (-1, -1))
y[0, 1] = x3
y[0, 2] = -x2
y[1, 0] = -x3
y[1, 2] = x1
y[2, 0] = x2
y[2, 1] = -x1
y1[0, 1] = x2**3 * x3 + x3**3 + x3
y1[0, 2] = -x2**4 - 3 * x2 * x3**2 - x2 + x3 * sin(x3) + cos(x3)
y1[1, 0] = -x2**3 * x3 - x3**3 - x3
y1[1, 2] = -2 * x1 * x2**3 + 3 * x1 * x3**2 + x1
y1[2, 0] = x2**4 + 3 * x2 * x3**2 + x2 - x3 * sin(x3) - cos(x3)
y1[2, 1] = 2 * x1 * x2**3 - 3 * x1 * x3**2 - x1
assert lie_w(y, X, [x1, x2, x3]) == y1
assert isinstance(lie_w(y, X, [x1, x2, x3]), TensorArray)
assert lie_w(y, X, [x1, x2, x3]).type_pq == (0, 2)
omega = TensorArray(arr, (-1, -1))
omega[0, 1] = x2
omega[1, 0] = -x2
omega[0, 2] = -x1
omega[2, 0] = x1
ten = TensorArray(arr, (-1, -1))
ten[0, 1] = x2**4 + 2 * x2 - cos(x3)
ten[0, 2] = -2 * x1 * x2**3 - 3 * x1 * x3**2 + x2 * sin(x3)
ten[1, 0] = -x2**4 - 2 * x2 + cos(x3)
ten[1, 2] = -3 * x1**2 * x2**2
ten[2, 0] = 2 * x1 * x2**3 + 3 * x1 * x3**2 - x2 * sin(x3)
ten[2, 1] = 3 * x1**2 * x2**2
assert lie_w(omega, X, [x1, x2, x3]) == ten
assert isinstance(lie_w(omega, X, [x1, x2, x3]), TensorArray)
assert lie_w(omega, X, [x1, x2, x3]).type_pq == (0, 2)
def test_g_tensor():
x, y, z, w = symbols('x, y, z, w')
    omega = Arraypy([2, 3, 0]).to_tensor((-1, 1))
    omega[0, 0] = w
    omega[0, 1] = x
    omega[1, 0] = y
    omega[1, 1] = z
    omega[2, 1] = y*y
    omega[2, 2] = x*y*w
    g = Matrix([[2, 1, 0], [1, 3, 0], [0, 0, 1]])
    s = (w**2*x**2*y**2 + 3*y**4 +
         (-w/5 + 2*y/5)*(2*y + z) +
         (3*w/5 - y/5)*(2*w + x) + (w + 3*x) *
         (3*x/5 - z/5) + (-x/5 + 2*z/5) *
         (y + 3*z))
    assert g_tensor(omega, omega, g) == s
def test_dw():
x1, x2, x3 = symbols('x1 x2 x3')
y = TensorArray(Arraypy((3, 3)), (-1, -1))
y1 = TensorArray(Arraypy((3, 3, 3)), (-1, -1, -1))
y[0, 1] = x3
y[0, 2] = -x2
y[1, 0] = -x3
y[1, 2] = x1
y[2, 0] = x2
y[2, 1] = -x1
y1[0, 1, 2] = 3
y1[0, 2, 1] = -3
y1[1, 0, 2] = -3
y1[1, 2, 0] = 3
y1[2, 0, 1] = 3
y1[2, 1, 0] = -3
assert dw(y, [x1, x2, x3]) == y1
assert isinstance(dw(y, [x1, x2, x3]), TensorArray)
assert dw(y, [x1, x2, x3]).type_pq == (0, 3)
def test_g_wedge():
l, m, n = symbols('l, m, n')
    X_w = Arraypy([1, 3, 1]).to_tensor(-1)
    X_w[1] = l
    X_w[2] = m
    X_w[3] = n
    b1 = Arraypy([2, 3, 1]).to_tensor((-1, -1))
    b1[1, 1] = 2
    b1[1, 2] = 1
    b1[1, 3] = 0
    b1[2, 1] = 1
    b1[2, 2] = 3
    b1[2, 3] = 0
    b1[3, 1] = 0
    b1[3, 2] = 0
    b1[3, 3] = 1
    assert g_wedge(X_w, X_w, b1) == l*(3*l/5 - m/5) + m*(-l/5 + 2*m/5) + n**2

def test_inner_product():
    x1, x2, x3, l, m, n = symbols('x1 x2 x3 l m n')
    omega2 = Arraypy([2, 3, 1]).to_tensor((-1, -1))
    omega2[1, 2] = x3
    omega2[1, 3] = -x2
    omega2[2, 1] = -x3
    omega2[2, 3] = x1
    omega2[3, 1] = x2
    omega2[3, 2] = -x1
    X_t = Arraypy([1, 3, 1]).to_tensor((1))
    X_t[1] = l
    X_t[2] = m
    X_t[3] = n
    res_ar1 = Arraypy([1, 3, 1])
    res_ar1[1] = -m*x3 + n*x2
    res_ar1[2] = l*x3 - n*x1
    res_ar1[3] = -l*x2 + m*x1
    res_ten1 = res_ar1.to_tensor(-1)
    assert inner_product(omega2, X_t) == res_ten1
| AunShiLord/Tensor-analysis | tensor_analysis/tests/test_tensor_fields.py | Python | mit | 17,326 | 0.006065 |
"""
WSGI config for djarzeit project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "djarzeit.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djarzeit.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| peap/djarzeit | djarzeit/wsgi.py | Python | mit | 1,425 | 0.000702 |
from .SCPISignalGenerator import SCPISignalGenerator
from .helper import SignalGenerator, amplitudelimiter
class AgilentN5182A(SCPISignalGenerator, SignalGenerator):
"""Agilent N5182A 100e3, 6e9.
.. figure:: images/SignalGenerator/AgilentN5182A.jpg
"""
def __init__(self, inst):
super().__init__(inst)
self.inst.read_termination = '\n'
self.inst.write_termination = '\n'
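
# A minimal usage sketch (illustrative; the VISA resource address below is
# hypothetical): wrap an already-opened pyvisa instrument handle.
#
#     import pyvisa
#     rm = pyvisa.ResourceManager()
#     inst = rm.open_resource('TCPIP0::192.168.1.10::inst0::INSTR')
#     sig_gen = AgilentN5182A(inst)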
| DavidLutton/EngineeringProject | labtoolkit/SignalGenerator/AgilentN5182A.py | Python | mit | 417 | 0 |
# This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen, PIPE, STDOUT
import os, sys
import xmlrpclib
import cPickle
class _Method(object):
def __init__(self, proxy, name):
self.proxy = proxy
self.name = name
def __call__(self, *args):
#print "CALL", self.name, args
z = getattr( self.proxy, self.name, None )
#print "SEND", repr(cPickle.dumps( args ) )
try:
if len(args) == 1:
ret = z( cPickle.dumps( args[0] ) )
else:
ret = z( cPickle.dumps( args ) )
#print "RECEIVE", repr(ret)
return cPickle.loads( ret )
except xmlrpclib.ProtocolError:
return []
class MyXMLRPC(object):
def __init__(self, proxy):
self.proxy = proxy
def __getattr__(self, name):
return _Method(self.proxy, name)
class BasicBlock(object):
def __init__(self, ins):
self.ins = ins
def show(self):
for i in self.ins:
print i
class Function(object):
def __init__(self, name, start_ea, instructions, information):
#print name, start_ea
self.name = name
self.start_ea = start_ea
self.information = information
self.basic_blocks = []
self.instructions = instructions
r = {}
idx = 0
for i in instructions:
r[ i[0] ] = idx
idx += 1
for i in information[0]:
try:
start = r[i[0]]
end = r[i[1]] + 1
self.basic_blocks.append( BasicBlock( instructions[start:end] ) )
except KeyError:
pass
def get_instructions(self):
return [ i for i in self.instructions ]
def run_ida(idapath, wrapper_init_path, binpath):
os.environ["TVHEADLESS"] = "1"
pid = os.fork()
if pid == 0:
wrapper_path = "-S" + wrapper_init_path
l = [ idapath, "-A", wrapper_path, binpath ]
print l
compile = Popen(l, stdout=open('/dev/null', 'w'), stderr=STDOUT)
stdout, stderr = compile.communicate()
# print stdout, stderr
sys.exit(0)
class IDAPipe(object):
def __init__(self, idapath, binpath, wrapper_init_path):
self.idapath = idapath
self.binpath = binpath
self.proxy = None
        run_ida(self.idapath, wrapper_init_path, self.binpath)
while 1:
try:
self.proxy = xmlrpclib.ServerProxy("http://localhost:9000/")
self.proxy.is_connected()
break
except:
pass
#print self.proxy
self.proxy = MyXMLRPC( self.proxy )
def quit(self):
try:
self.proxy.quit()
except:
pass
def _build_functions(self, functions):
F = {}
for i in functions:
F[ i ] = Function( functions[i][0], i, functions[i][1:-1], functions[i][-1] )
return F
def get_quick_functions(self):
functions = self.get_raw()
return self._build_functions( functions )
def get_raw(self):
return self.proxy.get_raw()
def get_nb_functions(self):
return len(self.proxy.Functions())
def get_functions(self):
for function_ea in self.proxy.Functions():
self.get_function_addr( function_ea )
def get_function_name(self, name):
function_ea = self.proxy.get_function( name )
self.get_function_addr( function_ea )
def get_function_addr(self, function_ea):
if function_ea == -1:
return
f_start = function_ea
f_end = self.proxy.GetFunctionAttr(function_ea, 4) #FUNCATTR_END)
edges = set()
boundaries = set((f_start,))
for head in self.proxy.Heads(f_start, f_end):
if self.proxy.isCode( self.proxy.GetFlags( head ) ):
refs = self.proxy.CodeRefsFrom(head, 0)
refs = set(filter(lambda x: x>=f_start and x<=f_end, refs))
#print head, f_end, refs, self.proxy.GetMnem(head), self.proxy.GetOpnd(head, 0), self.proxy.GetOpnd(head, 1)
if refs:
next_head = self.proxy.NextHead(head, f_end)
if self.proxy.isFlow(self.proxy.GetFlags(next_head)):
refs.add(next_head)
# Update the boundaries found so far.
boundaries.update(refs)
# For each of the references found, and edge is
# created.
for r in refs:
# If the flow could also come from the address
# previous to the destination of the branching
# an edge is created.
if self.proxy.isFlow(self.proxy.GetFlags(r)):
edges.add((self.proxy.PrevHead(r, f_start), r))
edges.add((head, r))
#print edges, boundaries
# Let's build the list of (startEA, startEA) couples
# for each basic block
sorted_boundaries = sorted(boundaries, reverse = True)
end_addr = self.proxy.PrevHead(f_end, f_start)
bb_addr = []
for begin_addr in sorted_boundaries:
bb_addr.append((begin_addr, end_addr))
# search the next end_addr which could be
# farther than just the previous head
# if data are interlaced in the code
# WARNING: it assumes it won't epicly fail ;)
end_addr = self.proxy.PrevHead(begin_addr, f_start)
while not self.proxy.isCode(self.proxy.GetFlags(end_addr)):
end_addr = self.proxy.PrevHead(end_addr, f_start)
# And finally return the result
bb_addr.reverse()
#print bb_addr, sorted(edges)
def display_function(f):
print f, f.name, f.information
for i in f.basic_blocks:
print i
i.show()
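
# Illustrative driver sketch (paths are hypothetical; the wrapper script is
# expected to start the XML-RPC server that IDAPipe polls on port 9000):
#
#     pipe = IDAPipe("/opt/ida/idal", "/tmp/sample.bin", "/opt/wrapper_init.py")
#     functions = pipe.get_quick_functions()
#     for addr in functions:
#         display_function(functions[addr])
#     pipe.quit()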
| nikhilpanicker/SecureVault | tools/modified/androguard/core/binaries/idapipe.py | Python | gpl-3.0 | 6,602 | 0.010906 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Person.education'
db.add_column('person_person', 'education',
self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True),
keep_default=False)
# Adding field 'Person.birthday'
db.add_column('person_person', 'birthday',
self.gf('django.db.models.fields.DateTimeField')(null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Person.education'
db.delete_column('person_person', 'education')
# Deleting field 'Person.birthday'
db.delete_column('person_person', 'birthday')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'person.person': {
'Meta': {'object_name': 'Person'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'birthday': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'education': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'fullname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'default': "'unknown'", 'max_length': '7'}),
'nickname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'occupation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'normal'", 'max_length': '20'}),
'subscribing': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'})
}
}
complete_apps = ['person'] | elin-moco/ffclub | ffclub/person/migrations/0005_auto__add_field_person_education__add_field_person_birthday.py | Python | bsd-3-clause | 5,302 | 0.007922 |
from django.conf.urls import patterns, url
from publicaciones import views
urlpatterns = patterns('',
url(r'^$', views.index, name='index'),
url(r'^(?P<articulo_id>\d+)/$', views.ver_articulo, name='ver_articulo'),
) | rickyrish/rickyblog | build/lib.linux-i686-2.7/publicaciones/urls.py | Python | gpl-2.0 | 226 | 0.00885 |
# Recall is a program for storing bookmarks of different things
# Copyright (C) 2012 Cal Paterson
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import uuid
from bottle import Bottle, request, response, abort
import bcrypt
from recall.data import whitelist, blacklist
from recall import convenience as c
from recall import plugins, jobs, messages
app = Bottle()
app.install(plugins.exceptions)
app.install(plugins.ppjson)
app.install(plugins.auth)
app.install(plugins.cors)
app.error_handler = plugins.handler_dict
logger = c.logger("people")
@app.get("/")
def users():
abort(503, "Not yet implemented")
@app.get("/<who>/")
def user_(who):
try:
return whitelist(c.db().users.find_one({"email": who}), [
"email",
"firstName",
"pseudonym"
])
except TypeError:
logger.warn("Asked about {email}, but that is not a user".format(
email=who))
abort(404, "User not found")
@app.get("/<who>/self")
def _self(who, user):
if who != user["email"]:
response.status = 400
else:
return whitelist(user, ["pseudonym",
"firstName",
"surname",
"email",
"private_email"])
@app.post("/<who>/")
def request_invite(who):
# FIXME: Don't allow the pseudonym "public"
user = whitelist(request.json, [
"pseudonym",
"firstName",
"surname",
"private_email",
"token",
])
if "private_email" not in user:
abort(400, "You must provide a private_email field")
user["email_key"] = str(uuid.uuid4())
user["registered"] = c.unixtime()
user["email"] = who
c.db().users.ensure_index("email", unique=True)
c.db().users.insert(user, safe=True)
response.status = 202
logger.info("{email} subscribed".format(email=who))
jobs.enqueue(messages.SendInvite(user))
@app.post("/<who>/<email_key>")
def verify_email(who, email_key):
if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings:
salt = bcrypt.gensalt(1)
else:
salt = bcrypt.gensalt()
password_hash = bcrypt.hashpw(request.json["password"], salt)
spec = {"email_key": email_key, "verified": {"$exists": False}}
update = {"$set": {"password_hash": password_hash,
"verified": c.unixtime()}}
success = c.db().users.update(spec, update, safe=True)["updatedExisting"]
if not success:
if c.db().users.find_one({"email_key": email_key}):
logger.warn("{email} tried to verify a second time".format(email=who))
abort(403, "Already verified")
else:
logger.warn("Someone tried to verify with a key, but it doesn't exist")
abort(404, "Don't know that key")
user = c.db().users.find_one({"email_key": email_key})
response.status = 201
return blacklist(user, ["_id", "email_key", "password_hash"])
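
# Illustrative sketch (not an existing route): a later login check would
# re-hash the candidate password with the stored hash as the salt and compare:
#
#     stored = user["password_hash"]
#     ok = bcrypt.hashpw(candidate_password, stored) == stored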
| calpaterson/recall | src/recall/people.py | Python | agpl-3.0 | 3,665 | 0.00191 |
# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('appliances', '0007_provider_num_simultaneous_configuring'),
]
operations = [
migrations.AddField(
model_name='appliance',
name='uuid',
field=models.CharField(
help_text=b'UUID of the machine', max_length=36, null=True, blank=True),
preserve_default=True,
),
]
| apagac/cfme_tests | sprout/appliances/migrations/0008_appliance_uuid.py | Python | gpl-2.0 | 493 | 0.002028 |
#
# Copyright (c) 2015 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import
from __future__ import print_function
import codecs
import logging
import json
from collections import OrderedDict
from functools import partial
from commoncode import filetype
from commoncode import fileutils
from packagedcode import models
from packagedcode.utils import parse_repo_url
"""
Handle PHP composer packages, refer to https://getcomposer.org/
"""
logger = logging.getLogger(__name__)
# import sys
# logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)
# logger.setLevel(logging.DEBUG)
class PHPComposerPackage(models.Package):
metafiles = ('composer.json')
filetypes = ('.json',)
mimetypes = ('application/json')
repo_types = (models.repo_phpcomposer,)
type = models.StringType(default='phpcomposer')
primary_language = models.StringType(default='PHP')
@classmethod
def recognize(cls, location):
return parse(location)
def is_phpcomposer_json(location):
return (filetype.is_file(location)
and fileutils.file_name(location).lower() == 'composer.json')
def parse(location):
"""
Return a Package object from a composer.json file or None.
"""
if not is_phpcomposer_json(location):
return
# mapping of top level composer.json items to the Package object field name
plain_fields = OrderedDict([
('name', 'name'),
('description', 'summary'),
('keywords', 'keywords'),
('version', 'version'),
('homepage', 'homepage_url'),
])
# mapping of top level composer.json items to a function accepting as arguments
# the composer.json element value and returning an iterable of key, values Package Object to update
field_mappers = OrderedDict([
('authors', author_mapper),
('license', licensing_mapper),
('require', dependencies_mapper),
('require-dev', dev_dependencies_mapper),
('repositories', repository_mapper),
('support', support_mapper),
])
with codecs.open(location, encoding='utf-8') as loc:
data = json.load(loc, object_pairs_hook=OrderedDict)
if not data.get('name') or not data.get('description'):
# a composer.json without name and description is not a usable PHP composer package
# name and description fields are required: https://getcomposer.org/doc/04-schema.md#name
return
package = PHPComposerPackage()
# a composer.json is at the root of a PHP composer package
base_dir = fileutils.parent_directory(location)
package.location = base_dir
package.metafile_locations = [location]
for source, target in plain_fields.items():
value = data.get(source)
if value:
if isinstance(value, basestring):
value = value.strip()
if value:
setattr(package, target, value)
for source, func in field_mappers.items():
logger.debug('parse: %(source)r, %(func)r' % locals())
value = data.get(source)
if value:
if isinstance(value, basestring):
value = value.strip()
if value:
func(value, package)
vendor_mapper(package) # Parse vendor from name value
return package
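
# Illustrative usage sketch (the path is hypothetical): parse() returns a
# PHPComposerPackage for a composer.json carrying both name and description,
# and None otherwise.
#
#     package = parse('/tmp/project/composer.json')
#     if package is not None:
#         print(package.name, package.summary)
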
def licensing_mapper(licenses, package):
"""
Update package licensing and return package.
Licensing data structure has evolved over time and is a tad messy.
https://getcomposer.org/doc/04-schema.md#license
licenses is either:
- a string with:
- an SPDX id or expression { "license": "(LGPL-2.1 or GPL-3.0+)" }
- array:
"license": [
"LGPL-2.1",
"GPL-3.0+"
]
"""
if not licenses:
return package
if isinstance(licenses, basestring):
package.asserted_licenses.append(models.AssertedLicense(license=licenses))
elif isinstance(licenses, list):
"""
"license": [
"LGPL-2.1",
"GPL-3.0+"
]
"""
for lic in licenses:
if isinstance(lic, basestring):
package.asserted_licenses.append(models.AssertedLicense(license=lic))
else:
# use the bare repr
if lic:
package.asserted_licenses.append(models.AssertedLicense(license=repr(lic)))
else:
# use the bare repr
package.asserted_licenses.append(models.AssertedLicense(license=repr(licenses)))
return package
def author_mapper(authors_content, package):
"""
Update package authors and return package.
https://getcomposer.org/doc/04-schema.md#authors
"""
authors = []
for name, email, url in parse_person(authors_content):
authors.append(models.Party(type=models.party_person, name=name, email=email, url=url))
package.authors = authors
return package
def support_mapper(support, package):
"""
Update support and bug tracking url.
https://getcomposer.org/doc/04-schema.md#support
"""
package.support_contacts = [support.get('email')]
package.bug_tracking_url = support.get('issues')
package.code_view_url = support.get('source')
return package
def vendor_mapper(package):
"""
    Vendor is part of the name element.
https://getcomposer.org/doc/04-schema.md#name
"""
name = package.name
if name and '/' in name:
vendors = name.split('/')
if vendors[0]:
package.vendors = [models.Party(name=vendors[0])]
return package
def repository_mapper(repos, package):
"""
https://getcomposer.org/doc/04-schema.md#repositories
"repositories": [
{
"type": "composer",
"url": "http://packages.example.com"
},
{
"type": "composer",
"url": "https://packages.example.com",
"options": {
"ssl": {
"verify_peer": "true"
}
}
},
{
"type": "vcs",
"url": "https://github.com/Seldaek/monolog"
},
{
"type": "pear",
"url": "https://pear2.php.net"
},
{
"type": "package",
"package": {
"name": "smarty/smarty",
"version": "3.1.7",
"dist": {
"url": "http://www.smarty.net/files/Smarty-3.1.7.zip",
"type": "zip"
},
"source": {
"url": "https://smarty-php.googlecode.com/svn/",
"type": "svn",
"reference": "tags/Smarty_3_1_7/distribution/"
}
}
}
]
"""
if not repos:
return package
if isinstance(repos, basestring):
package.vcs_repository = parse_repo_url(repos)
elif isinstance(repos, list):
for repo in repos:
if repo.get('type') == 'vcs':
# vcs type includes git, svn, fossil or hg.
# refer to https://getcomposer.org/doc/05-repositories.md#vcs
repo_url = repo.get('url')
if repo_url.startswith('svn') or 'subversion.apache.org' in repo_url:
package.vcs_tool = 'svn'
elif repo_url.startswith('hg') or 'mercurial.selenic.com' in repo_url:
package.vcs_tool = 'hg'
elif repo_url.startswith('fossil') or 'fossil-scm.org' in repo_url:
package.vcs_tool = 'fossil'
else:
package.vcs_tool = 'git'
package.vcs_repository = parse_repo_url(repo.get('url'))
return package
def deps_mapper(deps, package, field_name):
"""
    Handle deps such as dependencies, devDependencies.
    Update the dependencies mapping of the package and return the package.
https://getcomposer.org/doc/04-schema.md#package-links
"""
dep_types = {
'dependencies': models.dep_runtime,
'devDependencies': models.dep_dev,
}
resolved_type = dep_types[field_name]
dependencies = []
for name, version_constraint in deps.items():
dep = models.Dependency(name=name, version_constraint=version_constraint)
dependencies.append(dep)
if resolved_type in package.dependencies:
package.dependencies[resolved_type].extend(dependencies)
else:
package.dependencies[resolved_type] = dependencies
return package
dependencies_mapper = partial(deps_mapper, field_name='dependencies')
dev_dependencies_mapper = partial(deps_mapper, field_name='devDependencies')
def parse_person(persons):
"""
https://getcomposer.org/doc/04-schema.md#authors
A "person" is an object with a "name" field and optionally "url" and "email".
Yield a name, email, url tuple for a person object
A person can be in the form:
"authors": [
{
"name": "Nils Adermann",
"email": "[email protected]",
"homepage": "http://www.naderman.de",
"role": "Developer"
},
{
"name": "Jordi Boggiano",
"email": "[email protected]",
"homepage": "http://seld.be",
"role": "Developer"
}
]
Both forms are equivalent.
"""
if isinstance(persons, list):
for person in persons:
# ensure we have our three values
name = person.get('name')
email = person.get('email')
url = person.get('homepage')
yield name and name.strip(), email and email.strip('<> '), url and url.strip('() ')
else:
raise Exception('Incorrect PHP composer composer.json person: %(person)r' % locals())
| yasharmaster/scancode-toolkit | src/packagedcode/phpcomposer.py | Python | apache-2.0 | 11,144 | 0.001884 |
"""Single slice vgg with normalised scale.
"""
import functools
import lasagne as nn
import numpy as np
import theano
import theano.tensor as T
import data_loader
import deep_learning_layers
import layers
import preprocess
import postprocess
import objectives
import theano_printer
import updates
# Random params
rng = np.random
take_a_dump = False # dump a lot of data in a pkl-dump file. (for debugging)
dump_network_loaded_data = False # dump the outputs from the dataloader (for debugging)
# Memory usage scheme
caching = None
# Save and validation frequency
validate_every = 10
validate_train_set = True
save_every = 10
restart_from_save = False
# Training (schedule) parameters
# - batch sizes
batch_size = 32
sunny_batch_size = 4
batches_per_chunk = 16
AV_SLICE_PER_PAT = 11
num_epochs_train = 50 * AV_SLICE_PER_PAT
# - learning rate and method
base_lr = .0001
learning_rate_schedule = {
0: base_lr,
4*num_epochs_train/5: base_lr/10,
}
momentum = 0.9
build_updates = updates.build_adam_updates
# Preprocessing stuff
cleaning_processes = [
preprocess.set_upside_up,]
cleaning_processes_post = [
functools.partial(preprocess.normalize_contrast_zmuv, z=2)]
augmentation_params = {
"rotation": (-16, 16),
"shear": (0, 0),
"translation": (-8, 8),
"flip_vert": (0, 1),
"roll_time": (0, 0),
"flip_time": (0, 0),
}
preprocess_train = preprocess.preprocess_normscale
preprocess_validation = functools.partial(preprocess_train, augment=False)
preprocess_test = preprocess_train
sunny_preprocess_train = preprocess.sunny_preprocess_with_augmentation
sunny_preprocess_validation = preprocess.sunny_preprocess_validation
sunny_preprocess_test = preprocess.sunny_preprocess_validation
# Data generators
create_train_gen = data_loader.generate_train_batch
create_eval_valid_gen = functools.partial(data_loader.generate_validation_batch, set="validation")
create_eval_train_gen = functools.partial(data_loader.generate_validation_batch, set="train")
create_test_gen = functools.partial(data_loader.generate_test_batch, set=["validation", "test"])
# Input sizes
image_size = 128
data_sizes = {
"sliced:data:singleslice:difference:middle": (batch_size, 29, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:singleslice:difference": (batch_size, 29, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:singleslice": (batch_size, 30, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:ax": (batch_size, 30, 15, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:shape": (batch_size, 2,),
"sunny": (sunny_batch_size, 1, image_size, image_size)
# TBC with the metadata
}
# Objective
l2_weight = 0.000
l2_weight_out = 0.000
def build_objective(interface_layers):
# l2 regu on certain layers
l2_penalty = nn.regularization.regularize_layer_params_weighted(
interface_layers["regularizable"], nn.regularization.l2)
# build objective
return objectives.KaggleObjective(interface_layers["outputs"], penalty=l2_penalty)
# Testing
postprocess = postprocess.postprocess
test_time_augmentations = 100 * AV_SLICE_PER_PAT  # More augmentations since we only use single slices
# Architecture
def build_model():
#################
# Regular model #
#################
input_size = data_sizes["sliced:data:singleslice"]
l0 = nn.layers.InputLayer(input_size)
l1a = nn.layers.dnn.Conv2DDNNLayer(l0, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=64, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l1b = nn.layers.dnn.Conv2DDNNLayer(l1a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=64, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l1 = nn.layers.dnn.MaxPool2DDNNLayer(l1b, pool_size=(2,2), stride=(2,2))
l2a = nn.layers.dnn.Conv2DDNNLayer(l1, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=128, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l2b = nn.layers.dnn.Conv2DDNNLayer(l2a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=128, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l2 = nn.layers.dnn.MaxPool2DDNNLayer(l2b, pool_size=(2,2), stride=(2,2))
l3a = nn.layers.dnn.Conv2DDNNLayer(l2, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=256, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l3b = nn.layers.dnn.Conv2DDNNLayer(l3a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=256, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l3c = nn.layers.dnn.Conv2DDNNLayer(l3b, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=256, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l3 = nn.layers.dnn.MaxPool2DDNNLayer(l3c, pool_size=(2,2), stride=(2,2))
l4a = nn.layers.dnn.Conv2DDNNLayer(l3, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l4b = nn.layers.dnn.Conv2DDNNLayer(l4a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l4c = nn.layers.dnn.Conv2DDNNLayer(l4b, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l4 = nn.layers.dnn.MaxPool2DDNNLayer(l4c, pool_size=(2,2), stride=(2,2))
l5a = nn.layers.dnn.Conv2DDNNLayer(l4, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l5b = nn.layers.dnn.Conv2DDNNLayer(l5a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l5c = nn.layers.dnn.Conv2DDNNLayer(l5b, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l5 = nn.layers.dnn.MaxPool2DDNNLayer(l5c, pool_size=(2,2), stride=(2,2))
# Systole Dense layers
ldsys1 = nn.layers.DenseLayer(l5, num_units=1024, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
ldsys1drop = nn.layers.dropout(ldsys1, p=0.5)
ldsys2 = nn.layers.DenseLayer(ldsys1drop, num_units=1024, W=nn.init.Orthogonal("relu"),b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
ldsys2drop = nn.layers.dropout(ldsys2, p=0.5)
ldsys3 = nn.layers.DenseLayer(ldsys2drop, num_units=600, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.softmax)
    ldsys3drop = nn.layers.dropout(ldsys3, p=0.5)  # dropout at the output might encourage adjacent neurons to correlate
    l_systole = layers.CumSumLayer(ldsys3drop)
# Diastole Dense layers
lddia1 = nn.layers.DenseLayer(l5, num_units=1024, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
lddia1drop = nn.layers.dropout(lddia1, p=0.5)
lddia2 = nn.layers.DenseLayer(lddia1drop, num_units=1024, W=nn.init.Orthogonal("relu"),b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
lddia2drop = nn.layers.dropout(lddia2, p=0.5)
lddia3 = nn.layers.DenseLayer(lddia2drop, num_units=600, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.softmax)
    lddia3drop = nn.layers.dropout(lddia3, p=0.5)  # dropout at the output might encourage adjacent neurons to correlate
l_diastole = layers.CumSumLayer(lddia3drop)
return {
"inputs":{
"sliced:data:singleslice": l0
},
"outputs": {
"systole": l_systole,
"diastole": l_diastole,
},
"regularizable": {
ldsys1: l2_weight,
ldsys2: l2_weight,
ldsys3: l2_weight_out,
lddia1: l2_weight,
lddia2: l2_weight,
lddia3: l2_weight_out,
},
}
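
# Illustrative sketch (assumes Lasagne with the cuDNN layers is importable):
# the config is normally consumed by the training framework, but the network
# can be built standalone to inspect its parameter count.
#
#     interface_layers = build_model()
#     n_params = nn.layers.count_params(interface_layers["outputs"]["systole"])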
| 317070/kaggle-heart | configurations/je_ss_smcrps_nrmsc_dropoutput.py | Python | mit | 8,198 | 0.009758 |
__author__ = 'ENG-5 USER'
from numpy import *
import numpy as np
| Geekly/framepy | pump.py | Python | gpl-2.0 | 68 | 0.014706 |
from ..workspace import Block
from twisted.internet import defer
from .variables import lexical_variable
import operator
class logic_null (Block):
def eval (self):
return defer.succeed(None)
class logic_boolean (Block):
def eval (self):
return defer.succeed(self.fields['BOOL'] == 'TRUE')
class logic_negate (Block):
outputType = bool
def eval (self):
def negate (result):
if result is None:
return None
return result == False
self._complete = self.getInputValue('BOOL').addCallback(negate)
return self._complete
_operators_map = {
"EQ": operator.eq,
"NEQ": operator.ne,
"LT": operator.lt,
"LTE": operator.le,
"GT": operator.gt,
"GTE": operator.ge
}
def _compare (lhs, rhs, op_id):
if lhs is None or rhs is None:
return None
op = _operators_map[op_id]
return op(lhs, rhs)
# Emit a warning if bad op given
class logic_compare (Block):
outputType = bool
def eval (self):
lhs = self.getInputValue('A')
rhs = self.getInputValue('B')
op_id = self.fields['OP']
def _eval (results):
lhs, rhs = results
return _compare(lhs, rhs, op_id)
self._complete = defer.gatherResults([lhs, rhs]).addCallback(_eval)
return self._complete
class lexical_variable_compare (lexical_variable):
outputType = bool
def eval (self):
variable = self._getVariable()
if variable is None:
self.emitLogMessage(
"Unknown variable: " + str(self.getFieldValue('VAR')),
"error"
)
return defer.succeed(None)
value = self.getFieldValue('VALUE')
op_id = self.getFieldValue('OP')
unit = self.getFieldValue('UNIT', None)
if isinstance(unit, (int, float)):
value *= unit
return defer.succeed(_compare(variable.value, value, op_id))
class logic_operation (Block):
outputType = bool
def eval (self):
@defer.inlineCallbacks
def _run ():
op = self.fields['OP']
lhs = yield self.getInputValue('A')
if lhs is None:
return
if op == "AND":
if bool(lhs):
rhs = yield self.getInputValue('B')
if rhs is None:
return
defer.returnValue(bool(rhs))
else:
defer.returnValue(False)
elif op == "OR":
if bool(lhs):
defer.returnValue(True)
else:
rhs = yield self.getInputValue('B')
if rhs is None:
return
defer.returnValue(bool(rhs))
# Emit a warning
return
self._complete = _run()
return self._complete
class logic_ternary (Block):
# TODO: outputType of then and else should be the same.
# this is then the outputType of the logic_ternary block.
def eval (self):
@defer.inlineCallbacks
def _run ():
test = yield self.getInputValue('IF')
if test is None:
return
if bool(test):
result = yield self.getInputValue('THEN')
defer.returnValue(result)
else:
result = yield self.getInputValue('ELSE')
defer.returnValue(result)
self._complete = _run()
return self._complete
| richardingham/octopus | octopus/blocktopus/blocks/logic.py | Python | mit | 2,889 | 0.039114 |
# This is an auto-generated file. Use admin/change-versions to update.
from twisted.python import versions
version = versions.Version(__name__[:__name__.rfind('.')], 0, 6, 0)
| kenorb-contrib/BitTorrent | twisted/web/_version.py | Python | gpl-3.0 | 175 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'wangx'
import urllib2
from bs4 import BeautifulSoup
import getProvinceList
provinceList = getProvinceList.main()
# module-level accumulator of county names, filled by get()
coun = []
def get(net):
result = []
try:
html = urllib2.urlopen(net,timeout=10).read()
except:
html=''
while not html:
html = urllib2.urlopen(net,timeout=10).read()
soup = BeautifulSoup(html)
temp = soup.body.find(class_='lqcontentBoxH').find(class_='contentboxTab').find(class_='contentboxTab1').find(class_='conMidtab').find_all(class_='conMidtab3')
print len(temp)
for i in temp:
city = i.td.text
j = i.find_all('tr')
for k in j:
result.append((city,k.a.text,k.a.get('href')))
coun.append(k.a.text)
return result
def gettotal():
totalCountryList = []
for i in provinceList.keys():
net = provinceList[i]
temp = get(net)
for j in temp:
row = (i,)+j
totalCountryList.append(row)
file = open('totalCountryList','w')
text=''
text = str(totalCountryList)
file.write(text)
file.close()
def test():
    # report any duplicate county names collected by get()
    seen = []
    for i in coun:
        if i in seen:
            print i
        seen.append(i)
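
if __name__ == '__main__':
    # Illustrative entry point (network access required): crawl every
    # province page and dump the flattened rows to 'totalCountryList'.
    gettotal()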
| ramwin1/environment_spider | weather/getCountryList.py | Python | gpl-2.0 | 1,285 | 0.012451 |
#!/usr/bin/env python
#
# Copyright 2011 Google Inc. All Rights Reserved.
import unittest
from google.appengine.api import files
from google.appengine.ext import db
from mapreduce import control
from mapreduce import model
from mapreduce import output_writers
from mapreduce import test_support
from testlib import testutil
BLOBSTORE_WRITER_NAME = (output_writers.__name__ + "." +
output_writers.BlobstoreOutputWriter.__name__)
FILE_WRITER_NAME = (output_writers.__name__ + "." +
output_writers.FileOutputWriter.__name__)
class TestEntity(db.Model):
"""Test entity class."""
def test_handler_yield_key_str(entity):
"""Test handler which yields entity key."""
yield str(entity.key()) + "\n"
class FileOutputWriterEndToEndTest(testutil.HandlerTestBase):
"""End-to-end tests for FileOutputWriter using googlestore."""
def testSingleShard(self):
entity_count = 1000
for _ in range(entity_count):
TestEntity().put()
mapreduce_id = control.start_map(
"test_map",
__name__ + ".test_handler_yield_key_str",
"mapreduce.input_readers.DatastoreInputReader",
{
"entity_kind": __name__ + "." + TestEntity.__name__,
"filesystem": "gs",
"gs_bucket_name": "bucket"
},
shard_count=4,
base_path="/mapreduce_base_path",
output_writer_spec=FILE_WRITER_NAME)
test_support.execute_until_empty(self.taskqueue)
mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
filenames = output_writers.FileOutputWriter.get_filenames(mapreduce_state)
self.assertEqual(1, len(filenames))
self.assertTrue(filenames[0].startswith("/gs/bucket/"))
with files.open(filenames[0], "r") as f:
data = f.read(10000000)
self.assertEquals(1000, len(data.strip().split("\n")))
def testDedicatedParams(self):
entity_count = 1000
for _ in range(entity_count):
TestEntity().put()
mapreduce_id = control.start_map(
"test_map",
__name__ + ".test_handler_yield_key_str",
"mapreduce.input_readers.DatastoreInputReader",
{
"input_reader": {
"entity_kind": __name__ + "." + TestEntity.__name__,
},
"output_writer": {
"filesystem": "gs",
"gs_bucket_name": "bucket",
},
},
shard_count=4,
base_path="/mapreduce_base_path",
output_writer_spec=FILE_WRITER_NAME)
test_support.execute_until_empty(self.taskqueue)
mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
filenames = output_writers.FileOutputWriter.get_filenames(mapreduce_state)
self.assertEqual(1, len(filenames))
self.assertTrue(filenames[0].startswith("/gs/bucket/"))
with files.open(filenames[0], "r") as f:
data = f.read(10000000)
self.assertEquals(1000, len(data.strip().split("\n")))
def testMultipleShards(self):
entity_count = 1000
for _ in range(entity_count):
TestEntity().put()
mapreduce_id = control.start_map(
"test_map",
__name__ + ".test_handler_yield_key_str",
"mapreduce.input_readers.DatastoreInputReader",
{
"entity_kind": __name__ + "." + TestEntity.__name__,
"output_sharding": "input",
"filesystem": "gs",
},
shard_count=4,
base_path="/mapreduce_base_path",
output_writer_spec=BLOBSTORE_WRITER_NAME)
test_support.execute_until_empty(self.taskqueue)
mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
filenames = output_writers.BlobstoreOutputWriter.get_filenames(
mapreduce_state)
self.assertEqual(4, len(filenames))
file_lengths = []
for filename in filenames:
self.assertTrue(filename.startswith("/blobstore/"))
self.assertFalse(filename.startswith("/blobstore/writable:"))
with files.open(filename, "r") as f:
data = f.read(10000000)
file_lengths.append(len(data.strip().split("\n")))
# these numbers are totally random and depend on our sharding,
# which is quite deterministic.
expected_lengths = [199, 210, 275, 316]
self.assertEqual(1000, sum(expected_lengths))
self.assertEquals(expected_lengths, file_lengths)
class BlobstoreOutputWriterEndToEndTest(testutil.HandlerTestBase):
"""End-to-end tests for BlobstoreOutputWriter.
BlobstoreOutputWriter isn't complex enough yet to do extensive
unit tests. Do end-to-end tests just to check that it works.
"""
def testSingleShard(self):
entity_count = 1000
for _ in range(entity_count):
TestEntity().put()
mapreduce_id = control.start_map(
"test_map",
__name__ + ".test_handler_yield_key_str",
"mapreduce.input_readers.DatastoreInputReader",
{
"entity_kind": __name__ + "." + TestEntity.__name__,
},
shard_count=4,
base_path="/mapreduce_base_path",
output_writer_spec=BLOBSTORE_WRITER_NAME)
test_support.execute_until_empty(self.taskqueue)
mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
filenames = output_writers.BlobstoreOutputWriter.get_filenames(
mapreduce_state)
self.assertEqual(1, len(filenames))
blob_name = filenames[0]
self.assertTrue(blob_name.startswith("/blobstore/"))
self.assertFalse(blob_name.startswith("/blobstore/writable:"))
with files.open(blob_name, "r") as f:
data = f.read(10000000)
self.assertEquals(1000, len(data.strip().split("\n")))
def testMultipleShards(self):
entity_count = 1000
for _ in range(entity_count):
TestEntity().put()
mapreduce_id = control.start_map(
"test_map",
__name__ + ".test_handler_yield_key_str",
"mapreduce.input_readers.DatastoreInputReader",
{
"entity_kind": __name__ + "." + TestEntity.__name__,
"output_sharding": "input",
},
shard_count=4,
base_path="/mapreduce_base_path",
output_writer_spec=BLOBSTORE_WRITER_NAME)
test_support.execute_until_empty(self.taskqueue)
mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
filenames = output_writers.BlobstoreOutputWriter.get_filenames(
mapreduce_state)
self.assertEqual(4, len(filenames))
file_lengths = []
for filename in filenames:
self.assertTrue(filename.startswith("/blobstore/"))
self.assertFalse(filename.startswith("/blobstore/writable:"))
with files.open(filename, "r") as f:
data = f.read(10000000)
file_lengths.append(len(data.strip().split("\n")))
# these numbers are totally random and depend on our sharding,
# which is quite deterministic.
expected_lengths = [199, 210, 275, 316]
self.assertEqual(1000, sum(expected_lengths))
self.assertEquals(expected_lengths, file_lengths)
if __name__ == "__main__":
unittest.main()
| bslatkin/8-bits | appengine-mapreduce/python/test/mapreduce/output_writers_end_to_end_test.py | Python | apache-2.0 | 7,023 | 0.004129 |
from synergine.synergy.event.Action import Action
class A(Action):
_depend = []
class B(Action):
_depend = [A]
class C(Action):
_depend = [B]
class D(Action):
_depend = []
class F(Action):
_depend = [C]
class E(Action):
_depend = [B, F]
class G(Action):
_depend = []
class H(Action):
_depend = [B] | buxx/synergine | tests/src/event/test_actions.py | Python | apache-2.0 | 337 | 0.026706 |
import fifo
import random
import time
def insertion_sort(elemToTry):
    tab = []
    for a in elemToTry:
        tab.append(a)
        place(tab, len(tab) - 1)  # insert each new element into the sorted prefix
    return tab


def invertPlace(tableau, indiceOne, indiceTwo):
    tableau[indiceTwo], tableau[indiceOne] = tableau[indiceOne], tableau[indiceTwo]


def place(tableau, indice):
    # shift the element at `indice` left until the prefix before it is sorted
    while indice > 0 and tableau[indice] < tableau[indice - 1]:
        invertPlace(tableau, indice - 1, indice)
        indice = indice - 1
#############################################################
def bucketSort(table, index=lambda a: a >> 6):
    tab = [None]
    for a in table:
        if len(tab) - 1 < index(a):
            tab = tab + [None] * (index(a) - len(tab) + 1)  # grow bucket list
        if tab[index(a)] is None:
            tab[index(a)] = fifo.Deque(a)
        else:
            tab[index(a)].push_last(a)
    tabret = []
    for a in tab:
        if a is not None:  # skip empty buckets
            tabret = tabret + insertion_sort(a)
    return tabret
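

if __name__ == '__main__':
    # Quick sanity check (illustrative; assumes fifo.Deque(x) builds a
    # one-element deque that is iterable and supports push_last, as used above).
    data = [random.randint(0, 255) for _ in range(50)]
    assert bucketSort(data) == sorted(data)
    print("bucketSort OK")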
| tiregram/algo-E3FI | tp4/exo1.py | Python | gpl-3.0 | 1,042 | 0.028791 |
def clean_dict_repr(mw):
"""Produce a repr()-like output of dict mw with ordered keys"""
return '{' + \
', '.join('{k!r}: {v!r}'.format(k=k, v=v) for k, v in
sorted(mw.items())) +\
'}'
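

if __name__ == '__main__':
    # Minimal check (illustrative): keys come out in sorted order regardless
    # of insertion order.
    assert clean_dict_repr({'b': 2, 'a': 1}) == "{'a': 1, 'b': 2}"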
| npilon/planterbox | planterbox/util.py | Python | mit | 236 | 0 |
# (C) British Crown Copyright 2013, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
'''
Provide grib 1 and 2 phenomenon translations to + from CF terms.
This is done by wrapping '_grib_cf_map.py',
which is in a format provided by the metadata translation project.
Currently supports only these ones:
* grib1 --> cf
* grib2 --> cf
* cf --> grib2
'''
import collections
import warnings
import numpy as np
from iris.fileformats.grib import _grib_cf_map as grcf
import iris.std_names
import iris.unit
class LookupTable(dict):
"""
Specialised dictionary object for making lookup tables.
Returns None for unknown keys (instead of raising exception).
Raises exception for any attempt to change an existing entry,
(but it is still possible to remove keys)
"""
def __init__(self, *args, **kwargs):
self._super = super(LookupTable, self)
self._super.__init__(*args, **kwargs)
def __getitem__(self, key):
        if key not in self:
return None
return self._super.__getitem__(key)
def __setitem__(self, key, value):
if key in self and self[key] is not value:
raise KeyError('Attempted to set dict[{}] = {}, '
'but this is already set to {}.'.format(
key, value, self[key]))
self._super.__setitem__(key, value)
# Define namedtuples for keys+values of the Grib1 lookup table.
_Grib1ToCfKeyClass = collections.namedtuple(
'Grib1CfKey',
('table2_version', 'centre_number', 'param_number'))
# NOTE: this form is currently used for both Grib1 *and* Grib2
_GribToCfDataClass = collections.namedtuple(
'Grib1CfData',
('standard_name', 'long_name', 'units', 'set_height'))
# Create the grib1-to-cf lookup table.
def _make_grib1_cf_table():
""" Build the Grib1 to CF phenomenon translation table. """
table = LookupTable()
def _make_grib1_cf_entry(table2_version, centre_number, param_number,
standard_name, long_name, units, set_height=None):
"""
Check data, convert types and create a new _GRIB1_CF_TABLE key/value.
Note that set_height is an optional parameter. Used to denote
phenomena that imply a height definition (agl),
        e.g. "2-metre temperature".
"""
grib1_key = _Grib1ToCfKeyClass(table2_version=int(table2_version),
centre_number=int(centre_number),
param_number=int(param_number))
if standard_name is not None:
if standard_name not in iris.std_names.STD_NAMES:
warnings.warn('{} is not a recognised CF standard name '
'(skipping).'.format(standard_name))
return None
# convert units string to iris Unit (i.e. mainly, check it is good)
iris_units = iris.unit.Unit(units)
cf_data = _GribToCfDataClass(standard_name=standard_name,
long_name=long_name,
units=iris_units,
set_height=set_height)
return (grib1_key, cf_data)
# Interpret the imported Grib1-to-CF table.
for (grib1data, cfdata) in grcf.GRIB1Local_TO_CF.iteritems():
assert grib1data.edition == 1
association_entry = _make_grib1_cf_entry(
table2_version=grib1data.t2version,
centre_number=grib1data.centre,
param_number=grib1data.iParam,
standard_name=cfdata.standard_name,
long_name=cfdata.long_name,
units=cfdata.unit)
if association_entry is not None:
key, value = association_entry
table[key] = value
# Do the same for special Grib1 codes that include an implied height level.
for (grib1data, (cfdata, extra_dimcoord)) \
in grcf.GRIB1LocalConstrained_TO_CF.iteritems():
assert grib1data.edition == 1
if extra_dimcoord.standard_name != 'height':
raise ValueError('Got implied dimension coord of "{}", '
'currently can only handle "height".'.format(
extra_dimcoord.standard_name))
if extra_dimcoord.units != 'm':
raise ValueError('Got implied dimension units of "{}", '
'currently can only handle "m".'.format(
extra_dimcoord.units))
if len(extra_dimcoord.points) != 1:
raise ValueError('Implied dimension has {} points, '
'currently can only handle 1.'.format(
len(extra_dimcoord.points)))
association_entry = _make_grib1_cf_entry(
table2_version=int(grib1data.t2version),
centre_number=int(grib1data.centre),
param_number=int(grib1data.iParam),
standard_name=cfdata.standard_name,
long_name=cfdata.long_name,
units=cfdata.unit,
set_height=extra_dimcoord.points[0])
if association_entry is not None:
key, value = association_entry
table[key] = value
return table
_GRIB1_CF_TABLE = _make_grib1_cf_table()
# Define a namedtuple for the keys of the Grib2 lookup table.
_Grib2ToCfKeyClass = collections.namedtuple(
'Grib2CfKey',
('param_discipline', 'param_category', 'param_number'))
# Create the grib2-to-cf lookup table.
def _make_grib2_to_cf_table():
""" Build the Grib2 to CF phenomenon translation table. """
table = LookupTable()
def _make_grib2_cf_entry(param_discipline, param_category, param_number,
standard_name, long_name, units):
"""
Check data, convert types and make a _GRIB2_CF_TABLE key/value pair.
        Unlike the Grib1 form there is no set_height option: the resulting
        cf_data always carries set_height=None.
"""
grib2_key = _Grib2ToCfKeyClass(param_discipline=int(param_discipline),
param_category=int(param_category),
param_number=int(param_number))
if standard_name is not None:
if standard_name not in iris.std_names.STD_NAMES:
warnings.warn('{} is not a recognised CF standard name '
'(skipping).'.format(standard_name))
return None
# convert units string to iris Unit (i.e. mainly, check it is good)
iris_units = iris.unit.Unit(units)
cf_data = _GribToCfDataClass(standard_name=standard_name,
long_name=long_name,
units=iris_units,
set_height=None)
return (grib2_key, cf_data)
# Interpret the grib2 info from grib_cf_map
for grib2data, cfdata in grcf.GRIB2_TO_CF.iteritems():
assert grib2data.edition == 2
association_entry = _make_grib2_cf_entry(
param_discipline=grib2data.discipline,
param_category=grib2data.category,
param_number=grib2data.number,
standard_name=cfdata.standard_name,
long_name=cfdata.long_name,
units=cfdata.unit)
if association_entry is not None:
key, value = association_entry
table[key] = value
return table
_GRIB2_CF_TABLE = _make_grib2_to_cf_table()
# Define namedtuples for key+values of the cf-to-grib2 lookup table.
_CfToGrib2KeyClass = collections.namedtuple(
'CfGrib2Key',
('standard_name', 'long_name'))
_CfToGrib2DataClass = collections.namedtuple(
'CfGrib2Data',
('discipline', 'category', 'number', 'units'))
# Create the cf-to-grib2 lookup table.
def _make_cf_to_grib2_table():
    """ Build the CF to Grib2 phenomenon translation table. """
table = LookupTable()
def _make_cf_grib2_entry(standard_name, long_name,
param_discipline, param_category, param_number,
units):
"""
Check data, convert types and make a new _CF_TABLE key/value pair.
"""
assert standard_name is not None or long_name is not None
if standard_name is not None:
long_name = None
if standard_name not in iris.std_names.STD_NAMES:
warnings.warn('{} is not a recognised CF standard name '
'(skipping).'.format(standard_name))
return None
cf_key = _CfToGrib2KeyClass(standard_name, long_name)
# convert units string to iris Unit (i.e. mainly, check it is good)
iris_units = iris.unit.Unit(units)
grib2_data = _CfToGrib2DataClass(discipline=int(param_discipline),
category=int(param_category),
number=int(param_number),
units=iris_units)
return (cf_key, grib2_data)
# Interpret the imported CF-to-Grib2 table into a lookup table
for cfdata, grib2data in grcf.CF_TO_GRIB2.iteritems():
assert grib2data.edition == 2
iris_units = iris.unit.Unit(cfdata.unit)
association_entry = _make_cf_grib2_entry(
standard_name=cfdata.standard_name,
long_name=cfdata.long_name,
param_discipline=grib2data.discipline,
param_category=grib2data.category,
param_number=grib2data.number,
units=iris_units)
if association_entry is not None:
key, value = association_entry
table[key] = value
return table
_CF_GRIB2_TABLE = _make_cf_to_grib2_table()
# Interface functions for translation lookup
def grib1_phenom_to_cf_info(table2_version, centre_number, param_number):
"""
Lookup grib-1 parameter --> cf_data or None.
Returned cf_data has attributes:
* standard_name
* long_name
* units : a :class:`iris.unit.Unit`
* set_height : a scalar 'height' value , or None
"""
grib1_key = _Grib1ToCfKeyClass(table2_version=table2_version,
centre_number=centre_number,
param_number=param_number)
return _GRIB1_CF_TABLE[grib1_key]
def grib2_phenom_to_cf_info(param_discipline, param_category, param_number):
"""
Lookup grib-2 parameter --> cf_data or None.
Returned cf_data has attributes:
* standard_name
* long_name
* units : a :class:`iris.unit.Unit`
"""
grib2_key = _Grib2ToCfKeyClass(param_discipline=int(param_discipline),
param_category=int(param_category),
param_number=int(param_number))
return _GRIB2_CF_TABLE[grib2_key]
def cf_phenom_to_grib2_info(standard_name, long_name=None):
"""
Lookup CF names --> grib2_data or None.
Returned grib2_data has attributes:
* discipline
* category
* number
* units : a :class:`iris.unit.Unit`
The unit represents the defined reference units for the message data.
"""
if standard_name is not None:
long_name = None
return _CF_GRIB2_TABLE[(standard_name, long_name)]
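
# Illustrative lookup sketch (the parameter numbers below are hypothetical):
#
#     cf = grib2_phenom_to_cf_info(param_discipline=0, param_category=0,
#                                  param_number=0)
#     if cf is not None:
#         print cf.standard_name, cf.units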
| kwilliams-mo/iris | lib/iris/fileformats/grib/grib_phenom_translation.py | Python | gpl-3.0 | 12,009 | 0.000167 |
# I'll attempt to research the practicality of making a PID tuner,
# possibly by hard-coding gains and tuning against the error signal,
# or maybe by using TensorFlow,
# but it is just an idea at the moment.
| purduerov/XX-Core | rov/movement/controls/PID_Tuner.py | Python | mit | 174 | 0 |
import genologics_sql.utils
from genologics_sql.tables import *
def test_connection():
session=genologics_sql.utils.get_session()
assert(session is not None)
def test_project_query():
session=genologics_sql.utils.get_session()
pj=session.query(Project).limit(1)
assert(pj is not None)
| Galithil/genologics_sql | tests/test_default.py | Python | mit | 308 | 0.019481 |
'''Trains a simple convnet on the MNIST dataset.
Gets to 99.25% test accuracy after 12 epochs
(there is still a lot of margin for parameter tuning).
16 seconds per epoch on a GRID K520 GPU.
'''
from __future__ import print_function
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras import backend as K
from keras.models import load_model
batch_size = 128
num_classes = 10
epochs = 12
# input image dimensions
img_rows, img_cols = 28, 28
# the data, shuffled and split between train and test sets
(x_train, y_train), (x_test, y_test) = mnist.load_data()
if K.image_data_format() == 'channels_first':
x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)
x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)
input_shape = (1, img_rows, img_cols)
else:
x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)
input_shape = (img_rows, img_cols, 1)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3),
activation='relu',
input_shape=input_shape))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss=keras.losses.categorical_crossentropy,
optimizer=keras.optimizers.Adadelta(),
metrics=['accuracy'])
model.fit(x_train, y_train,
batch_size=batch_size,
epochs=epochs,
verbose=1,
validation_data=(x_test, y_test))
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
# Create an HDF5 file (save the trained model)
model.save('mnist_cnn_model.h5')
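# Sketch: reloading the saved model (this is what load_model, imported above,
# is for); evaluating the reloaded model should reproduce the score printed
# earlier. Left commented out so the training script's behaviour is unchanged.
# model = load_model('mnist_cnn_model.h5')
# print(model.evaluate(x_test, y_test, verbose=0))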
| yoshiweb/keras-mnist | keras-mnist/mnist_cnn/mnist_cnn_train.py | Python | mit | 2,369 | 0 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2020 Rhilip <[email protected]>
import time
from flask import Blueprint, request, jsonify, redirect
from app import cache
from .gen import Gen
getinfo_blueprint = Blueprint('infogen', __name__, url_prefix="/movieinfo")
docs_url = "https://github.com/Rhilip/PT-help/blob/master/modules/infogen/README.md"
def get_key(key):
    # Use .get() so a missing key returns None instead of raising KeyError
    # on POST; this keeps the `is None` checks in gen() consistent.
    ret = None
    if request.method == "POST":
        ret = request.form.get(key)
    elif request.method == "GET":
        ret = request.args.get(key)
    return ret
@getinfo_blueprint.route("/gen", methods=["GET", "POST"])
def gen():
url = get_key("url")
if url is None:
site = get_key('site')
sid = get_key('sid')
if site is not None and sid is not None:
url = {'site': site, 'sid': sid}
if url:
t0 = time.time()
@cache.memoize(timeout=86400)
def gen_data(uri):
return Gen(url=uri).gen()
nocache = get_key("nocache")
if nocache:
cache.delete_memoized(gen_data, url)
data = gen_data(url)
data["cost"] = time.time() - t0
return jsonify(data)
else:
return redirect(docs_url, code=301)
| Rhilip/PT-help-server | modules/infogen/__init__.py | Python | mit | 1,231 | 0.000812 |
#!/usr/bin/python
#
# Copyright (c) 2018 Yuwei Zhou, <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_servicebusqueue
version_added: "2.8"
short_description: Manage Azure Service Bus queue.
description:
- Create, update or delete an Azure Service Bus queue.
options:
resource_group:
description:
- name of resource group.
required: true
name:
description:
- name of the queue.
required: true
namespace:
description:
- Servicebus namespace name.
- A namespace is a scoping container for all messaging components.
- Multiple queues and topics can reside within a single namespace, and namespaces often serve as application containers.
required: true
state:
description:
- Assert the state of the queue. Use 'present' to create or update and
'absent' to delete.
default: present
choices:
- absent
- present
auto_delete_on_idle_in_seconds:
description:
- Time idle interval after which a queue is automatically deleted.
- The minimum duration is 5 minutes.
type: int
dead_lettering_on_message_expiration:
description:
- A value that indicates whether a queue has dead letter support when a message expires.
type: bool
default_message_time_to_live_seconds:
description:
- Default message timespan to live value.
- This is the duration after which the message expires, starting from when the message is sent to Service Bus.
- This is the default value used when TimeToLive is not set on a message itself.
type: int
enable_batched_operations:
description:
- Value that indicates whether server-side batched operations are enabled.
type: bool
enable_express:
description:
- Value that indicates whether Express Entities are enabled.
- An express topic or queue holds a message in memory temporarily before writing it to persistent storage.
type: bool
enable_partitioning:
description:
- A value that indicates whether the topic or queue is to be partitioned across multiple message brokers.
type: bool
forward_dead_lettered_messages_to:
description:
- Queue or topic name to forward the Dead Letter message for a queue.
forward_to:
description:
- Queue or topic name to forward the messages for a queue.
lock_duration_in_seconds:
description:
- Timespan duration of a peek-lock.
- The amount of time that the message is locked for other receivers.
- The maximum value for LockDuration is 5 minutes.
type: int
max_delivery_count:
description:
            - The maximum delivery count.
- A message is automatically deadlettered after this number of deliveries.
type: int
max_size_in_mb:
description:
- The maximum size of the queue in megabytes, which is the size of memory allocated for the queue.
type: int
requires_duplicate_detection:
description:
- A value indicating if this queue or topic requires duplicate detection.
type: bool
duplicate_detection_time_in_seconds:
description:
- TimeSpan structure that defines the duration of the duplicate detection history.
type: int
requires_session:
description:
- A value that indicates whether the queue supports the concept of sessions.
type: bool
status:
description:
- Status of the entity.
choices:
- active
- disabled
- send_disabled
- receive_disabled
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Yuwei Zhou (@yuwzho)"
'''
EXAMPLES = '''
- name: Create a queue
azure_rm_servicebusqueue:
name: subqueue
resource_group: myResourceGroup
namespace: bar
duplicate_detection_time_in_seconds: 600
'''
RETURN = '''
id:
description: Current state of the queue.
returned: success
type: str
'''
try:
from msrestazure.azure_exceptions import CloudError
except ImportError:
# This is handled in azure_rm_common
pass
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
from ansible.module_utils.common.dict_transformations import _snake_to_camel, _camel_to_snake
from ansible.module_utils._text import to_native
from datetime import datetime, timedelta
duration_spec_map = dict(
default_message_time_to_live='default_message_time_to_live_seconds',
duplicate_detection_history_time_window='duplicate_detection_time_in_seconds',
auto_delete_on_idle='auto_delete_on_idle_in_seconds',
lock_duration='lock_duration_in_seconds'
)
sas_policy_spec = dict(
state=dict(type='str', default='present', choices=['present', 'absent']),
name=dict(type='str', required=True),
regenerate_key=dict(type='bool'),
rights=dict(type='str', choices=['manage', 'listen', 'send', 'listen_send'])
)
class AzureRMServiceBusQueue(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(type='str', required=True),
name=dict(type='str', required=True),
state=dict(type='str', default='present', choices=['present', 'absent']),
namespace=dict(type='str', required=True),
auto_delete_on_idle_in_seconds=dict(type='int'),
dead_lettering_on_message_expiration=dict(type='bool'),
default_message_time_to_live_seconds=dict(type='int'),
duplicate_detection_time_in_seconds=dict(type='int'),
enable_batched_operations=dict(type='bool'),
enable_express=dict(type='bool'),
enable_partitioning=dict(type='bool'),
forward_dead_lettered_messages_to=dict(type='str'),
forward_to=dict(type='str'),
lock_duration_in_seconds=dict(type='int'),
max_delivery_count=dict(type='int'),
max_size_in_mb=dict(type='int'),
requires_duplicate_detection=dict(type='bool'),
requires_session=dict(type='bool'),
status=dict(type='str',
choices=['active', 'disabled', 'send_disabled', 'receive_disabled'])
)
self.resource_group = None
self.name = None
self.state = None
self.namespace = None
self.location = None
self.type = None
self.subscription_topic_name = None
self.auto_delete_on_idle_in_seconds = None
self.dead_lettering_on_message_expiration = None
self.default_message_time_to_live_seconds = None
self.enable_batched_operations = None
self.enable_express = None
self.enable_partitioning = None
self.forward_dead_lettered_messages_to = None
self.forward_to = None
self.lock_duration_in_seconds = None
self.max_delivery_count = None
self.max_size_in_mb = None
self.requires_duplicate_detection = None
self.status = None
self.results = dict(
changed=False,
id=None
)
super(AzureRMServiceBusQueue, self).__init__(self.module_arg_spec,
supports_check_mode=True)
def exec_module(self, **kwargs):
for key in list(self.module_arg_spec.keys()):
setattr(self, key, kwargs[key])
changed = False
original = self.get()
if self.state == 'present':
# Create the resource instance
params = dict(
dead_lettering_on_message_expiration=self.dead_lettering_on_message_expiration,
enable_batched_operations=self.enable_batched_operations,
enable_express=self.enable_express,
enable_partitioning=self.enable_partitioning,
forward_dead_lettered_messages_to=self.forward_dead_lettered_messages_to,
forward_to=self.forward_to,
max_delivery_count=self.max_delivery_count,
max_size_in_megabytes=self.max_size_in_mb
)
if self.status:
params['status'] = self.servicebus_models.EntityStatus(str.capitalize(_snake_to_camel(self.status)))
for k, v in duration_spec_map.items():
seconds = getattr(self, v)
if seconds:
params[k] = timedelta(seconds=seconds)
instance = self.servicebus_models.SBQueue(**params)
            if not original:
                changed = True
                result = instance
            else:
                result = original
attribute_map = set(self.servicebus_models.SBQueue._attribute_map.keys()) - set(self.servicebus_models.SBQueue._validation.keys())
for attribute in attribute_map:
value = getattr(instance, attribute)
if value and value != getattr(original, attribute):
changed = True
if changed and not self.check_mode:
result = self.create_or_update(instance)
self.results = self.to_dict(result)
elif original:
changed = True
if not self.check_mode:
self.delete()
self.results['deleted'] = True
self.results['changed'] = changed
return self.results
def create_or_update(self, param):
try:
client = self._get_client()
return client.create_or_update(self.resource_group, self.namespace, self.name, param)
except Exception as exc:
self.fail('Error creating or updating queue {0} - {1}'.format(self.name, str(exc.inner_exception) or str(exc)))
def delete(self):
try:
client = self._get_client()
client.delete(self.resource_group, self.namespace, self.name)
return True
except Exception as exc:
self.fail("Error deleting queue {0} - {1}".format(self.name, str(exc)))
def _get_client(self):
return self.servicebus_client.queues
def get(self):
try:
client = self._get_client()
return client.get(self.resource_group, self.namespace, self.name)
except Exception:
return None
def to_dict(self, instance):
result = dict()
attribute_map = self.servicebus_models.SBQueue._attribute_map
for attribute in attribute_map.keys():
value = getattr(instance, attribute)
if not value:
continue
if attribute_map[attribute]['type'] == 'duration':
if is_valid_timedelta(value):
key = duration_spec_map.get(attribute) or attribute
result[key] = int(value.total_seconds())
elif attribute == 'status':
result['status'] = _camel_to_snake(value)
elif isinstance(value, self.servicebus_models.MessageCountDetails):
result[attribute] = value.as_dict()
elif isinstance(value, self.servicebus_models.SBSku):
result[attribute] = value.name.lower()
elif isinstance(value, datetime):
result[attribute] = str(value)
elif isinstance(value, str):
result[attribute] = to_native(value)
elif attribute == 'max_size_in_megabytes':
result['max_size_in_mb'] = value
else:
result[attribute] = value
return result
def is_valid_timedelta(value):
    # timedelta(10675199, 10085, 477581) is .NET's TimeSpan.MaxValue, which
    # the service returns when a duration is not set; treat it as missing.
    if value == timedelta(10675199, 10085, 477581):
        return None
    return value
def main():
AzureRMServiceBusQueue()
if __name__ == '__main__':
main()
| alxgu/ansible | lib/ansible/modules/cloud/azure/azure_rm_servicebusqueue.py | Python | gpl-3.0 | 12,400 | 0.002581 |
"""All constants related to the ZHA component."""
import enum
import logging
from typing import List
import bellows.zigbee.application
from zigpy.config import CONF_DEVICE_PATH # noqa: F401 # pylint: disable=unused-import
import zigpy_cc.zigbee.application
import zigpy_deconz.zigbee.application
import zigpy_xbee.zigbee.application
import zigpy_zigate.zigbee.application
import zigpy_znp.zigbee.application
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR
from homeassistant.components.climate import DOMAIN as CLIMATE
from homeassistant.components.cover import DOMAIN as COVER
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER
from homeassistant.components.fan import DOMAIN as FAN
from homeassistant.components.light import DOMAIN as LIGHT
from homeassistant.components.lock import DOMAIN as LOCK
from homeassistant.components.number import DOMAIN as NUMBER
from homeassistant.components.sensor import DOMAIN as SENSOR
from homeassistant.components.switch import DOMAIN as SWITCH
from .typing import CALLABLE_T
ATTR_ARGS = "args"
ATTR_ATTRIBUTE = "attribute"
ATTR_ATTRIBUTE_ID = "attribute_id"
ATTR_ATTRIBUTE_NAME = "attribute_name"
ATTR_AVAILABLE = "available"
ATTR_CLUSTER_ID = "cluster_id"
ATTR_CLUSTER_TYPE = "cluster_type"
ATTR_COMMAND_TYPE = "command_type"
ATTR_DEVICE_IEEE = "device_ieee"
ATTR_DEVICE_TYPE = "device_type"
ATTR_ENDPOINTS = "endpoints"
ATTR_ENDPOINT_NAMES = "endpoint_names"
ATTR_ENDPOINT_ID = "endpoint_id"
ATTR_IEEE = "ieee"
ATTR_IN_CLUSTERS = "in_clusters"
ATTR_LAST_SEEN = "last_seen"
ATTR_LEVEL = "level"
ATTR_LQI = "lqi"
ATTR_MANUFACTURER = "manufacturer"
ATTR_MANUFACTURER_CODE = "manufacturer_code"
ATTR_MEMBERS = "members"
ATTR_MODEL = "model"
ATTR_NEIGHBORS = "neighbors"
ATTR_NODE_DESCRIPTOR = "node_descriptor"
ATTR_NWK = "nwk"
ATTR_OUT_CLUSTERS = "out_clusters"
ATTR_POWER_SOURCE = "power_source"
ATTR_PROFILE_ID = "profile_id"
ATTR_QUIRK_APPLIED = "quirk_applied"
ATTR_QUIRK_CLASS = "quirk_class"
ATTR_RSSI = "rssi"
ATTR_SIGNATURE = "signature"
ATTR_TYPE = "type"
ATTR_UNIQUE_ID = "unique_id"
ATTR_VALUE = "value"
ATTR_WARNING_DEVICE_DURATION = "duration"
ATTR_WARNING_DEVICE_MODE = "mode"
ATTR_WARNING_DEVICE_STROBE = "strobe"
ATTR_WARNING_DEVICE_STROBE_DUTY_CYCLE = "duty_cycle"
ATTR_WARNING_DEVICE_STROBE_INTENSITY = "intensity"
BAUD_RATES = [2400, 4800, 9600, 14400, 19200, 38400, 57600, 115200, 128000, 256000]
BINDINGS = "bindings"
CHANNEL_ACCELEROMETER = "accelerometer"
CHANNEL_ANALOG_INPUT = "analog_input"
CHANNEL_ANALOG_OUTPUT = "analog_output"
CHANNEL_ATTRIBUTE = "attribute"
CHANNEL_BASIC = "basic"
CHANNEL_COLOR = "light_color"
CHANNEL_COVER = "window_covering"
CHANNEL_DOORLOCK = "door_lock"
CHANNEL_ELECTRICAL_MEASUREMENT = "electrical_measurement"
CHANNEL_EVENT_RELAY = "event_relay"
CHANNEL_FAN = "fan"
CHANNEL_HUMIDITY = "humidity"
CHANNEL_IAS_WD = "ias_wd"
CHANNEL_IDENTIFY = "identify"
CHANNEL_ILLUMINANCE = "illuminance"
CHANNEL_LEVEL = ATTR_LEVEL
CHANNEL_MULTISTATE_INPUT = "multistate_input"
CHANNEL_OCCUPANCY = "occupancy"
CHANNEL_ON_OFF = "on_off"
CHANNEL_POWER_CONFIGURATION = "power"
CHANNEL_PRESSURE = "pressure"
CHANNEL_SHADE = "shade"
CHANNEL_SMARTENERGY_METERING = "smartenergy_metering"
CHANNEL_TEMPERATURE = "temperature"
CHANNEL_THERMOSTAT = "thermostat"
CHANNEL_ZDO = "zdo"
CHANNEL_ZONE = ZONE = "ias_zone"
CLUSTER_COMMAND_SERVER = "server"
CLUSTER_COMMANDS_CLIENT = "client_commands"
CLUSTER_COMMANDS_SERVER = "server_commands"
CLUSTER_TYPE_IN = "in"
CLUSTER_TYPE_OUT = "out"
PLATFORMS = (
BINARY_SENSOR,
CLIMATE,
COVER,
DEVICE_TRACKER,
FAN,
LIGHT,
LOCK,
NUMBER,
SENSOR,
SWITCH,
)
CONF_BAUDRATE = "baudrate"
CONF_DATABASE = "database_path"
CONF_DEVICE_CONFIG = "device_config"
CONF_ENABLE_QUIRKS = "enable_quirks"
CONF_FLOWCONTROL = "flow_control"
CONF_RADIO_TYPE = "radio_type"
CONF_USB_PATH = "usb_path"
CONF_ZIGPY = "zigpy_config"
DATA_DEVICE_CONFIG = "zha_device_config"
DATA_ZHA = "zha"
DATA_ZHA_CONFIG = "config"
DATA_ZHA_BRIDGE_ID = "zha_bridge_id"
DATA_ZHA_CORE_EVENTS = "zha_core_events"
DATA_ZHA_DISPATCHERS = "zha_dispatchers"
DATA_ZHA_GATEWAY = "zha_gateway"
DATA_ZHA_PLATFORM_LOADED = "platform_loaded"
DEBUG_COMP_BELLOWS = "bellows"
DEBUG_COMP_ZHA = "homeassistant.components.zha"
DEBUG_COMP_ZIGPY = "zigpy"
DEBUG_COMP_ZIGPY_CC = "zigpy_cc"
DEBUG_COMP_ZIGPY_DECONZ = "zigpy_deconz"
DEBUG_COMP_ZIGPY_XBEE = "zigpy_xbee"
DEBUG_COMP_ZIGPY_ZIGATE = "zigpy_zigate"
DEBUG_LEVEL_CURRENT = "current"
DEBUG_LEVEL_ORIGINAL = "original"
DEBUG_LEVELS = {
DEBUG_COMP_BELLOWS: logging.DEBUG,
DEBUG_COMP_ZHA: logging.DEBUG,
DEBUG_COMP_ZIGPY: logging.DEBUG,
DEBUG_COMP_ZIGPY_CC: logging.DEBUG,
DEBUG_COMP_ZIGPY_DECONZ: logging.DEBUG,
DEBUG_COMP_ZIGPY_XBEE: logging.DEBUG,
DEBUG_COMP_ZIGPY_ZIGATE: logging.DEBUG,
}
DEBUG_RELAY_LOGGERS = [DEBUG_COMP_ZHA, DEBUG_COMP_ZIGPY]
DEFAULT_RADIO_TYPE = "ezsp"
DEFAULT_BAUDRATE = 57600
DEFAULT_DATABASE_NAME = "zigbee.db"
DEVICE_PAIRING_STATUS = "pairing_status"
DISCOVERY_KEY = "zha_discovery_info"
DOMAIN = "zha"
GROUP_ID = "group_id"
GROUP_IDS = "group_ids"
GROUP_NAME = "group_name"
MFG_CLUSTER_ID_START = 0xFC00
POWER_MAINS_POWERED = "Mains"
POWER_BATTERY_OR_UNKNOWN = "Battery or Unknown"
class RadioType(enum.Enum):
# pylint: disable=invalid-name
"""Possible options for radio type."""
znp = (
"ZNP = Texas Instruments Z-Stack ZNP protocol: CC253x, CC26x2, CC13x2",
zigpy_znp.zigbee.application.ControllerApplication,
)
ezsp = (
"EZSP = Silicon Labs EmberZNet protocol: Elelabs, HUSBZB-1, Telegesis",
bellows.zigbee.application.ControllerApplication,
)
deconz = (
"deCONZ = dresden elektronik deCONZ protocol: ConBee I/II, RaspBee I/II",
zigpy_deconz.zigbee.application.ControllerApplication,
)
ti_cc = (
"Legacy TI_CC = Texas Instruments Z-Stack ZNP protocol: CC253x, CC26x2, CC13x2",
zigpy_cc.zigbee.application.ControllerApplication,
)
zigate = (
"ZiGate = ZiGate Zigbee radios: PiZiGate, ZiGate USB-TTL, ZiGate WiFi",
zigpy_zigate.zigbee.application.ControllerApplication,
)
xbee = (
"XBee = Digi XBee Zigbee radios: Digi XBee Series 2, 2C, 3",
zigpy_xbee.zigbee.application.ControllerApplication,
)
@classmethod
def list(cls) -> List[str]:
"""Return a list of descriptions."""
return [e.description for e in RadioType]
@classmethod
def get_by_description(cls, description: str) -> str:
"""Get radio by description."""
for radio in cls:
if radio.description == description:
return radio.name
        raise ValueError("{} is not a valid radio type description".format(description))
def __init__(self, description: str, controller_cls: CALLABLE_T):
"""Init instance."""
self._desc = description
self._ctrl_cls = controller_cls
@property
def controller(self) -> CALLABLE_T:
"""Return controller class."""
return self._ctrl_cls
@property
def description(self) -> str:
"""Return radio type description."""
return self._desc
REPORT_CONFIG_MAX_INT = 900
REPORT_CONFIG_MAX_INT_BATTERY_SAVE = 10800
REPORT_CONFIG_MIN_INT = 30
REPORT_CONFIG_MIN_INT_ASAP = 1
REPORT_CONFIG_MIN_INT_IMMEDIATE = 0
REPORT_CONFIG_MIN_INT_OP = 5
REPORT_CONFIG_MIN_INT_BATTERY_SAVE = 3600
REPORT_CONFIG_RPT_CHANGE = 1
REPORT_CONFIG_DEFAULT = (
REPORT_CONFIG_MIN_INT,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_ASAP = (
REPORT_CONFIG_MIN_INT_ASAP,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_BATTERY_SAVE = (
REPORT_CONFIG_MIN_INT_BATTERY_SAVE,
REPORT_CONFIG_MAX_INT_BATTERY_SAVE,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_IMMEDIATE = (
REPORT_CONFIG_MIN_INT_IMMEDIATE,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_OP = (
REPORT_CONFIG_MIN_INT_OP,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
SENSOR_ACCELERATION = "acceleration"
SENSOR_BATTERY = "battery"
SENSOR_ELECTRICAL_MEASUREMENT = CHANNEL_ELECTRICAL_MEASUREMENT
SENSOR_GENERIC = "generic"
SENSOR_HUMIDITY = CHANNEL_HUMIDITY
SENSOR_ILLUMINANCE = CHANNEL_ILLUMINANCE
SENSOR_METERING = "metering"
SENSOR_OCCUPANCY = CHANNEL_OCCUPANCY
SENSOR_OPENING = "opening"
SENSOR_PRESSURE = CHANNEL_PRESSURE
SENSOR_TEMPERATURE = CHANNEL_TEMPERATURE
SENSOR_TYPE = "sensor_type"
SIGNAL_ADD_ENTITIES = "zha_add_new_entities"
SIGNAL_ATTR_UPDATED = "attribute_updated"
SIGNAL_AVAILABLE = "available"
SIGNAL_MOVE_LEVEL = "move_level"
SIGNAL_REMOVE = "remove"
SIGNAL_SET_LEVEL = "set_level"
SIGNAL_STATE_ATTR = "update_state_attribute"
SIGNAL_UPDATE_DEVICE = "{}_zha_update_device"
SIGNAL_GROUP_ENTITY_REMOVED = "group_entity_removed"
SIGNAL_GROUP_MEMBERSHIP_CHANGE = "group_membership_change"
UNKNOWN = "unknown"
UNKNOWN_MANUFACTURER = "unk_manufacturer"
UNKNOWN_MODEL = "unk_model"
WARNING_DEVICE_MODE_STOP = 0
WARNING_DEVICE_MODE_BURGLAR = 1
WARNING_DEVICE_MODE_FIRE = 2
WARNING_DEVICE_MODE_EMERGENCY = 3
WARNING_DEVICE_MODE_POLICE_PANIC = 4
WARNING_DEVICE_MODE_FIRE_PANIC = 5
WARNING_DEVICE_MODE_EMERGENCY_PANIC = 6
WARNING_DEVICE_STROBE_NO = 0
WARNING_DEVICE_STROBE_YES = 1
WARNING_DEVICE_SOUND_LOW = 0
WARNING_DEVICE_SOUND_MEDIUM = 1
WARNING_DEVICE_SOUND_HIGH = 2
WARNING_DEVICE_SOUND_VERY_HIGH = 3
WARNING_DEVICE_STROBE_LOW = 0x00
WARNING_DEVICE_STROBE_MEDIUM = 0x01
WARNING_DEVICE_STROBE_HIGH = 0x02
WARNING_DEVICE_STROBE_VERY_HIGH = 0x03
WARNING_DEVICE_SQUAWK_MODE_ARMED = 0
WARNING_DEVICE_SQUAWK_MODE_DISARMED = 1
ZHA_DISCOVERY_NEW = "zha_discovery_new_{}"
ZHA_GW_MSG = "zha_gateway_message"
ZHA_GW_MSG_DEVICE_FULL_INIT = "device_fully_initialized"
ZHA_GW_MSG_DEVICE_INFO = "device_info"
ZHA_GW_MSG_DEVICE_JOINED = "device_joined"
ZHA_GW_MSG_DEVICE_REMOVED = "device_removed"
ZHA_GW_MSG_GROUP_ADDED = "group_added"
ZHA_GW_MSG_GROUP_INFO = "group_info"
ZHA_GW_MSG_GROUP_MEMBER_ADDED = "group_member_added"
ZHA_GW_MSG_GROUP_MEMBER_REMOVED = "group_member_removed"
ZHA_GW_MSG_GROUP_REMOVED = "group_removed"
ZHA_GW_MSG_LOG_ENTRY = "log_entry"
ZHA_GW_MSG_LOG_OUTPUT = "log_output"
ZHA_GW_MSG_RAW_INIT = "raw_device_initialized"
EFFECT_BLINK = 0x00
EFFECT_BREATHE = 0x01
EFFECT_OKAY = 0x02
EFFECT_DEFAULT_VARIANT = 0x00
| partofthething/home-assistant | homeassistant/components/zha/core/const.py | Python | apache-2.0 | 10,279 | 0.000292 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.0-a53ec6ee1b (http://hl7.org/fhir/StructureDefinition/Composition) on 2019-05-07.
# 2019, SMART Health IT.
from . import domainresource
class Composition(domainresource.DomainResource):
""" A set of resources composed into a single coherent clinical statement with
clinical attestation.
A set of healthcare-related information that is assembled together into a
single logical package that provides a single coherent statement of
meaning, establishes its own context and that has clinical attestation with
regard to who is making the statement. A Composition defines the structure
and narrative content necessary for a document. However, a Composition
alone does not constitute a document. Rather, the Composition must be the
first entry in a Bundle where Bundle.type=document, and any other resources
referenced from Composition must be included as subsequent entries in the
Bundle (for example Patient, Practitioner, Encounter, etc.).
"""
resource_type = "Composition"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.attester = None
""" Attests to accuracy of composition.
List of `CompositionAttester` items (represented as `dict` in JSON). """
self.author = None
""" Who and/or what authored the composition.
List of `FHIRReference` items (represented as `dict` in JSON). """
self.category = None
""" Categorization of Composition.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.confidentiality = None
""" As defined by affinity domain.
Type `str`. """
self.custodian = None
""" Organization which maintains the composition.
Type `FHIRReference` (represented as `dict` in JSON). """
self.date = None
""" Composition editing time.
Type `FHIRDate` (represented as `str` in JSON). """
self.encounter = None
""" Context of the Composition.
Type `FHIRReference` (represented as `dict` in JSON). """
self.event = None
""" The clinical service(s) being documented.
List of `CompositionEvent` items (represented as `dict` in JSON). """
self.identifier = None
""" Version-independent identifier for the Composition.
Type `Identifier` (represented as `dict` in JSON). """
self.relatesTo = None
""" Relationships to other compositions/documents.
List of `CompositionRelatesTo` items (represented as `dict` in JSON). """
self.section = None
""" Composition is broken into sections.
List of `CompositionSection` items (represented as `dict` in JSON). """
self.status = None
""" preliminary | final | amended | entered-in-error.
Type `str`. """
self.subject = None
""" Who and/or what the composition is about.
Type `FHIRReference` (represented as `dict` in JSON). """
self.title = None
""" Human Readable name/title.
Type `str`. """
self.type = None
""" Kind of composition (LOINC if possible).
Type `CodeableConcept` (represented as `dict` in JSON). """
super(Composition, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(Composition, self).elementProperties()
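        # Each tuple is (name, json name, type, is list, "one of many" group,
        # not optional); this is the property-schema convention used by the
        # fhirclient base classes that these generated models build on.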
js.extend([
("attester", "attester", CompositionAttester, True, None, False),
("author", "author", fhirreference.FHIRReference, True, None, True),
("category", "category", codeableconcept.CodeableConcept, True, None, False),
("confidentiality", "confidentiality", str, False, None, False),
("custodian", "custodian", fhirreference.FHIRReference, False, None, False),
("date", "date", fhirdate.FHIRDate, False, None, True),
("encounter", "encounter", fhirreference.FHIRReference, False, None, False),
("event", "event", CompositionEvent, True, None, False),
("identifier", "identifier", identifier.Identifier, False, None, False),
("relatesTo", "relatesTo", CompositionRelatesTo, True, None, False),
("section", "section", CompositionSection, True, None, False),
("status", "status", str, False, None, True),
("subject", "subject", fhirreference.FHIRReference, False, None, False),
("title", "title", str, False, None, True),
("type", "type", codeableconcept.CodeableConcept, False, None, True),
])
return js
from . import backboneelement
class CompositionAttester(backboneelement.BackboneElement):
""" Attests to accuracy of composition.
A participant who has attested to the accuracy of the composition/document.
"""
resource_type = "CompositionAttester"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.mode = None
""" personal | professional | legal | official.
Type `str`. """
self.party = None
""" Who attested the composition.
Type `FHIRReference` (represented as `dict` in JSON). """
self.time = None
""" When the composition was attested.
Type `FHIRDate` (represented as `str` in JSON). """
super(CompositionAttester, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(CompositionAttester, self).elementProperties()
js.extend([
("mode", "mode", str, False, None, True),
("party", "party", fhirreference.FHIRReference, False, None, False),
("time", "time", fhirdate.FHIRDate, False, None, False),
])
return js
class CompositionEvent(backboneelement.BackboneElement):
""" The clinical service(s) being documented.
The clinical service, such as a colonoscopy or an appendectomy, being
documented.
"""
resource_type = "CompositionEvent"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.code = None
""" Code(s) that apply to the event being documented.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.detail = None
""" The event(s) being documented.
List of `FHIRReference` items (represented as `dict` in JSON). """
self.period = None
""" The period covered by the documentation.
Type `Period` (represented as `dict` in JSON). """
super(CompositionEvent, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(CompositionEvent, self).elementProperties()
js.extend([
("code", "code", codeableconcept.CodeableConcept, True, None, False),
("detail", "detail", fhirreference.FHIRReference, True, None, False),
("period", "period", period.Period, False, None, False),
])
return js
class CompositionRelatesTo(backboneelement.BackboneElement):
""" Relationships to other compositions/documents.
Relationships that this composition has with other compositions or
documents that already exist.
"""
resource_type = "CompositionRelatesTo"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.code = None
""" replaces | transforms | signs | appends.
Type `str`. """
self.targetIdentifier = None
""" Target of the relationship.
Type `Identifier` (represented as `dict` in JSON). """
self.targetReference = None
""" Target of the relationship.
Type `FHIRReference` (represented as `dict` in JSON). """
super(CompositionRelatesTo, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(CompositionRelatesTo, self).elementProperties()
js.extend([
("code", "code", str, False, None, True),
("targetIdentifier", "targetIdentifier", identifier.Identifier, False, "target", True),
("targetReference", "targetReference", fhirreference.FHIRReference, False, "target", True),
])
return js
class CompositionSection(backboneelement.BackboneElement):
""" Composition is broken into sections.
The root of the sections that make up the composition.
"""
resource_type = "CompositionSection"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.author = None
""" Who and/or what authored the section.
List of `FHIRReference` items (represented as `dict` in JSON). """
self.code = None
""" Classification of section (recommended).
Type `CodeableConcept` (represented as `dict` in JSON). """
self.emptyReason = None
""" Why the section is empty.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.entry = None
""" A reference to data that supports this section.
List of `FHIRReference` items (represented as `dict` in JSON). """
self.focus = None
""" Who/what the section is about, when it is not about the subject of
composition.
Type `FHIRReference` (represented as `dict` in JSON). """
self.mode = None
""" working | snapshot | changes.
Type `str`. """
self.orderedBy = None
""" Order of section entries.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.section = None
""" Nested Section.
List of `CompositionSection` items (represented as `dict` in JSON). """
self.text = None
""" Text summary of the section, for human interpretation.
Type `Narrative` (represented as `dict` in JSON). """
self.title = None
""" Label for section (e.g. for ToC).
Type `str`. """
super(CompositionSection, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(CompositionSection, self).elementProperties()
js.extend([
("author", "author", fhirreference.FHIRReference, True, None, False),
("code", "code", codeableconcept.CodeableConcept, False, None, False),
("emptyReason", "emptyReason", codeableconcept.CodeableConcept, False, None, False),
("entry", "entry", fhirreference.FHIRReference, True, None, False),
("focus", "focus", fhirreference.FHIRReference, False, None, False),
("mode", "mode", str, False, None, False),
("orderedBy", "orderedBy", codeableconcept.CodeableConcept, False, None, False),
("section", "section", CompositionSection, True, None, False),
("text", "text", narrative.Narrative, False, None, False),
("title", "title", str, False, None, False),
])
return js
import sys
try:
from . import codeableconcept
except ImportError:
codeableconcept = sys.modules[__package__ + '.codeableconcept']
try:
from . import fhirdate
except ImportError:
fhirdate = sys.modules[__package__ + '.fhirdate']
try:
from . import fhirreference
except ImportError:
fhirreference = sys.modules[__package__ + '.fhirreference']
try:
from . import identifier
except ImportError:
identifier = sys.modules[__package__ + '.identifier']
try:
from . import narrative
except ImportError:
narrative = sys.modules[__package__ + '.narrative']
try:
from . import period
except ImportError:
period = sys.modules[__package__ + '.period']
| all-of-us/raw-data-repository | rdr_service/lib_fhir/fhirclient_4_0_0/models/composition.py | Python | bsd-3-clause | 13,565 | 0.007667 |
#
# Copyright 2012 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import tempfile
import os
import storage.fileUtils as fileUtils
import testValidation
from testrunner import VdsmTestCase as TestCaseBase
class DirectFileTests(TestCaseBase):
@classmethod
def getConfigTemplate(cls):
return {}
def testRead(self):
data = """Vestibulum. Libero leo nostra, pede nunc eu. Pellentesque
platea lacus morbi nisl montes ve. Ac. A, consectetuer erat, justo eu.
Elementum et, phasellus fames et rutrum donec magnis eu bibendum. Arcu,
ante aliquam ipsum ut facilisis ad."""
srcFd, srcPath = tempfile.mkstemp()
f = os.fdopen(srcFd, "wb")
f.write(data)
f.flush()
f.close()
with fileUtils.open_ex(srcPath, "dr") as f:
self.assertEquals(f.read(), data)
os.unlink(srcPath)
def testSeekRead(self):
data = """
Habitasse ipsum at fusce litora metus, placerat dui purus aenean ante,
ve. Pede hymenaeos ut primis cum, rhoncus, lectus, nunc. Vestibulum
curabitur vitae etiam magna auctor velit, mi tempus vivamus orci eros.
Pellentesque curabitur risus fermentum eget. Elementum curae, donec
nisl egestas ve, ut odio eu nunc elit felis primis id. Ridiculus metus
morbi nulla erat, amet nisi. Amet ligula nisi, id penatibus risus in.
Purus velit duis. Aenean eget, pellentesque eu rhoncus arcu et
consectetuer laoreet, augue nisi dictum lacinia urna. Fermentum
torquent. Ut interdum vivamus duis. Felis consequat nec pede. Orci
sollicitudin parturient orci felis. Enim, diam velit sapien
condimentum fames semper nibh. Integer at, egestas pede consectetuer
ac augue pharetra dolor non placerat quisque id cursus ultricies.
Ligula mi senectus sit. Habitasse. Integer sollicitudin dapibus cum
quam.
"""
self.assertTrue(len(data) > 512)
srcFd, srcPath = tempfile.mkstemp()
f = os.fdopen(srcFd, "wb")
f.write(data)
f.flush()
f.close()
with fileUtils.open_ex(srcPath, "dr") as f:
f.seek(512)
self.assertEquals(f.read(), data[512:])
os.unlink(srcPath)
def testWrite(self):
data = """In ut non platea egestas, quisque magnis nunc nostra ac etiam
suscipit nec integer sociosqu. Fermentum. Ante orci luctus, ipsum
ullamcorper enim arcu class neque inceptos class. Ut, sagittis
torquent, commodo facilisi."""
srcFd, srcPath = tempfile.mkstemp()
os.close(srcFd)
with fileUtils.open_ex(srcPath, "dw") as f:
f.write(data)
with fileUtils.open_ex(srcPath, "r") as f:
self.assertEquals(f.read(len(data)), data)
os.unlink(srcPath)
def testSmallWrites(self):
data = """
Aliquet habitasse tellus. Fringilla faucibus tortor parturient
consectetuer sodales, venenatis platea habitant. Hendrerit nostra nunc
odio. Primis porttitor consequat enim ridiculus. Taciti nascetur,
nibh, convallis sit, cum dis mi. Nonummy justo odio cursus, ac hac
curabitur nibh. Tellus. Montes, ut taciti orci ridiculus facilisis
nunc. Donec. Risus adipiscing habitant donec vehicula non vitae class,
porta vitae senectus. Nascetur felis laoreet integer, tortor ligula.
Pellentesque vestibulum cras nostra. Ut sollicitudin posuere, per
accumsan curabitur id, nisi fermentum vel, eget netus tristique per,
donec, curabitur senectus ut fusce. A. Mauris fringilla senectus et
eni facilisis magna inceptos eu, cursus habitant fringilla neque.
Nibh. Elit facilisis sed, elit, nostra ve torquent dictumst, aenean
sapien quam, habitasse in. Eu tempus aptent, diam, nisi risus
pharetra, ac, condimentum orci, consequat mollis. Cras lacus augue
ultrices proin fermentum nibh sed urna. Ve ipsum ultrices curae,
feugiat faucibus proin et elementum vivamus, lectus. Torquent. Tempus
facilisi. Cras suspendisse euismod consectetuer ornare nostra. Fusce
amet cum amet diam.
"""
self.assertTrue(len(data) > 512)
srcFd, srcPath = tempfile.mkstemp()
os.close(srcFd)
with fileUtils.open_ex(srcPath, "dw") as f:
f.write(data[:512])
f.write(data[512:])
with fileUtils.open_ex(srcPath, "r") as f:
self.assertEquals(f.read(len(data)), data)
os.unlink(srcPath)
def testUpdateRead(self):
data = """
Aliquet. Aliquam eni ac nullam iaculis cras ante, adipiscing. Enim
eget egestas pretium. Ultricies. Urna cubilia in, hac. Curabitur.
Nibh. Purus ridiculus natoque sed id. Feugiat lacus quam, arcu
maecenas nec egestas. Hendrerit duis nunc eget dis lacus porttitor per
sodales class diam condimentum quisque condimentum nisi ligula.
Dapibus blandit arcu nam non ac feugiat diam, dictumst. Ante eget
fames eu penatibus in, porta semper accumsan adipiscing tellus in
sagittis. Est parturient parturient mi fermentum commodo, per
fermentum. Quis duis velit at quam risus mi. Facilisi id fames.
Turpis, conubia rhoncus. Id. Elit eni tellus gravida, ut, erat morbi.
Euismod, enim a ante vestibulum nibh. Curae curae primis vulputate
adipiscing arcu ipsum suspendisse quam hymenaeos primis accumsan
vestibulum.
"""
self.assertTrue(len(data) > 512)
srcFd, srcPath = tempfile.mkstemp()
os.close(srcFd)
with fileUtils.open_ex(srcPath, "wd") as f:
f.write(data[:512])
with fileUtils.open_ex(srcPath, "r+d") as f:
f.seek(512)
f.write(data[512:])
with fileUtils.open_ex(srcPath, "r") as f:
self.assertEquals(f.read(len(data)), data)
os.unlink(srcPath)
class ChownTests(TestCaseBase):
@testValidation.ValidateRunningAsRoot
def test(self):
targetId = 666
srcFd, srcPath = tempfile.mkstemp()
os.close(srcFd)
fileUtils.chown(srcPath, targetId, targetId)
stat = os.stat(srcPath)
self.assertTrue(stat.st_uid == stat.st_gid == targetId)
os.unlink(srcPath)
@testValidation.ValidateRunningAsRoot
def testNames(self):
        # Convert to some arbitrary id because I have no idea which
        # users are defined and what their IDs are, apart from root
tmpId = 666
srcFd, srcPath = tempfile.mkstemp()
os.close(srcFd)
fileUtils.chown(srcPath, tmpId, tmpId)
stat = os.stat(srcPath)
self.assertTrue(stat.st_uid == stat.st_gid == tmpId)
fileUtils.chown(srcPath, "root", "root")
stat = os.stat(srcPath)
self.assertTrue(stat.st_uid == stat.st_gid == 0)
class CopyUserModeToGroupTests(TestCaseBase):
MODE_MASK = 0777
# format: initialMode, expectedMode
modesList = [
(0770, 0770), (0700, 0770), (0750, 0770), (0650, 0660),
]
def testCopyUserModeToGroup(self):
fd, path = tempfile.mkstemp()
try:
os.close(fd)
for initialMode, expectedMode in self.modesList:
os.chmod(path, initialMode)
fileUtils.copyUserModeToGroup(path)
self.assertEquals(os.stat(path).st_mode & self.MODE_MASK,
expectedMode)
finally:
os.unlink(path)
| edwardbadboy/vdsm-ubuntu | tests/fileUtilTests.py | Python | gpl-2.0 | 8,296 | 0 |
from dbt.contracts.graph.parsed import (
HasTestMetadata,
ParsedNode,
ParsedAnalysisNode,
ParsedDataTestNode,
ParsedHookNode,
ParsedModelNode,
ParsedResource,
ParsedRPCNode,
ParsedSchemaTestNode,
ParsedSeedNode,
ParsedSnapshotNode,
ParsedSourceDefinition,
SeedConfig,
TestConfig,
)
from dbt.node_types import NodeType
from dbt.contracts.util import Replaceable
from dbt.exceptions import RuntimeException
from hologram import JsonSchemaMixin
from dataclasses import dataclass, field
import sqlparse # type: ignore
from typing import Optional, List, Union, Dict, Type
@dataclass
class InjectedCTE(JsonSchemaMixin, Replaceable):
id: str
sql: str
# for some frustrating reason, we can't subclass from ParsedNode directly,
# or typing.Union will flatten CompiledNode+ParsedNode into just ParsedNode.
# TODO: understand that issue and come up with some way for these two to share
# logic
@dataclass
class CompiledNode(ParsedNode):
compiled: bool = False
compiled_sql: Optional[str] = None
extra_ctes_injected: bool = False
extra_ctes: List[InjectedCTE] = field(default_factory=list)
injected_sql: Optional[str] = None
def prepend_ctes(self, prepended_ctes: List[InjectedCTE]):
self.extra_ctes_injected = True
self.extra_ctes = prepended_ctes
if self.compiled_sql is None:
raise RuntimeException(
'Cannot prepend ctes to an unparsed node', self
)
self.injected_sql = _inject_ctes_into_sql(
self.compiled_sql,
prepended_ctes,
)
self.validate(self.to_dict())
def set_cte(self, cte_id: str, sql: str):
"""This is the equivalent of what self.extra_ctes[cte_id] = sql would
do if extra_ctes were an OrderedDict
"""
for cte in self.extra_ctes:
if cte.id == cte_id:
cte.sql = sql
break
else:
self.extra_ctes.append(InjectedCTE(id=cte_id, sql=sql))
@dataclass
class CompiledAnalysisNode(CompiledNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Analysis]})
@dataclass
class CompiledHookNode(CompiledNode):
resource_type: NodeType = field(
metadata={'restrict': [NodeType.Operation]}
)
index: Optional[int] = None
@dataclass
class CompiledModelNode(CompiledNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Model]})
@dataclass
class CompiledRPCNode(CompiledNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.RPCCall]})
@dataclass
class CompiledSeedNode(CompiledNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Seed]})
config: SeedConfig = field(default_factory=SeedConfig)
@property
def empty(self):
""" Seeds are never empty"""
return False
@dataclass
class CompiledSnapshotNode(CompiledNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Snapshot]})
@dataclass
class CompiledDataTestNode(CompiledNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
config: TestConfig = field(default_factory=TestConfig)
@dataclass
class CompiledSchemaTestNode(CompiledNode, HasTestMetadata):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
column_name: Optional[str] = None
config: TestConfig = field(default_factory=TestConfig)
CompiledTestNode = Union[CompiledDataTestNode, CompiledSchemaTestNode]
def _inject_ctes_into_sql(sql: str, ctes: List[InjectedCTE]) -> str:
"""
`ctes` is a list of InjectedCTEs like:
[
InjectedCTE(
id="cte_id_1",
sql="__dbt__CTE__ephemeral as (select * from table)",
),
InjectedCTE(
id="cte_id_2",
sql="__dbt__CTE__events as (select id, type from events)",
),
]
Given `sql` like:
"with internal_cte as (select * from sessions)
select * from internal_cte"
This will spit out:
"with __dbt__CTE__ephemeral as (select * from table),
__dbt__CTE__events as (select id, type from events),
with internal_cte as (select * from sessions)
select * from internal_cte"
(Whitespace enhanced for readability.)
"""
if len(ctes) == 0:
return sql
parsed_stmts = sqlparse.parse(sql)
parsed = parsed_stmts[0]
with_stmt = None
for token in parsed.tokens:
if token.is_keyword and token.normalized == 'WITH':
with_stmt = token
break
if with_stmt is None:
# no with stmt, add one, and inject CTEs right at the beginning
first_token = parsed.token_first()
with_stmt = sqlparse.sql.Token(sqlparse.tokens.Keyword, 'with')
parsed.insert_before(first_token, with_stmt)
else:
# stmt exists, add a comma (which will come after injected CTEs)
trailing_comma = sqlparse.sql.Token(sqlparse.tokens.Punctuation, ',')
parsed.insert_after(with_stmt, trailing_comma)
token = sqlparse.sql.Token(
sqlparse.tokens.Keyword,
", ".join(c.sql for c in ctes)
)
parsed.insert_after(with_stmt, token)
return str(parsed)
PARSED_TYPES: Dict[Type[CompiledNode], Type[ParsedResource]] = {
CompiledAnalysisNode: ParsedAnalysisNode,
CompiledModelNode: ParsedModelNode,
CompiledHookNode: ParsedHookNode,
CompiledRPCNode: ParsedRPCNode,
CompiledSeedNode: ParsedSeedNode,
CompiledSnapshotNode: ParsedSnapshotNode,
CompiledDataTestNode: ParsedDataTestNode,
CompiledSchemaTestNode: ParsedSchemaTestNode,
}
COMPILED_TYPES: Dict[Type[ParsedResource], Type[CompiledNode]] = {
ParsedAnalysisNode: CompiledAnalysisNode,
ParsedModelNode: CompiledModelNode,
ParsedHookNode: CompiledHookNode,
ParsedRPCNode: CompiledRPCNode,
ParsedSeedNode: CompiledSeedNode,
ParsedSnapshotNode: CompiledSnapshotNode,
ParsedDataTestNode: CompiledDataTestNode,
ParsedSchemaTestNode: CompiledSchemaTestNode,
}
# for some types, the compiled type is the parsed type, so make this easy
CompiledType = Union[Type[CompiledNode], Type[ParsedResource]]
CompiledResource = Union[ParsedResource, CompiledNode]
def compiled_type_for(parsed: ParsedNode) -> CompiledType:
if type(parsed) in COMPILED_TYPES:
return COMPILED_TYPES[type(parsed)]
else:
return type(parsed)
def parsed_instance_for(compiled: CompiledNode) -> ParsedResource:
cls = PARSED_TYPES.get(type(compiled))
if cls is None:
# how???
raise ValueError('invalid resource_type: {}'
.format(compiled.resource_type))
# validate=False to allow extra keys from compiling
return cls.from_dict(compiled.to_dict(), validate=False)
NonSourceCompiledNode = Union[
CompiledAnalysisNode,
CompiledDataTestNode,
CompiledModelNode,
CompiledHookNode,
CompiledRPCNode,
CompiledSchemaTestNode,
CompiledSeedNode,
CompiledSnapshotNode,
]
NonSourceParsedNode = Union[
ParsedAnalysisNode,
ParsedDataTestNode,
ParsedHookNode,
ParsedModelNode,
ParsedRPCNode,
ParsedSchemaTestNode,
ParsedSeedNode,
ParsedSnapshotNode,
]
# This is anything that can be in manifest.nodes.
NonSourceNode = Union[
NonSourceCompiledNode,
NonSourceParsedNode,
]
# We allow either parsed or compiled nodes, or parsed sources, as some
# 'compile()' calls in the runner actually just return the original parsed
# node they were given.
CompileResultNode = Union[
NonSourceNode,
ParsedSourceDefinition,
]
| fishtown-analytics/dbt | core/dbt/contracts/graph/compiled.py | Python | apache-2.0 | 7,700 | 0 |
#!/usr/bin/python
import socket
import os
import time
import shutil
import sys
import re
import datetime
import argparse
# NCMD Libs
import ncmd_print as np
from ncmd_print import MessageLevel as MessageLevel
import ncmd_commands as ncmds
import ncmd_fileops as nfops
MAX_TRANSFER_BYTES=2048
QUIT_CMD = "quit now"
HOST = ""
PORT = 10123
ROOT_DIR_PATH = "/share/CACHEDEV1_DATA"
# Set up the server socket
def bindServerSocket(port):
server_sock = None
try:
server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_sock.bind((HOST, port))
np.print_msg("Successfully bound server socket to port:{0}".format(PORT), MessageLevel.INFO)
except Exception as err:
np.print_msg("Failed to bind server socket to port:{0}".format(PORT), MessageLevel.ERROR)
server_sock = None
return server_sock
# Accept incoming socket connections
def acceptConnection(server_sock):
server_sock.listen(1)
conn, addr = server_sock.accept()
return (conn, addr)
# Validate a path against the server mount
def validatePath(path, server_mnt):
result = False
# Paths beginning with the server mount are considered 'valid'
if path.find(server_mnt) == 0:
result = True
return result
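# For example, with the default mount:
#     validatePath('/share/CACHEDEV1_DATA/movies', ROOT_DIR_PATH) -> True
#     validatePath('/etc/passwd', ROOT_DIR_PATH)                  -> False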
# Validate source / destination paths
def validatePaths(paths, server_mnt):
result = True
for path in paths:
if not validatePath(path, server_mnt):
result = False
break
return result
# Deal with generating the appropriate response for a command
def processResponse(ncmd, success):
nresp = ''
if ncmds.getCommandBlock(ncmd):
if success:
nresp = ncmds.genCmdSuccessResp(ncmd)
else:
nresp = ncmds.genCmdFailureResp(ncmd)
else:
pass # No response for non-blocking
return nresp
# Handle the current command string -- the actual file operations occur here
def processCmd(ncmd, args):
quit = False
cmd_success = True
np.print_msg("Received command: {0}".format(ncmd), MessageLevel.INFO)
dest = ncmds.getCommandDest(ncmd)
srcs = ncmds.getCommandSrcs(ncmd)
if ncmds.isQuitSequence(ncmd):
quit = True
else:
if args.validate_server_mount:
srcs_valid = validatePaths(srcs, args.validate_server_mount)
dest_valid = validatePath(dest, args.validate_server_mount)
cmd_success = srcs_valid and dest_valid
# Only try and conduct file operations when validation is disabled,
# or if validation is enabled, and it passes.
if cmd_success:
if ncmds.isMove(ncmd):
for src in srcs:
if not nfops.move(src, dest):
cmd_success = False
elif ncmds.isCopy(ncmd):
for src in srcs:
if not nfops.copy(src, dest):
cmd_success = False
elif ncmds.isRemove(ncmd):
# The naming here isn't ideal, but this code gets the job done!
for src in srcs:
if not nfops.remove(src):
cmd_success = False
if not nfops.remove(dest):
cmd_success = False
return quit, cmd_success
# Deal with the current connection, getting, sending, and closing
def processConnection(conn, args):
ncmd = conn.recv(ncmds.MAX_CMD_SIZE)
quit, cmd_success = processCmd(ncmd, args)
resp = processResponse(ncmd, cmd_success)
if len(resp) > 0:
try:
conn.send(resp)
except Exception as err:
            np.print_msg(str(err), MessageLevel.ERROR)
conn.close()
return quit
def getArgs():
parser = argparse.ArgumentParser(description='Copy, move, remove quickly on a remotely mounted folder.')
parser.add_argument('--port', type=int, help='Specify a custom port.')
parser.add_argument('--validate_server_mount', type=str, help='Specify a mount on the server to validate incoming paths against.')
return parser.parse_args()
def main():
# Get the port
args = getArgs()
server_port = PORT
if args.port:
server_port = args.port
# Bind the sever socket
server_sock = bindServerSocket(server_port)
if server_sock:
while True:
conn = None
try:
conn, addr = acceptConnection(server_sock)
np.print_msg("Successfully connected to client: {0}:{1}".format(addr[0], PORT), MessageLevel.INFO)
except socket.error as msg:
np.print_msg(msg, MessageLevel.ERROR)
conn = None
if conn:
quit = processConnection(conn, args)
if quit:
np.print_msg("Server shutdown requested @ {0}...".format(datetime.datetime.now()), MessageLevel.INFO)
break
# Keep this at the end for safety!
if server_sock:
server_sock.close()
if __name__ == '__main__':
main()
| nathankrueger/ncmd | ncmd_server.py | Python | gpl-2.0 | 4,429 | 0.031836 |
import knuckle
class GameState(knuckle.State):
def on_keyup(self, e):
pass
def on_keydown(self, e):
if e == 'Escape':
self.window.pop_state()
def on_draw(self):
self.window.clear()
self.batch.draw()
self.window.flip()
def __str__(self):
return 'GameState()'
| chris-statzer/knuckle-python | game/game_state.py | Python | mit | 339 | 0 |
# Copyright (c) 2016, the GPyOpt Authors
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from ...models import GPModel
import numpy as np
class CostModel(object):
"""
Class to handle the cost of evaluating the function.
    :param cost_withGradients: function that returns the cost of evaluating the function and its gradient. By default
    no cost is used. Options are:
    - cost_withGradients is some pre-defined cost function; it should return numpy arrays as outputs.
    - cost_withGradients = 'evaluation_time'.
    .. Note:: if cost_withGradients = 'evaluation_time' the evaluation time of the function is used to model a GP whose
    mean is used as cost.
"""
def __init__(self, cost_withGradients):
super(CostModel, self).__init__()
self.cost_type = cost_withGradients
# --- Set-up evaluation cost
if self.cost_type is None:
self.cost_withGradients = constant_cost_withGradients
self.cost_type = 'Constant cost'
elif self.cost_type == 'evaluation_time':
self.cost_model = GPModel()
self.cost_withGradients = self._cost_gp_withGradients
self.num_updates = 0
else:
self.cost_withGradients = cost_withGradients
self.cost_type = 'User defined cost'
def _cost_gp(self,x):
"""
Predicts the time cost of evaluating the function at x.
"""
        # the cost GP is fit on log-times (see update_cost_model), so undo the log
        m, _, _, _ = self.cost_model.predict_withGradients(x)
        return np.exp(m)
def _cost_gp_withGradients(self,x):
"""
Predicts the time cost and its gradient of evaluating the function at x.
"""
m, _, dmdx, _= self.cost_model.predict_withGradients(x)
return np.exp(m), np.exp(m)*dmdx
def update_cost_model(self, x, cost_x):
"""
Updates the GP used to handle the cost.
        :param x: input of the GP for the cost model.
        :param cost_x: values of the time cost at the input locations.
"""
if self.cost_type == 'evaluation_time':
cost_evals = np.log(np.atleast_2d(np.asarray(cost_x)).T)
if self.num_updates == 0:
X_all = x
costs_all = cost_evals
else:
X_all = np.vstack((self.cost_model.model.X,x))
costs_all = np.vstack((self.cost_model.model.Y,cost_evals))
self.num_updates += 1
self.cost_model.updateModel(X_all, costs_all, None, None)
def constant_cost_withGradients(x):
"""
Constant cost function used by default: cost = 1, d_cost = 0.
"""
return np.ones(x.shape[0])[:,None], np.zeros(x.shape)
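# Minimal usage sketch (X is assumed to be a 2-D numpy array of evaluated
# inputs and `elapsed` a matching list of wall-clock times in seconds):
#     cost = CostModel('evaluation_time')
#     cost.update_cost_model(X, elapsed)
#     c, dc = cost.cost_withGradients(X)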
| SheffieldML/GPyOpt | GPyOpt/core/task/cost.py | Python | bsd-3-clause | 2,686 | 0.00484 |
# coding=utf-8
#
# Copyright (c) 2011-2015 First Flamingo Enterprise B.V.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# markup.py
# firstflamingo/python_utilities
#
# Created by Berend Schotanus on 23-Nov-2011.
#
from google.appengine.api import users
import logging
import xml.sax
# ====== Generic XML Classes ===========================================================================================
class XMLElement:
OPEN_TEMPLATE = '<%s>'
CLOSE_TEMPLATE = '</%s>'
SELFCLOSING_TEMPLATE = '<%s/>'
ATTRIBUTE_TEMPLATE = '%s="%s"'
def __init__(self, type, attributes=None, content=None):
self.type = type
        if attributes is None:
self.attributes = {}
else:
self.attributes = attributes
        if content is None:
self.content = []
else:
self.content = content
def set_attribute(self, key, value):
self.attributes[key] = value
def set_time(self, timeStamp):
self.set_attribute('time', rfc3339String(timeStamp))
def add(self, newContent):
self.content.append(newContent)
def write(self, depth=0, lf=False):
if depth > 10: raise Exception('exceeds max recurse depth %d' % depth)
list = [self.type]
for key, value in self.attributes.items():
list.append(XMLElement.ATTRIBUTE_TEMPLATE % (key, value))
attributed_type = ' '.join(list)
list = []
if self.content:
list.append(XMLElement.OPEN_TEMPLATE % attributed_type)
for element in self.content:
try:
theString = element.write(depth + 1, lf=lf)
list.append(theString)
except AttributeError:
list.append(unicode(element))
list.append(XMLElement.CLOSE_TEMPLATE % self.type)
if lf:
joinString = '\n' + depth * ' '
else:
joinString = ''
return joinString.join(list)
else:
return XMLElement.SELFCLOSING_TEMPLATE % attributed_type
class XMLDocument:
def __init__(self, name):
self.root = XMLElement(name)
def doctype(self):
return '<?xml version="1.0" encoding="UTF-8"?>'
def write(self, lf=False):
if lf:
joinString = '\n'
else:
joinString = ''
return joinString.join([self.doctype(), self.root.write(lf=lf)])
# ====== Functions creating XML elements ===============================================================================
def element_with_id(name, id):
return XMLElement(name, {'id': id})
def element_with_content(name, content):
return XMLElement(name, content=[content])
# ====== XML Parser ====================================================================================================
class XMLImporter(xml.sax.handler.ContentHandler):
"""
xml.sax ContentHandler, intended to be subclassed
Compares an existing data set with imported xml data, creates a dictionary with changed objects
and a dictionary with objects that don't appear in the xml.
The actual reading of the data will be done in a subclass implementation of start_xml_element and end_xml_element
Fetched data must be (temporarily) stored in attributes of the Importer
Results must be saved in endDocument()
The following methods must be implemented in subclasses:
active_xml_tags()
existing_objects_dictionary()
key_for_current_object()
create_new_object(key)
start_xml_element(name, attrs)
end_xml_element(name)
update_object(existing_object)
"""
data = None
changes = False
old_objects = None
updated_objects = None
new_objects = None
def startDocument(self):
self.old_objects = self.existing_objects_dictionary()
self.updated_objects = {}
self.new_objects = {}
def endDocument(self):
self.save_objects()
def startElement(self, name, attrs):
self.data = []
self.start_xml_element(name, attrs)
def endElement(self, name):
if name in self.active_xml_tags():
key = self.key_for_current_object()
if key is not None:
current_object = self.pop_from_old_objects(key)
if not current_object:
current_object = self.create_new_object(key)
self.changes = False
self.update_object(current_object, name)
self.new_objects[key] = current_object
if self.changes:
self.updated_objects[key] = current_object
self.end_xml_element(name)
def characters(self, string):
self.data.append(string)
def pop_from_old_objects(self, key):
current_object = self.old_objects.get(key)
if current_object:
del self.old_objects[key]
else:
current_object = self.new_objects.get(key)
return current_object
def active_xml_tags(self):
"""
        Provides the names of the xml elements that encapsulate the objects that must be imported.
        Must be overridden in subclasses
"""
return None
def existing_objects_dictionary(self):
"""
Provides a dictionary with the objects that will be updated by the import.
        Must be overridden in subclasses
"""
return {}
def key_for_current_object(self):
"""
Provides the key to store the current object. If 'None' is returned the current object will be ignored.
        Must be overridden in subclasses
"""
return None
def create_new_object(self, key):
"""
Provides a new blank object, to be filled with the current import.
        Must be overridden in subclasses
"""
return None
def start_xml_element(self, name, attrs):
"""
Gives subclasses the opportunity to read data from the xml element
"""
pass
def end_xml_element(self, name):
"""
Gives subclasses the opportunity to read data from the xml element
"""
pass
def update_object(self, existing_object, name):
"""
        Gives subclasses the opportunity to apply the imported data upon an existing (or newly created) object.
        If changes are applied, self.changes must be set to True for the changes to be saved.
        Must be overridden in subclasses
"""
pass
def save_objects(self):
"""
Gives subclasses the opportunity to save the imported objects.
        Must be overridden in subclasses
"""
pass
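# --- Illustrative sketch (not part of the original module): a minimal
# XMLImporter subclass wiring the required overrides together. The 'person'
# element, its 'id' attribute and the dict-based storage are assumptions made
# only for this example.
class _ExamplePersonImporter(XMLImporter):
    def active_xml_tags(self):
        return ['person']
    def existing_objects_dictionary(self):
        return {}  # would normally load previously saved objects, keyed by id
    def start_xml_element(self, name, attrs):
        if name == 'person':
            self._current_id = attrs.get('id')
    def key_for_current_object(self):
        return self._current_id
    def create_new_object(self, key):
        return {'id': key}
    def update_object(self, person, name):
        imported_name = ''.join(self.data).strip()
        if person.get('name') != imported_name:
            person['name'] = imported_name
            self.changes = True  # mark as changed so it lands in updated_objects
    def save_objects(self):
        pass  # would persist self.new_objects / self.updated_objects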
# ====== Generic HTML Classes ==========================================================================================
class HTMLDocument(XMLDocument):
def __init__(self, title, language='en', charset='UTF-8'):
XMLDocument.__init__(self, 'html')
self.head = XMLElement('head')
self.head.add(title_tag(title))
self.head.add(meta('charset', charset))
self.root.add(self.head)
self.body = XMLElement('body')
self.root.add(self.body)
self.root.set_attribute('lang', language)
def doctype(self):
return '<!doctype html>'
class HTMLTable():
def __init__(self, name, columnTitles):
        self.name = name
        self.width = len(columnTitles)
        self.titles = columnTitles
        self.rows = []
        self.format = None  # optional list of '%'-style format strings, one per column
def set_title(self, key, name):
self.titles[key] = name
def add_row(self):
newRow = HTMLTableRow(self.width)
self.rows.append(newRow)
return newRow
def fill_data(self, data):
if not self.format: return
for dataRow in data:
tableRow = self.add_row()
for col in range(len(dataRow)):
if col >= len(self.format): break
tableRow.add_to_cell(col, self.format[col] % dataRow[col])
def write(self, depth, lf=False):
root = element_with_id('table', self.name)
colgroup = XMLElement('colgroup')
table_head = XMLElement('tr')
        for i in range(self.width):
            colgroup.add(element_with_id('col', self.name + '_col%02d' % i))
            table_head.add(XMLElement('th', {'scope': 'col', 'class': 'C%02d' % i}, [self.titles[i]]))
root.add(colgroup)
root.add(XMLElement('thead', {}, [table_head]))
table_body = XMLElement('tbody')
for row in self.rows:
row_object = XMLElement('tr')
for cell in row.cells:
row_object.add(cell)
table_body.add(row_object)
root.add(table_body)
return root.write(depth, lf=lf)
class HTMLTableRow():
def __init__(self, width):
        self.cells = [table_cell(i) for i in range(width)]
def add_to_cell(self, index, content):
self.cells[index].add(content)
def add_date_to_cell(self, index, value):
self.cells[index].add(date(value))
def add_time_to_cell(self, index, value):
self.cells[index].add(time(value))
def add_link_to_cell(self, index, href, text):
self.cells[index].add(anchor(href, text))
# ====== Functions creating HTML elements ==============================================================================
def title_tag(text):
return XMLElement('title', {}, [text])
def meta(key, value):
return XMLElement('meta', {key: value}, [])
def link(rel, href):
return XMLElement('link', {'rel': rel, 'href': href}, [])
def div(identifier):
return XMLElement('div', {'id':identifier}, [''])
def anchor(href, content):
return XMLElement('a', {'href': href}, [content])
def link_to_page(format, page):
return anchor(format % page, '%d' % page)
def paragraph(text, attributes=None):
    # None (not {}) avoids sharing one mutable dict across calls; XMLElement substitutes a fresh {}
    return XMLElement('p', attributes, [text])
def heading(level, text, attributes=None):
    type = 'h%d' % level
    return XMLElement(type, attributes, [text])
def table_cell(column_index):
return XMLElement('td', {'class': 'C%02d' % column_index}, [])
def form(action, method='get'):
return XMLElement('form', {'action':action, 'method':method}, [])
def input(type, name=None, value=None, size=None):
attributes = {'type':type}
if name: attributes['name'] = name
if value: attributes['value'] = value
if size: attributes['size'] = size
return XMLElement('input', attributes, [])
def date(timeStamp):
    monthName = ('jan', 'feb', 'mrt', 'apr', 'mei', 'jun',
                 'jul', 'aug', 'sep', 'okt', 'nov', 'dec')[timeStamp.month - 1]
    userString = '%d %s.\'%02d' % (timeStamp.day, monthName, timeStamp.year - 2000)
return XMLElement('time', {'datetime':rfc3339String(timeStamp)}, [userString])
def time(timeStamp):
userString = '%02d:%02d:%02d' % (timeStamp.hour, timeStamp.minute, timeStamp.second)
return XMLElement('time', {'datetime':rfc3339String(timeStamp)}, [userString])
def rfc3339String(t):
return '%04d-%02d-%02dT%02d:%02d:%02dZ' % (t.year, t.month, t.day, t.hour, t.minute, t.second)
# ====== HTML template functions =======================================================================================
def login_link():
return anchor(users.create_login_url('/'), 'login')
def user_id():
user = users.get_current_user().nickname()
logout_link = anchor(users.create_logout_url('/'), 'logout')
content = '<b>%s</b> | %s' % (user, logout_link.write())
return XMLElement('div', {'id': 'user_id'}, [content])
def main_menu(menu_list):
list = XMLElement('ul')
for item in menu_list:
list.add(element_with_content('li', anchor(item[1], item[0])))
return list
def page_navigator(currentPage, lastPage, urlFormat):
par = paragraph('pagina: ')
before = currentPage - 1
after = lastPage - currentPage
if before > 1:
par.add(link_to_page(urlFormat, 1))
par.add(' ')
if before > 2:
par.add('... ')
if before > 0:
par.add(link_to_page(urlFormat, currentPage - 1))
par.add(' ')
par.add('<strong>%d</strong>' % currentPage)
if after > 0:
par.add(' ')
par.add(link_to_page(urlFormat, currentPage + 1))
if after > 2:
par.add(' ...')
if after > 1:
par.add(' ')
par.add(link_to_page(urlFormat, lastPage))
return par
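# --- Illustrative sketch (not part of the original module): rendering a small
# table inside a page with the classes above.
#
#   doc = HTMLDocument('Demo page')
#   table = HTMLTable('scores', ['Name', 'Score'])
#   row = table.add_row()
#   row.add_to_cell(0, 'Alice')
#   row.add_to_cell(1, '42')
#   doc.body.add(table)
#   print doc.write(lf=True)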
| firstflamingo/python_utilities | markup.py | Python | apache-2.0 | 13,400 | 0.007687 |
#!/usr/bin/env python
import os
import csv
import rospy
from std_msgs.msg import Bool
from dbw_mkz_msgs.msg import ThrottleCmd, SteeringCmd, BrakeCmd, SteeringReport
'''
You can use this file to test your DBW code against a bag recorded with a reference implementation.
The bag can be found at https://s3-us-west-1.amazonaws.com/udacity-selfdrivingcar/files/reference.bag.zip
To use the downloaded bag file, rename it to 'dbw_test.rosbag.bag' and place it in the CarND-Capstone/data folder.
Then, with roscore running, you can use roslaunch with the dbw_test.launch file found in
<project_repo>/ros/src/twist_controller/launch.
This file will produce 3 csv files which you can process to figure out how your DBW node is
performing on various commands.
`/actual/*` are commands from the recorded bag while `/vehicle/*` are the output of your node.
'''
class DBWTestNode(object):
def __init__(self):
rospy.init_node('dbw_test_node')
rospy.Subscriber('/vehicle/steering_cmd', SteeringCmd, self.steer_cb)
rospy.Subscriber('/vehicle/throttle_cmd', ThrottleCmd, self.throttle_cb)
rospy.Subscriber('/vehicle/brake_cmd', BrakeCmd, self.brake_cb)
rospy.Subscriber('/actual/steering_cmd', SteeringCmd, self.actual_steer_cb)
rospy.Subscriber('/actual/throttle_cmd', ThrottleCmd, self.actual_throttle_cb)
rospy.Subscriber('/actual/brake_cmd', BrakeCmd, self.actual_brake_cb)
rospy.Subscriber('/vehicle/dbw_enabled', Bool, self.dbw_enabled_cb)
self.steer = self.throttle = self.brake = None
self.steer_data = []
self.throttle_data = []
self.brake_data = []
self.dbw_enabled = False
base_path = os.path.dirname(os.path.abspath(__file__))
self.steerfile = os.path.join(base_path, 'steers.csv')
self.throttlefile = os.path.join(base_path, 'throttles.csv')
self.brakefile = os.path.join(base_path, 'brakes.csv')
self.loop()
def loop(self):
rate = rospy.Rate(10) # 10Hz
while not rospy.is_shutdown():
rate.sleep()
fieldnames = ['actual', 'proposed']
with open(self.steerfile, 'w') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
writer.writerows(self.steer_data)
with open(self.throttlefile, 'w') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
writer.writerows(self.throttle_data)
with open(self.brakefile, 'w') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
writer.writerows(self.brake_data)
def dbw_enabled_cb(self, msg):
self.dbw_enabled = msg.data
def steer_cb(self, msg):
self.steer = msg.steering_wheel_angle_cmd
def throttle_cb(self, msg):
self.throttle = msg.pedal_cmd
def brake_cb(self, msg):
self.brake = msg.pedal_cmd
def actual_steer_cb(self, msg):
if self.dbw_enabled and self.steer is not None:
self.steer_data.append({'actual': msg.steering_wheel_angle_cmd,
'proposed': self.steer})
self.steer = None
def actual_throttle_cb(self, msg):
if self.dbw_enabled and self.throttle is not None:
self.throttle_data.append({'actual': msg.pedal_cmd,
'proposed': self.throttle})
self.throttle = None
def actual_brake_cb(self, msg):
if self.dbw_enabled and self.brake is not None:
self.brake_data.append({'actual': msg.pedal_cmd,
'proposed': self.brake})
self.brake = None
if __name__ == '__main__':
DBWTestNode()
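# --- Illustrative post-processing sketch (not part of the original node):
# once the bag has been replayed, the generated CSVs can be compared
# column-wise, e.g. mean absolute steering error:
#
#   import csv
#   with open('steers.csv') as f:
#       rows = list(csv.DictReader(f))
#   errors = [abs(float(r['actual']) - float(r['proposed'])) for r in rows]
#   print(sum(errors) / len(errors))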
| zegnus/self-driving-car-machine-learning | p13-final-project/ros/src/twist_controller/dbw_test.py | Python | mit | 3,850 | 0.002857 |
from django.test import TestCase
from django.conf import settings
from django.contrib.sites.models import Site
from django.db.models.query import QuerySet
from preferences import preferences
from music.models import TrackContributor, Credit, Track, Album, CreditOption
from music.utils import wikipedia, lastfm
class ScraperTestCase(TestCase):
@classmethod
def setUpClass(cls):
# Disable scraping
settings.JMBO_MUSIC['scrapers'] = []
# Bootstrap music preferences
prefs = preferences.MusicPreferences
prefs.save()
creditoption = CreditOption.objects.create(
music_preferences=prefs, role_type='artist', role_name='Artist',
role_priority=1
)
# Legitimate entries
artist = TrackContributor.objects.create(title="Oasis")
album = Album.objects.create(title="What's the story morning glory")
track = Track.objects.create(title="Don't look back in anger")
track.create_credit("Oasis", "artist")
track.album.add(album.id)
track.save()
cls.wikipedia_artist = artist
cls.wikipedia_album = album
cls.wikipedia_track = track
artist = TrackContributor.objects.create(title="Foo Fighters")
album = Album.objects.create(title="One By One")
track = Track.objects.create(title="All My Life")
track.create_credit("Foo Fighters", "artist")
track.album.add(album.id)
track.save()
cls.lastfm_artist = artist
cls.lastfm_album = album
cls.lastfm_track = track
# Illegitimate entries
artist = TrackContributor.objects.create(title="vgnfdnvnvfnsncfd")
album = Album.objects.create(title="tggbfbvfvf")
track = Track.objects.create(title="grfgrgeagteg")
track.create_credit("vgnfdnvnvfnsncfd", "artist")
track.album = [album]
track.save()
cls.iartist = artist
cls.ialbum = album
cls.itrack = track
def test_wikipedia(self):
settings.JMBO_MUSIC['scrapers'] = ['wikipedia']
wikipedia(self.wikipedia_artist)
wikipedia(self.wikipedia_album)
wikipedia(self.wikipedia_track)
wikipedia(self.iartist)
wikipedia(self.ialbum)
wikipedia(self.itrack)
self.failUnless(self.wikipedia_artist.image)
self.failUnless(self.wikipedia_album.image)
self.failUnless(self.wikipedia_track.image)
self.failIf(self.iartist.image)
self.failIf(self.ialbum.image)
# Track is exempt because it always gets a default image
def test_lastfm(self):
# Abort test if no API key was set
try:
dc = settings.JMBO_MUSIC['lastfm_api_key']
dc = settings.JMBO_MUSIC['lastfm_api_secret']
except KeyError:
return
settings.JMBO_MUSIC['scrapers'] = ['lastfm']
lastfm(self.lastfm_artist)
lastfm(self.lastfm_album)
lastfm(self.lastfm_track)
lastfm(self.iartist)
lastfm(self.ialbum)
lastfm(self.itrack)
self.failUnless(self.lastfm_artist.image)
self.failUnless(self.lastfm_album.image)
self.failUnless(self.lastfm_track.image)
self.failIf(self.iartist.image)
self.failIf(self.ialbum.image)
# Track is exempt because it always gets a default image
| praekelt/jmbo-music | music/tests/__init__.py | Python | bsd-3-clause | 3,379 | 0 |
# -*-python-*-
# GemRB - Infinity Engine Emulator
# Copyright (C) 2003 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# MessageWindow.py - scripts and GUI for main (walk) window
###################################################
import GemRB
import GUIClasses
import GUICommon
import GUICommonWindows
import CommonWindow
import GUIWORLD
from GameCheck import MAX_PARTY_SIZE
from GUIDefines import *
MessageWindow = 0
ActionsWindow = 0
PortraitWindow = 0
OptionsWindow = 0
MessageTA = 0
def OnLoad():
global MessageWindow, ActionsWindow, PortraitWindow, OptionsWindow
GemRB.GameSetPartySize(MAX_PARTY_SIZE)
GemRB.GameSetProtagonistMode(0)
GemRB.LoadWindowPack (GUICommon.GetWindowPack())
GemRB.SetInfoTextColor(0,255,0,255)
ActionsWindow = GemRB.LoadWindow(0)
OptionsWindow = GemRB.LoadWindow(2)
MessageWindow = GemRB.LoadWindow(7)
PortraitWindow = GUICommonWindows.OpenPortraitWindow (1)
MessageTA = MessageWindow.GetControl (1)
MessageTA.SetFlags (IE_GUI_TEXTAREA_AUTOSCROLL|IE_GUI_TEXTAREA_HISTORY)
GemRB.SetVar ("MessageTextArea", MessageTA.ID)
GemRB.SetVar ("ActionsWindow", ActionsWindow.ID)
GemRB.SetVar ("OptionsWindow", OptionsWindow.ID)
GemRB.SetVar ("MessageWindow", -1)
GemRB.SetVar ("OtherWindow", -1)
GemRB.SetVar ("ActionsPosition", 1) #Bottom
GemRB.SetVar ("OptionsPosition", 1) #Bottom
GemRB.SetVar ("MessagePosition", 1) #Bottom
GemRB.SetVar ("OtherPosition", 0) #Left
GemRB.GameSetScreenFlags (0, OP_SET)
CloseButton= MessageWindow.GetControl (0)
CloseButton.SetText(28082)
CloseButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, CommonWindow.OnDecreaseSize)
CloseButton.SetFlags (IE_GUI_BUTTON_DEFAULT | IE_GUI_BUTTON_MULTILINE, OP_OR)
OpenButton = OptionsWindow.GetControl (10)
OpenButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, CommonWindow.OnIncreaseSize)
# Select all
Button = ActionsWindow.GetControl (1)
Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, GUICommon.SelectAllOnPress)
	# Stop current action
Button = ActionsWindow.GetControl (3)
Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, GUICommonWindows.ActionStopPressed)
FormationButton = ActionsWindow.GetControl (4)
FormationButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, GUIWORLD.OpenFormationWindow)
GUICommonWindows.SetupClockWindowControls (ActionsWindow)
GUICommonWindows.SetupMenuWindowControls (OptionsWindow)
UpdateControlStatus ()
def UpdateControlStatus ():
global MessageWindow, PortraitWindow, ActionsWindow, OptionsWindow, MessageTA
Expand = GemRB.GetMessageWindowSize() & (GS_DIALOGMASK|GS_DIALOG)
hideflags = GemRB.HideGUI ()
if Expand:
GemRB.SetVar ("MessageWindow", MessageWindow.ID)
GemRB.SetVar ("PortraitWindow", -1)
GemRB.SetVar ("ActionsWindow", -1)
GemRB.SetVar ("OptionsWindow", -1)
MessageTA = GUIClasses.GTextArea(MessageWindow.ID, GemRB.GetVar ("MessageTextArea"))
MessageTA.SetStatus (IE_GUI_CONTROL_FOCUSED)
Label = MessageWindow.GetControl (0x10000003)
Label.SetText (str (GemRB.GameGetPartyGold ()))
else:
GemRB.SetVar ("MessageWindow", -1)
GemRB.SetVar ("PortraitWindow", PortraitWindow.ID)
GemRB.SetVar ("ActionsWindow", ActionsWindow.ID)
GemRB.SetVar ("OptionsWindow", OptionsWindow.ID)
GUICommon.GameControl.SetStatus(IE_GUI_CONTROL_FOCUSED)
if hideflags:
GemRB.UnhideGUI ()
| Tomsod/gemrb | gemrb/GUIScripts/pst/MessageWindow.py | Python | gpl-2.0 | 3,926 | 0.033367 |
import ckanext.deadoralive.config as config
import ckanext.deadoralive.tests.helpers as custom_helpers
class TestConfig(custom_helpers.FunctionalTestBaseClass):
def test_that_it_reads_settings_from_config_file(self):
"""Test that non-default config settings in the config file work."""
# These non-default settings are in the test.ini config file.
assert config.recheck_resources_after == 48
assert config.resend_pending_resources_after == 12
# TODO: Test falling back on defaults when there's nothing in the config
# file.
| ckan/ckanext-deadoralive | ckanext/deadoralive/tests/test_config.py | Python | agpl-3.0 | 572 | 0 |
# -*- coding: utf-8 -*-
import os
basedir = os.path.abspath(os.path.dirname(__file__))  # get basedir of the project
WTF_CSRF_ENABLED = True
SECRET_KEY = 'you-will-guess'
#for database
# SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')  # local SQLite alternative
SQLALCHEMY_DATABASE_URI = "mysql://username:password@server_ip:port/database_name"
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
#for upload pic
UPLOAD_FOLDER = basedir+'/uploads/' #should use basedir
MAX_CONTENT_LENGTH=2*1024*1024
ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg'])#TODO:make user aware
#for upload excel
UPLOAD_EXCEL = basedir+'/app/static/add_info/' #should use basedir
| SpeedMe/leihuang.org | config.py | Python | apache-2.0 | 665 | 0.022556 |
from __future__ import print_function
import readline
import sys
import argparse
from . import crucible, config, config_ui, utils, patch
def main():
parser = argparse.ArgumentParser(description='Create Code Reviews')
parser.add_argument('--setup', action='store_true', help='setup banter configuration')
parser.add_argument('-t', '--title', help="set title of new review")
parser.add_argument("-r", "--reviewers", help="set reviewers of new review")
parser_results = vars(parser.parse_args())
if parser_results['setup']:
setup()
else:
return create_review(title=parser_results['title'],
reviewers=parser_results['reviewers'])
def create_review(title='', reviewers=''):
conf = load_config()
if conf is None:
return 1
crucible_url = conf.get_value('crucible', 'url')
crucible_conn = crucible.Crucible(crucible_url)
username = conf.get_value('crucible', 'username')
auth_token = conf.get_value('crucible', 'token')
project_key = conf.get_value('crucible', 'project_key')
reviewers = reviewers or conf.get_value('crucible', 'reviewers')
diff = patch.clean(sys.stdin.read())
review_id = do_create_review(crucible_conn, username, auth_token, project_key, diff, title)
if review_id == -1:
return review_id
add_reviewers(crucible_conn, auth_token, review_id, reviewers)
print(utils.combine_url_components(crucible_url, "cru", review_id))
def do_create_review(crucible_conn, username, auth_token, project_key, diff, title=''):
parameters = {
'allow_reviewers_to_join': True,
'author': username,
'description': '',
'name': title,
'project_key': project_key,
'patch': diff
}
resp = crucible_conn.create_review(auth_token, **parameters)
if resp.status_code == 200 or resp.status_code == 201:
return resp.json()['permaId']['id']
    sys.stderr.write("Got HTTP status code " + str(resp.status_code) + " from server!\n")
return -1
def add_reviewers(crucible_conn, auth_token, review_id, reviewers):
if reviewers is not None and reviewers != "":
reviewer_list = [r.strip() for r in reviewers.split(',')]
        crucible_conn.add_reviewers(auth_token, review_id, reviewer_list)
def setup():
conf = config.Config()
conf.load_from_file()
updated_conf = config_ui.get_config_from_user(conf.as_dict())
set_crucible_token(updated_conf)
conf.set_from_dict(updated_conf)
conf.save()
def set_crucible_token(conf):
# get crucible token and forget crucible password
crucible_conn = crucible.Crucible(conf['crucible']['url'])
token = crucible_conn.get_auth_token(conf['crucible']['username'], conf['crucible']['password'])
conf['crucible']['token'] = token
del conf['crucible']['password']
def load_config():
"""load config, check for required fields, print error if any are missing"""
conf = config.Config()
conf.load_from_file()
if not has_all_required_fields(conf):
print("Your configuration is incomplete, please run 'banter setup' to get that fixed up")
return None
return conf
def has_all_required_fields(conf):
for field in ('url', 'username', 'token', 'project_key'):
if conf.get_value('crucible', field) is None:
return False
return True
if __name__ == '__main__':
sys.exit(main())
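# --- Illustrative invocation sketch (not part of the original module),
# assuming the package installs a `banter` console script:
#
#   banter --setup                                # one-time interactive setup
#   git diff | banter -t "Fix widget" -r "alice,bob"
#
# The diff is read from stdin, cleaned by patch.clean(), posted as a new
# Crucible review, and the review URL is printed on success.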
| markdrago/banter | banter/banter.py | Python | mit | 3,421 | 0.004969 |
# -*- coding: utf-8 -*-
"""
uds.data.check
~~~~~~~~~~~~~~
    :copyright: Copyright (c) 2015, National Institute of Information and Communications Technology. All rights reserved.
:license: GPL2, see LICENSE for more details.
"""
import re
import datetime
import dateutil.parser
import pytz
import uds.logging
from uds.data import M2MDataVisitor
class M2MDataChecker(M2MDataVisitor):
"""M2MDataChecker check validity of M2M Data object.
"""
def __init__(self):
pass
def visit_v101(self, m2m_data):
"""Check v1.01 M2M Data.
:param M2MDataV101 m2m_data: Check target
        :return: True if the target is valid, otherwise False.
:rtype: :class:`bool`
"""
device_info = m2m_data.device_info
# Check info schema
if 'longitude' not in device_info or device_info['longitude'] is None:
uds.logging.error('[check] M2M Data schema is invalid. longitude is not in device_info.')
return False
if 'latitude' not in device_info or device_info['latitude'] is None:
uds.logging.error('[check] M2M Data schema is invalid. latitude is not in device_info.')
return False
# Check info values
if self._check_geo_point(m2m_data.device_info['longitude'], m2m_data.device_info['latitude']) is False:
return False
for datum in m2m_data.data_values:
# Check datum schema
if 'time' not in datum or datum['time'] is None:
uds.logging.error('[check] M2M Data schema is invalid. time is none.')
return False
# Check datum values
if self._check_time(datum['time'], m2m_data.dict['primary']['timezone']) is False:
return False
return True
def visit_v102(self, m2m_data):
"""Check v1.02 M2M Data.
:param M2MDataV102 m2m_data: Check target
        :return: True if the target is valid, otherwise False.
:rtype: :class:`bool`
"""
# Check info schema
# => nothing to do
for datum in m2m_data.data_values:
# Check datum schema
if 'time' not in datum or datum['time'] is None:
uds.logging.error('[check] M2M Data schema is invalid. time is none.')
return False
if 'longitude' not in datum or datum['longitude'] is None:
uds.logging.error('[check] M2M Data schema is invalid. longitude is none.')
return False
if 'latitude' not in datum or datum['latitude'] is None:
uds.logging.error('[check] M2M Data schema is invalid. latitude is none.')
return False
# Check datum values
if self._check_geo_point(datum['longitude'], datum['latitude']) is False:
return False
if self._check_time(datum['time'], m2m_data.dict['primary']['timezone']) is False:
return False
return True
@staticmethod
def _check_geo_point(longitude, latitude):
# Check whether longitude and latitude is within validity range.
if longitude < -180 or 180 < longitude or latitude < -90 or 90 < latitude:
uds.logging.error('[check] Geo point range is invalid. longitude or latitude is out of range.')
return False
if longitude == 0 and latitude == 0:
uds.logging.error('[check] Geo point range is invalid. longitude=latitude=0.')
return False
return True
@staticmethod
def _check_time(time, offset):
if offset is None:
uds.logging.error('[check] timezone is none.')
return False
# Check whether sensor time is earlier than current time.
try:
sensor_time = dateutil.parser.parse(time+offset) # sensor time
now_time = pytz.utc.localize(datetime.datetime.utcnow()) # current time
except Exception as e:
uds.logging.error(
'[check] time or timezone format is invalid. time={0}, timezone={1}, parse_error={2}'.format(
str(time), str(offset), str(e)))
return False
        # Treat sensing times more than 10 minutes in the future as an error.
if (now_time - sensor_time) > datetime.timedelta(minutes=-10):
return True
else:
uds.logging.error('[check] Sensing time is out of range.')
return False
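# --- Illustrative sketch (not part of the original module): the static
# validators can be exercised directly.
#
#   M2MDataChecker._check_geo_point(6.92, 46.99)   # -> True
#   M2MDataChecker._check_geo_point(0, 0)          # -> False, (0, 0) rejected
#   M2MDataChecker._check_time('2015-01-01T00:00:00', '+09:00')
#   # -> True, because the timestamp lies safely in the past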
| nict-isp/uds-sdk | uds/data/check.py | Python | gpl-2.0 | 4,491 | 0.004035 |
# -*- coding: utf-8 -*-
"""
Created by chiesa on 18.01.16
Copyright 2015 Alpes Lasers SA, Neuchatel, Switzerland
"""
import json
import os
import subprocess
from tempfile import NamedTemporaryFile
__author__ = 'chiesa'
__copyright__ = "Copyright 2015, Alpes Lasers SA"
def get_entry_points(venv_python_path, project_name):
    f = NamedTemporaryFile(delete=False)
    f.write('import pkg_resources\n')
    f.write('import json\n\n')
    f.write('print json.dumps(pkg_resources.get_entry_map(\'{0}\').get(\'console_scripts\', {{}}).keys())\n'.format(project_name))
    f.close()
    try:
        output = subprocess.check_output([venv_python_path, f.name])
    finally:
        os.unlink(f.name)  # remove the temporary helper script
    return json.loads(output)
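# --- Illustrative sketch (not part of the original module); the interpreter
# path and project name are placeholders:
#
#   scripts = get_entry_points('/path/to/venv/bin/python', 'mypackage')
#   version = get_project_version('/path/to/venv/bin/python', 'mypackage')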
def get_project_version(venv_python_path, project_name):
    f = NamedTemporaryFile(delete=False)
    f.write('import pkg_resources\n')
    f.write('import json\n\n')
    f.write('print json.dumps(pkg_resources.get_distribution(\'{0}\').version)\n'.format(project_name))
    f.close()
    try:
        output = subprocess.check_output([venv_python_path, f.name])
    finally:
        os.unlink(f.name)  # remove the temporary helper script
    return json.loads(output)
| chiesax/sandbox | sandbox/install_project/venv_inspect.py | Python | mit | 1,009 | 0.003964 |
'''
Some helpers
'''
import os
import pwd
import time
import requests
import subprocess
def get_uid(username):
return int(pwd.getpwnam(username).pw_uid)
def file_rights(filepath, mode=None, uid=None, gid=None):
'''
Change file rights
'''
file_handle = os.open(filepath, os.O_RDONLY)
if mode:
os.fchmod(file_handle, mode)
if uid:
if not gid:
gid = 0
os.fchown(file_handle, uid, gid)
os.close(file_handle)
def cmd_exec(cmd, show_output=False):
if show_output:
p = subprocess.Popen(cmd, shell=True, close_fds=True)
stdout, stderr = p.communicate()
return p.returncode
else:
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate()
return {
'error': p.returncode,
'stdout': stdout,
'stderr': stderr
}
def array_2_str(array):
return ''.join(array)
def get_ip_addresses():
result = cmd_exec('hostname -I')
return result['stdout'].split(' ')[:-1]
def wait_http(url, ok_message, interval=10):
    '''
    Poll url until it answers, then print ok_message
    '''
    service_up = False
    while not service_up:
        try:
            requests.get(url)
            service_up = True
            print ok_message
        except requests.exceptions.ConnectionError, e:
            print e
            time.sleep(interval)
def wait_couchdb(interval=10):
wait_http('http://127.0.0.1:5984/', 'CouchDB OK', interval)
def wait_cozy_controller(interval=10):
wait_http('http://127.0.0.1:9002/', 'Cozy controller OK', interval)
def wait_cozy_datasytem(interval=10):
    wait_http('http://127.0.0.1:9101/', 'Cozy data system OK', interval)
def wait_cozy_home(interval=10):
wait_http('http://127.0.0.1:9103/', 'Cozy home OK', interval)
def wait_cozy_proxy(interval=10):
wait_http('http://127.0.0.1:9104/', 'Cozy proxy OK', interval)
def wait_cozy_stack(interval=10):
wait_couchdb(interval)
wait_cozy_controller(interval)
wait_cozy_datasytem(interval)
wait_cozy_home(interval)
wait_cozy_proxy(interval)
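# --- Illustrative sketch (not part of the original module):
#
#   result = cmd_exec('ls /etc/cozy')
#   if result['error'] == 0:
#       print result['stdout']
#
#   wait_couchdb(interval=5)  # block until CouchDB answers on 127.0.0.1:5984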
| cozy/python_cozy_management | cozy_management/helpers.py | Python | lgpl-3.0 | 2,238 | 0.000447 |
# Copyright 2010 WebDriver committers
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Command(object):
"""
Defines constants for the standard WebDriver commands.
While these constants have no meaning in and of themselves, they are
used to marshal commands through a service that implements WebDriver's
remote wire protocol:
http://code.google.com/p/selenium/wiki/JsonWireProtocol
"""
# Keep in sync with org.openqa.selenium.remote.DriverCommand
STATUS = "status"
NEW_SESSION = "newSession"
GET_ALL_SESSIONS = "getAllSessions"
DELETE_SESSION = "deleteSession"
CLOSE = "close"
QUIT = "quit"
GET = "get"
GO_BACK = "goBack"
GO_FORWARD = "goForward"
REFRESH = "refresh"
ADD_COOKIE = "addCookie"
GET_COOKIE = "getCookie"
GET_ALL_COOKIES = "getCookies"
DELETE_COOKIE = "deleteCookie"
DELETE_ALL_COOKIES = "deleteAllCookies"
FIND_ELEMENT = "findElement"
FIND_ELEMENTS = "findElements"
FIND_CHILD_ELEMENT = "findChildElement"
FIND_CHILD_ELEMENTS = "findChildElements"
CLEAR_ELEMENT = "clearElement"
CLICK_ELEMENT = "clickElement"
SEND_KEYS_TO_ELEMENT = "sendKeysToElement"
SEND_KEYS_TO_ACTIVE_ELEMENT = "sendKeysToActiveElement"
SUBMIT_ELEMENT = "submitElement"
UPLOAD_FILE = "uploadFile"
GET_CURRENT_WINDOW_HANDLE = "getCurrentWindowHandle"
GET_WINDOW_HANDLES = "getWindowHandles"
GET_WINDOW_SIZE = "getWindowSize"
GET_WINDOW_POSITION = "getWindowPosition"
SET_WINDOW_SIZE = "setWindowSize"
SET_WINDOW_POSITION = "setWindowPosition"
SWITCH_TO_WINDOW = "switchToWindow"
SWITCH_TO_FRAME = "switchToFrame"
SWITCH_TO_PARENT_FRAME = "switchToParentFrame"
GET_ACTIVE_ELEMENT = "getActiveElement"
GET_CURRENT_URL = "getCurrentUrl"
GET_PAGE_SOURCE = "getPageSource"
GET_TITLE = "getTitle"
EXECUTE_SCRIPT = "executeScript"
SET_BROWSER_VISIBLE = "setBrowserVisible"
IS_BROWSER_VISIBLE = "isBrowserVisible"
GET_ELEMENT_TEXT = "getElementText"
GET_ELEMENT_VALUE = "getElementValue"
GET_ELEMENT_TAG_NAME = "getElementTagName"
SET_ELEMENT_SELECTED = "setElementSelected"
IS_ELEMENT_SELECTED = "isElementSelected"
IS_ELEMENT_ENABLED = "isElementEnabled"
IS_ELEMENT_DISPLAYED = "isElementDisplayed"
GET_ELEMENT_LOCATION = "getElementLocation"
GET_ELEMENT_LOCATION_ONCE_SCROLLED_INTO_VIEW = "getElementLocationOnceScrolledIntoView"
GET_ELEMENT_SIZE = "getElementSize"
GET_ELEMENT_ATTRIBUTE = "getElementAttribute"
GET_ELEMENT_VALUE_OF_CSS_PROPERTY = "getElementValueOfCssProperty"
ELEMENT_EQUALS = "elementEquals"
SCREENSHOT = "screenshot"
IMPLICIT_WAIT = "implicitlyWait"
EXECUTE_ASYNC_SCRIPT = "executeAsyncScript"
SET_SCRIPT_TIMEOUT = "setScriptTimeout"
SET_TIMEOUTS = "setTimeouts"
MAXIMIZE_WINDOW = "windowMaximize"
GET_LOG = "getLog"
GET_AVAILABLE_LOG_TYPES = "getAvailableLogTypes"
#Alerts
DISMISS_ALERT = "dismissAlert"
ACCEPT_ALERT = "acceptAlert"
SET_ALERT_VALUE = "setAlertValue"
GET_ALERT_TEXT = "getAlertText"
# Advanced user interactions
CLICK = "mouseClick"
DOUBLE_CLICK = "mouseDoubleClick"
MOUSE_DOWN = "mouseButtonDown"
MOUSE_UP = "mouseButtonUp"
MOVE_TO = "mouseMoveTo"
# Screen Orientation
SET_SCREEN_ORIENTATION = "setScreenOrientation"
GET_SCREEN_ORIENTATION = "getScreenOrientation"
# Touch Actions
SINGLE_TAP = "touchSingleTap"
TOUCH_DOWN = "touchDown"
TOUCH_UP = "touchUp"
TOUCH_MOVE = "touchMove"
TOUCH_SCROLL = "touchScroll"
DOUBLE_TAP = "touchDoubleTap"
LONG_PRESS = "touchLongPress"
FLICK = "touchFlick"
#HTML 5
EXECUTE_SQL = "executeSql"
GET_LOCATION = "getLocation"
SET_LOCATION = "setLocation"
GET_APP_CACHE = "getAppCache"
GET_APP_CACHE_STATUS = "getAppCacheStatus"
CLEAR_APP_CACHE = "clearAppCache"
IS_BROWSER_ONLINE = "isBrowserOnline"
SET_BROWSER_ONLINE = "setBrowserOnline"
GET_LOCAL_STORAGE_ITEM = "getLocalStorageItem"
REMOVE_LOCAL_STORAGE_ITEM = "removeLocalStorageItem"
GET_LOCAL_STORAGE_KEYS = "getLocalStorageKeys"
SET_LOCAL_STORAGE_ITEM = "setLocalStorageItem"
CLEAR_LOCAL_STORAGE = "clearLocalStorage"
GET_LOCAL_STORAGE_SIZE = "getLocalStorageSize"
GET_SESSION_STORAGE_ITEM = "getSessionStorageItem"
REMOVE_SESSION_STORAGE_ITEM = "removeSessionStorageItem"
GET_SESSION_STORAGE_KEYS = "getSessionStorageKeys"
SET_SESSION_STORAGE_ITEM = "setSessionStorageItem"
CLEAR_SESSION_STORAGE = "clearSessionStorage"
GET_SESSION_STORAGE_SIZE = "getSessionStorageSize"
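# --- Illustrative sketch (not part of selenium itself): a remote executor
# maps each constant to an HTTP verb and URL template and marshals calls of
# roughly this shape (assumed for the example):
#
#   {'name': Command.GET, 'sessionId': 'abc123',
#    'parameters': {'url': 'http://example.com'}}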
| compstak/selenium | py/selenium/webdriver/remote/command.py | Python | apache-2.0 | 5,188 | 0.000771 |
from rdkit import Chem
from rdkit import rdBase
from rdkit.Chem import rdMolDescriptors as rdMD
from rdkit.Chem import AllChem
from rdkit.Chem.EState import EStateIndices
from rdkit.Chem.EState import AtomTypes
import time
print rdBase.rdkitVersion
print rdBase.boostVersion
def getEState(mol):
return EStateIndices(mol)
def localopt(mol, steps = 500):
if mol.GetNumConformers() == 0:
mol=make3D(mol)
AllChem.MMFFOptimizeMolecule(mol, maxIters = steps)
return mol
def make3D(mol, steps = 50):
mol = Chem.AddHs(mol)
success = AllChem.EmbedMolecule(mol)
if success == -1: # Failed
success = AllChem.EmbedMolecule(mol, useRandomCoords = True)
if success == -1:
            raise RuntimeError("Embedding failed!")  # Error was undefined; use a builtin exception
mol = localopt(mol, steps)
return mol
def get3D(m,is3d):
if not is3d:
m = Chem.AddHs(m)
AllChem.EmbedMolecule(m)
AllChem.MMFFOptimizeMolecule(m)
r= rdMD.CalcAUTOCORR3D(m)+rdMD.CalcRDF(m)+rdMD.CalcMORSE(m)+rdMD.CalcWHIM(m)+rdMD.CalcGETAWAY(m)
return r
def generateALL():
m = Chem.MolFromSmiles('Cc1ccccc1')
thefile = open('testAC.txt', 'w')
filename="/Users/mbp/Github/rdkit_mine/Code/GraphMol/Descriptors/test_data/PBF_egfr.sdf"
suppl = Chem.SDMolSupplier(filename,removeHs=False)
mols = [x for x in suppl]
start = time.time()
for m in mols:
r= get3D(m,True)
for item in r:
thefile.write("%.3f," % item)
thefile.write("\n")
end = time.time()
print end - start
thefile = open('testSMWHIM.txt', 'w')
writer = Chem.SDWriter('3Dsmallmol.sdf')
A=['[H][H]','B','O=O','C','CC','CCC','CCCC','CCCCC','CCCCCC','CO','CCO','CCCO','CCCCO','CCCCCO','CCCCCCO','CCl','CCCl','CCCCl','CCCCCl','CCCCCCl','CCCCCCCl','CBr','CCBr','CCCBr','CCCCBr','CCCCCBr','CCCCCCBr','CI','CCI','CCCI','CCCCI','CCCCCI','CCCCCCI','CF','CCF','CCCF','CCCCF','CCCCCF','CCCCCCF','CS','CCS','CCCS','CCCCS','CCCCCS','CCCCCCS','CN','CCN','CCCN','CCCCN','CCCCCN','CCCCCCN']
for smi in A:
m = Chem.MolFromSmiles(smi)
m=localopt(m,100)
#r=get3D(m,True)
print smi
print "---------"
r=rdMD.CalcWHIM(m)
print "Ei:"+str(r[0])+ "," + str(r[1]) + "," + str(r[2])+ "\n"
print "Gi:"+str(r[5])+ "," + str(r[6]) + "," + str(r[7])+ "\n"
print "SI:"+str(rdMD.CalcSpherocityIndex(m))
print "AS:"+str(rdMD.CalcAsphericity(m))
print "EX:"+str(rdMD.CalcEccentricity(m))
for item in r:
thefile.write("%.3f," % item)
thefile.write("\n")
#m.SetProp("smi", smi)
#writer.write(m)
thefile = open('testBPA.txt', 'w')
writer = Chem.SDWriter('3DBPAmol.sdf')
B=['CN(C)CC(Br)c1ccccc1','CN(C)CC(Br)c1ccc(F)cc1','CN(C)CC(Br)c1ccc(Cl)cc1','CN(C)CC(Br)c1ccc(Cl)cc1','CN(C)CC(Br)c1ccc(I)cc1','CN(C)CC(Br)c1ccc(C)cc1','CN(C)CC(Br)c1cccc(F)c1','CN(C)CC(Br)c1cccc(Cl)c1','CN(C)CC(Br)c1cccc(Br)c1','CN(C)CC(Br)c1cccc(I)c1','CN(C)CC(Br)c1cccc(C)c1','CN(C)CC(Br)c1ccc(F)c(Cl)c1','CN(C)CC(Br)c1ccc(F)c(Br)c1','CN(C)CC(Br)c1ccc(F)c(C)c1','CN(C)CC(Br)c1ccc(Cl)c(Cl)c1','CN(C)CC(Br)c1ccc(Cl)c(Br)c1','CN(C)CC(Br)c1ccc(Cl)c(C)c1','CN(C)CC(Br)c1ccc(Br)c(Cl)c1','CN(C)CC(Br)c1ccc(Br)c(Br)c1','CN(C)CC(Br)c1ccc(Br)c(C)c1','CN(C)CC(Br)c1ccc(C)c(C)c1','CN(C)CC(Br)c1ccc(C)c(Br)c1']
for smi in B:
m = Chem.MolFromSmiles(smi)
m=localopt(m,100)
#r=get3D(m,True)
r=rdMD.CalcWHIM(m)
for item in r:
thefile.write("%.3f," % item)
thefile.write("\n")
#m.SetProp("smi", smi)
#writer.write(m)
A="G1w,G2w,G3w,Gw"
print dir(rdMD)
| rdkit/rdkit | Code/GraphMol/Descriptors/test3D_old.py | Python | bsd-3-clause | 3,537 | 0.035341 |