| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6-947k | stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 | stringclasses 15 | int64 6-947k | float64 0-0.34 |
from django.core.exceptions import ImproperlyConfigured
from django.conf import settings
from rest_framework.settings import APISettings
USER_SETTINGS = getattr(settings, 'JWT_GRAPHENE', None)
DEFAULTS = {
'JWT_GRAPHENE_USER_ONLY_FIELDS': None,
'JWT_GRAPHENE_USER_EXCLUDE_FIELDS': None,
}
IMPORT_STRINGS = (
)
api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
if api_settings.JWT_GRAPHENE_USER_ONLY_FIELDS is not None and api_settings.JWT_GRAPHENE_USER_EXCLUDE_FIELDS is not None:
raise ImproperlyConfigured("can't set both JWT_GRAPHENE_USER_ONLY_FIELDS and JWT_GRAPHENE_USER_EXCLUDE_FIELDS")
| SillyFreak/django-graphene-jwt | graphene_jwt/settings.py | Python | agpl-3.0 | 629 | 0.00318 |
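A minimal sketch of how the settings module above would be consumed in a Django project; the field names in the tuple are hypothetical:

# In the project's settings.py (hypothetical field names):
JWT_GRAPHENE = {
    'JWT_GRAPHENE_USER_ONLY_FIELDS': ('id', 'username', 'email'),
    # Leave JWT_GRAPHENE_USER_EXCLUDE_FIELDS unset: setting both
    # raises ImproperlyConfigured, as enforced above.
}
# Elsewhere, read the merged user/default value through the APISettings object:
from graphene_jwt.settings import api_settings
only_fields = api_settings.JWT_GRAPHENE_USER_ONLY_FIELDS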
"""Shared OS X support functions."""
import os
import re
import sys
__all__ = [
'compiler_fixup',
'customize_config_vars',
'customize_compiler',
'get_platform_osx',
]
# configuration variables that may contain universal build flags,
# like "-arch" or "-isdkroot", that may need customization for
# the user environment
_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS',
'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
'PY_CORE_CFLAGS')
# configuration variables that may contain compiler calls
_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX')
# prefix added to original configuration variable names
_INITPRE = '_OSX_SUPPORT_INITIAL_'
def _find_executable(executable, path=None):
"""Tries to find 'executable' in the directories listed in 'path'.
    'path' is a string of directories separated by 'os.pathsep'; it defaults
    to os.environ['PATH']. Returns the complete filename or None if not found.
"""
if path is None:
path = os.environ['PATH']
paths = path.split(os.pathsep)
base, ext = os.path.splitext(executable)
if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
executable = executable + '.exe'
if not os.path.isfile(executable):
for p in paths:
f = os.path.join(p, executable)
if os.path.isfile(f):
# the file exists, we have a shot at spawn working
return f
return None
else:
return executable
def _read_output(commandstring):
"""Output from succesful command execution or None"""
# Similar to os.popen(commandstring, "r").read(),
# but without actually using os.popen because that
# function is not usable during python bootstrap.
# tempfile is also not available then.
import contextlib
try:
import tempfile
fp = tempfile.NamedTemporaryFile()
except ImportError:
fp = open("/tmp/_osx_support.%s"%(
os.getpid(),), "w+b")
with contextlib.closing(fp) as fp:
cmd = "%s 2>/dev/null >'%s'" % (commandstring, fp.name)
return fp.read().decode('utf-8').strip() if not os.system(cmd) else None
def _find_build_tool(toolname):
"""Find a build tool on current path or using xcrun"""
return (_find_executable(toolname)
or _read_output("/usr/bin/xcrun -find %s" % (toolname,))
or ''
)
_SYSTEM_VERSION = None
def _get_system_version():
"""Return the OS X system version as a string"""
# Reading this plist is a documented way to get the system
# version (see the documentation for the Gestalt Manager)
# We avoid using platform.mac_ver to avoid possible bootstrap issues during
# the build of Python itself (distutils is used to build standard library
# extensions).
global _SYSTEM_VERSION
if _SYSTEM_VERSION is None:
_SYSTEM_VERSION = ''
try:
f = open('/System/Library/CoreServices/SystemVersion.plist')
except IOError:
# We're on a plain darwin box, fall back to the default
# behaviour.
pass
else:
try:
m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
r'<string>(.*?)</string>', f.read())
finally:
f.close()
if m is not None:
_SYSTEM_VERSION = '.'.join(m.group(1).split('.')[:2])
# else: fall back to the default behaviour
return _SYSTEM_VERSION
def _remove_original_values(_config_vars):
"""Remove original unmodified values for testing"""
# This is needed for higher-level cross-platform tests of get_platform.
for k in list(_config_vars):
if k.startswith(_INITPRE):
del _config_vars[k]
def _save_modified_value(_config_vars, cv, newvalue):
"""Save modified and original unmodified value of configuration var"""
oldvalue = _config_vars.get(cv, '')
if (oldvalue != newvalue) and (_INITPRE + cv not in _config_vars):
_config_vars[_INITPRE + cv] = oldvalue
_config_vars[cv] = newvalue
def _supports_universal_builds():
"""Returns True if universal builds are supported on this system"""
# As an approximation, we assume that if we are running on 10.4 or above,
# then we are running with an Xcode environment that supports universal
# builds, in particular -isysroot and -arch arguments to the compiler. This
# is in support of allowing 10.4 universal builds to run on 10.3.x systems.
osx_version = _get_system_version()
if osx_version:
try:
osx_version = tuple(int(i) for i in osx_version.split('.'))
except ValueError:
osx_version = ''
return bool(osx_version >= (10, 4)) if osx_version else False
def _find_appropriate_compiler(_config_vars):
"""Find appropriate C compiler for extension module builds"""
# Issue #13590:
# The OSX location for the compiler varies between OSX
# (or rather Xcode) releases. With older releases (up-to 10.5)
# the compiler is in /usr/bin, with newer releases the compiler
# can only be found inside Xcode.app if the "Command Line Tools"
# are not installed.
#
    # Furthermore, the compiler that can be used varies between
    # Xcode releases. Up to Xcode 4 it was possible to use 'gcc-4.2'
# as the compiler, after that 'clang' should be used because
# gcc-4.2 is either not present, or a copy of 'llvm-gcc' that
# miscompiles Python.
    # skip checks if the compiler was overridden with a CC env variable
if 'CC' in os.environ:
return _config_vars
# The CC config var might contain additional arguments.
# Ignore them while searching.
cc = oldcc = _config_vars['CC'].split()[0]
if not _find_executable(cc):
# Compiler is not found on the shell search PATH.
        # Now search for clang, first on PATH (if the Command Line
# Tools have been installed in / or if the user has provided
# another location via CC). If not found, try using xcrun
# to find an uninstalled clang (within a selected Xcode).
# NOTE: Cannot use subprocess here because of bootstrap
# issues when building Python itself (and os.popen is
# implemented on top of subprocess and is therefore not
# usable as well)
cc = _find_build_tool('clang')
elif os.path.basename(cc).startswith('gcc'):
# Compiler is GCC, check if it is LLVM-GCC
data = _read_output("'%s' --version"
% (cc.replace("'", "'\"'\"'"),))
if 'llvm-gcc' in data:
# Found LLVM-GCC, fall back to clang
cc = _find_build_tool('clang')
if not cc:
raise SystemError(
"Cannot locate working compiler")
if cc != oldcc:
# Found a replacement compiler.
        # Modify config vars using new compiler, if not already explicitly
        # overridden by an env variable, preserving additional arguments.
for cv in _COMPILER_CONFIG_VARS:
if cv in _config_vars and cv not in os.environ:
cv_split = _config_vars[cv].split()
cv_split[0] = cc if cv != 'CXX' else cc + '++'
_save_modified_value(_config_vars, cv, ' '.join(cv_split))
return _config_vars
def _remove_universal_flags(_config_vars):
"""Remove all universal build arguments from config vars"""
for cv in _UNIVERSAL_CONFIG_VARS:
        # Do not alter a config var explicitly overridden by env var
if cv in _config_vars and cv not in os.environ:
flags = _config_vars[cv]
            # Pass re.ASCII via the flags keyword; passed positionally it
            # would be misread as re.sub()'s count argument.
            flags = re.sub(r'-arch\s+\w+\s', ' ', flags, flags=re.ASCII)
            flags = re.sub(r'-isysroot [^ \t]*', ' ', flags)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
def _remove_unsupported_archs(_config_vars):
"""Remove any unsupported archs from config vars"""
# Different Xcode releases support different sets for '-arch'
# flags. In particular, Xcode 4.x no longer supports the
# PPC architectures.
#
# This code automatically removes '-arch ppc' and '-arch ppc64'
# when these are not supported. That makes it possible to
# build extensions on OSX 10.7 and later with the prebuilt
# 32-bit installer on the python.org website.
    # skip checks if the compiler was overridden with a CC env variable
if 'CC' in os.environ:
return _config_vars
    if re.search(r'-arch\s+ppc', _config_vars['CFLAGS']) is not None:
# NOTE: Cannot use subprocess here because of bootstrap
# issues when building Python itself
status = os.system("'%s' -arch ppc -x c /dev/null 2>/dev/null"%(
_config_vars['CC'].replace("'", "'\"'\"'"),))
        # The Apple compiler drivers return status 255 if PPC is not supported
if (status >> 8) == 255:
# Compiler doesn't support PPC, remove the related
# '-arch' flags if not explicitly overridden by an
# environment variable
for cv in _UNIVERSAL_CONFIG_VARS:
if cv in _config_vars and cv not in os.environ:
flags = _config_vars[cv]
                    flags = re.sub(r'-arch\s+ppc\w*\s', ' ', flags)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
def _override_all_archs(_config_vars):
"""Allow override of all archs with ARCHFLAGS env var"""
# NOTE: This name was introduced by Apple in OSX 10.5 and
# is used by several scripting languages distributed with
# that OS release.
if 'ARCHFLAGS' in os.environ:
arch = os.environ['ARCHFLAGS']
for cv in _UNIVERSAL_CONFIG_VARS:
if cv in _config_vars and '-arch' in _config_vars[cv]:
flags = _config_vars[cv]
                flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
flags = flags + ' ' + arch
_save_modified_value(_config_vars, cv, flags)
return _config_vars
def _check_for_unavailable_sdk(_config_vars):
"""Remove references to any SDKs not available"""
# If we're on OSX 10.5 or later and the user tries to
# compile an extension using an SDK that is not present
# on the current machine it is better to not use an SDK
# than to fail. This is particularly important with
    # the standalone Command Line Tools alternative to a
# full-blown Xcode install since the CLT packages do not
# provide SDKs. If the SDK is not present, it is assumed
# that the header files and dev libs have been installed
# to /usr and /System/Library by either a standalone CLT
# package or the CLT component within Xcode.
cflags = _config_vars.get('CFLAGS', '')
m = re.search(r'-isysroot\s+(\S+)', cflags)
if m is not None:
sdk = m.group(1)
if not os.path.exists(sdk):
for cv in _UNIVERSAL_CONFIG_VARS:
                # Do not alter a config var explicitly overridden by env var
if cv in _config_vars and cv not in os.environ:
flags = _config_vars[cv]
flags = re.sub(r'-isysroot\s+\S+(?:\s|$)', ' ', flags)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
def compiler_fixup(compiler_so, cc_args):
"""
    This function will strip '-isysroot PATH' and '-arch ARCH' from the
    compile flags if the user has specified one of them in extra_compile_flags.
This is needed because '-arch ARCH' adds another architecture to the
build, without a way to remove an architecture. Furthermore GCC will
barf if multiple '-isysroot' arguments are present.
"""
stripArch = stripSysroot = False
compiler_so = list(compiler_so)
if not _supports_universal_builds():
        # OS X before 10.4.0 doesn't support -arch and -isysroot
        # at all.
stripArch = stripSysroot = True
else:
stripArch = '-arch' in cc_args
stripSysroot = '-isysroot' in cc_args
if stripArch or 'ARCHFLAGS' in os.environ:
while True:
try:
index = compiler_so.index('-arch')
# Strip this argument and the next one:
del compiler_so[index:index+2]
except ValueError:
break
if 'ARCHFLAGS' in os.environ and not stripArch:
# User specified different -arch flags in the environ,
# see also distutils.sysconfig
compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()
if stripSysroot:
while True:
try:
index = compiler_so.index('-isysroot')
# Strip this argument and the next one:
del compiler_so[index:index+2]
except ValueError:
break
# Check if the SDK that is used during compilation actually exists,
# the universal build requires the usage of a universal SDK and not all
# users have that installed by default.
sysroot = None
if '-isysroot' in cc_args:
idx = cc_args.index('-isysroot')
sysroot = cc_args[idx+1]
elif '-isysroot' in compiler_so:
idx = compiler_so.index('-isysroot')
sysroot = compiler_so[idx+1]
if sysroot and not os.path.isdir(sysroot):
from distutils import log
log.warn("Compiling with an SDK that doesn't seem to exist: %s",
sysroot)
log.warn("Please check your Xcode installation")
return compiler_so
def customize_config_vars(_config_vars):
"""Customize Python build configuration variables.
Called internally from sysconfig with a mutable mapping
containing name/value pairs parsed from the configured
makefile used to build this interpreter. Returns
the mapping updated as needed to reflect the environment
in which the interpreter is running; in the case of
a Python from a binary installer, the installed
environment may be very different from the build
    environment, i.e. different OS levels, different
    build tools, different available CPU architectures.
This customization is performed whenever
distutils.sysconfig.get_config_vars() is first
called. It may be used in environments where no
compilers are present, i.e. when installing pure
Python dists. Customization of compiler paths
and detection of unavailable archs is deferred
    until the first extension module build is
requested (in distutils.sysconfig.customize_compiler).
Currently called from distutils.sysconfig
"""
if not _supports_universal_builds():
# On Mac OS X before 10.4, check if -arch and -isysroot
# are in CFLAGS or LDFLAGS and remove them if they are.
# This is needed when building extensions on a 10.3 system
# using a universal build of python.
_remove_universal_flags(_config_vars)
# Allow user to override all archs with ARCHFLAGS env var
_override_all_archs(_config_vars)
# Remove references to sdks that are not found
_check_for_unavailable_sdk(_config_vars)
return _config_vars
def customize_compiler(_config_vars):
"""Customize compiler path and configuration variables.
This customization is performed when the first
extension module build is requested
    (in distutils.sysconfig.customize_compiler).
"""
# Find a compiler to use for extension module builds
_find_appropriate_compiler(_config_vars)
# Remove ppc arch flags if not supported here
_remove_unsupported_archs(_config_vars)
# Allow user to override all archs with ARCHFLAGS env var
_override_all_archs(_config_vars)
return _config_vars
def get_platform_osx(_config_vars, osname, release, machine):
"""Filter values for get_platform()"""
# called from get_platform() in sysconfig and distutils.util
#
# For our purposes, we'll assume that the system version from
# distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
# to. This makes the compatibility story a bit more sane because the
# machine is going to compile and link as if it were
# MACOSX_DEPLOYMENT_TARGET.
macver = _config_vars.get('MACOSX_DEPLOYMENT_TARGET', '')
macrelease = _get_system_version() or macver
macver = macver or macrelease
if macver:
release = macver
osname = "macosx"
# Use the original CFLAGS value, if available, so that we
# return the same machine type for the platform string.
# Otherwise, distutils may consider this a cross-compiling
# case and disallow installs.
cflags = _config_vars.get(_INITPRE+'CFLAGS',
_config_vars.get('CFLAGS', ''))
if ((macrelease + '.') >= '10.4.' and
'-arch' in cflags.strip()):
# The universal build will build fat binaries, but not on
# systems before 10.4
machine = 'fat'
archs = re.findall('-arch\s+(\S+)', cflags)
archs = tuple(sorted(set(archs)))
if len(archs) == 1:
machine = archs[0]
elif archs == ('i386', 'ppc'):
machine = 'fat'
elif archs == ('i386', 'x86_64'):
machine = 'intel'
elif archs == ('i386', 'ppc', 'x86_64'):
machine = 'fat3'
elif archs == ('ppc64', 'x86_64'):
machine = 'fat64'
elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
machine = 'universal'
else:
raise ValueError(
"Don't know machine value for archs=%r" % (archs,))
elif machine == 'i386':
# On OSX the machine type returned by uname is always the
# 32-bit variant, even if the executable architecture is
# the 64-bit variant
if sys.maxsize >= 2**32:
machine = 'x86_64'
elif machine in ('PowerPC', 'Power_Macintosh'):
# Pick a sane name for the PPC architecture.
# See 'i386' case
if sys.maxsize >= 2**32:
machine = 'ppc64'
else:
machine = 'ppc'
return (osname, release, machine)
| lfcnassif/MultiContentViewer | release/modules/ext/libreoffice/program/python-core-3.3.0/lib/_osx_support.py | Python | lgpl-3.0 | 18,472 | 0.001462 |
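A small illustration of the config-var rewriting above, assuming the module is importable as the standard-library `_osx_support`; the flag values are made up:

import os
import _osx_support
config_vars = {'CFLAGS': '-arch i386 -arch x86_64 -g -O2'}
# ARCHFLAGS overrides every '-arch' flag via _override_all_archs().
os.environ['ARCHFLAGS'] = '-arch arm64'
_osx_support._override_all_archs(config_vars)
# The rewritten flags end with '-arch arm64' (modulo whitespace), and the
# original value is preserved under the _OSX_SUPPORT_INITIAL_ prefix.
print(config_vars['CFLAGS'])
print(config_vars['_OSX_SUPPORT_INITIAL_CFLAGS'])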
# Dummy stand-ins for the real converters: each opens its input file and
# writes a placeholder output file so the conversion steps can be traced.
def pgmtoppm(atlas_slice):
    result = atlas_slice[:-3] + "ppm"
    with open(atlas_slice, "rb") as aslice, \
            open(result, "w") as out:
        out.write(atlas_slice + ".ppm")
    return result
def pnmtojpeg(ppm_slice):
    result = ppm_slice[:-3] + "jpg"
    with open(ppm_slice, "rb") as aslice, \
            open(result, "w") as out:
        out.write(ppm_slice + ".jpg")
    return result
| gems-uff/noworkflow | capture/noworkflow/resources/demo/2016_ipaw_paper/step4/convert.py | Python | mit | 449 | 0.002227 |
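Chained as a two-step pipeline (the input filename is a placeholder and must already exist, since the dummies still open their inputs):

ppm = pgmtoppm('slice0.pgm')   # writes slice0.ppm
jpg = pnmtojpeg(ppm)           # writes slice0.jpg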
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""gRPC debug server in Python."""
# pylint: disable=g-bad-import-order
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import json
import threading
import time
from concurrent import futures
import grpc
from six.moves import queue
from tensorflow.core.debug import debug_service_pb2
from tensorflow.core.framework import graph_pb2
from tensorflow.python.debug.lib import debug_data
from tensorflow.python.debug.lib import debug_service_pb2_grpc
from tensorflow.python.platform import tf_logging as logging
DebugWatch = collections.namedtuple("DebugWatch",
["node_name", "output_slot", "debug_op"])
def _watch_key_event_reply(new_state, node_name, output_slot, debug_op):
"""Make `EventReply` proto to represent a request to watch/unwatch a debug op.
Args:
new_state: (`debug_service_pb2.EventReply.DebugOpStateChange.State`) the new
state to set the debug node to, i.e., whether the debug node will become
disabled under the grpc mode (`DISABLED`), become a watchpoint
(`READ_ONLY`) or become a breakpoint (`READ_WRITE`).
node_name: (`str`) name of the node.
output_slot: (`int`) output slot of the tensor.
debug_op: (`str`) the debug op attached to node_name:output_slot tensor to
watch or unwatch.
Returns:
An EventReply proto.
"""
event_reply = debug_service_pb2.EventReply()
state_change = event_reply.debug_op_state_changes.add()
state_change.state = new_state
state_change.node_name = node_name
state_change.output_slot = output_slot
state_change.debug_op = debug_op
return event_reply
class EventListenerBaseStreamHandler(object):
"""Per-stream handler of EventListener gRPC streams."""
def __init__(self):
"""Constructor of EventListenerBaseStreamHandler."""
def on_core_metadata_event(self, event):
"""Callback for core metadata.
Args:
event: The Event proto that carries a JSON string in its
`log_message.message` field.
"""
raise NotImplementedError(
"on_core_metadata_event() is not implemented in the base servicer "
"class")
def on_graph_def(self, graph_def, device_name, wall_time):
"""Callback for Event proto received through the gRPC stream.
This Event proto carries a GraphDef, encoded as bytes, in its graph_def
field.
Args:
graph_def: A GraphDef object.
device_name: Name of the device on which the graph was created.
wall_time: An epoch timestamp (in microseconds) for the graph.
"""
raise NotImplementedError(
"on_graph_def() is not implemented in the base servicer class")
def on_value_event(self, event):
"""Callback for Event proto received through the gRPC stream.
This Event proto carries a Tensor in its summary.value[0] field.
Args:
event: The Event proto from the stream to be processed.
"""
raise NotImplementedError(
"on_value_event() is not implemented in the base servicer class")
class EventListenerBaseServicer(debug_service_pb2_grpc.EventListenerServicer):
"""Base Python class for gRPC debug server."""
def __init__(self, server_port, stream_handler_class):
"""Constructor.
Args:
server_port: (int) Port number to bind to.
      stream_handler_class: A subclass of `EventListenerBaseStreamHandler`
        that will be used to construct stream handler objects during
        `SendEvents` calls.
"""
self._server_port = server_port
self._stream_handler_class = stream_handler_class
self._server_lock = threading.Lock()
self._server_started = False
self._stop_requested = False
self._event_reply_queue = queue.Queue()
self._gated_grpc_debug_watches = set()
self._breakpoints = set()
def SendEvents(self, request_iterator, context):
"""Implementation of the SendEvents service method.
This method receives streams of Event protos from the client, and processes
them in ways specified in the on_event() callback. The stream is
bi-directional, but currently only the client-to-server stream (i.e., the
stream from the debug ops to the server) is used.
Args:
request_iterator: The incoming stream of Event protos.
context: Server context.
Raises:
ValueError: If there are more than one core metadata events.
Yields:
An empty stream of responses.
"""
core_metadata_count = 0
# A map from GraphDef hash to a list of received chunks.
graph_def_chunks = {}
tensor_chunks = {}
stream_handler = None
for event in request_iterator:
if not stream_handler:
stream_handler = self._stream_handler_class()
if event.graph_def:
maybe_graph_def, maybe_device_name, maybe_wall_time = (
self._process_encoded_graph_def_in_chunks(event, graph_def_chunks))
if maybe_graph_def:
stream_handler.on_graph_def(
maybe_graph_def, maybe_device_name, maybe_wall_time)
elif event.log_message.message:
core_metadata_count += 1
if core_metadata_count > 1:
raise ValueError(
"Expected one core metadata event; received multiple")
stream_handler.on_core_metadata_event(event)
elif event.summary and event.summary.value:
maybe_tensor_event = self._process_tensor_event_in_chunks(
event, tensor_chunks)
if maybe_tensor_event:
event_reply = stream_handler.on_value_event(maybe_tensor_event)
if event_reply is not None:
yield event_reply
# The server writes EventReply messages, if any.
while not self._event_reply_queue.empty():
event_reply = self._event_reply_queue.get()
for state_change in event_reply.debug_op_state_changes:
if (state_change.state ==
debug_service_pb2.EventReply.DebugOpStateChange.READ_WRITE):
logging.info("Adding breakpoint %s:%d:%s", state_change.node_name,
state_change.output_slot, state_change.debug_op)
self._breakpoints.add(
(state_change.node_name, state_change.output_slot,
state_change.debug_op))
elif (state_change.state ==
debug_service_pb2.EventReply.DebugOpStateChange.DISABLED):
logging.info("Removing watchpoint or breakpoint: %s:%d:%s",
state_change.node_name, state_change.output_slot,
state_change.debug_op)
self._breakpoints.discard(
(state_change.node_name, state_change.output_slot,
state_change.debug_op))
yield event_reply
def _process_tensor_event_in_chunks(self, event, tensor_chunks):
"""Possibly reassemble event chunks.
Due to gRPC's message size limit, a large tensor can be encapsulated in
    multiple Event proto chunks to be sent through the debugger stream. This
    method keeps track of the chunks that have arrived, reassembles all chunks
    corresponding to a tensor once they have arrived, and returns the
    reassembled Event proto.
Args:
event: The single Event proto that has arrived.
tensor_chunks: A dict used to keep track of the Event protos that have
arrived but haven't been reassembled.
Returns:
If all Event protos corresponding to a tensor have arrived, returns the
reassembled Event proto. Otherwise, return None.
"""
value = event.summary.value[0]
debugger_plugin_metadata = json.loads(value.metadata.plugin_data.content)
device_name = debugger_plugin_metadata["device"]
num_chunks = debugger_plugin_metadata["numChunks"]
chunk_index = debugger_plugin_metadata["chunkIndex"]
if num_chunks <= 1:
return event
debug_node_name = value.node_name
timestamp = int(event.wall_time)
tensor_key = "%s_%s_%d" % (device_name, debug_node_name, timestamp)
if tensor_key not in tensor_chunks:
tensor_chunks[tensor_key] = [None] * num_chunks
chunks = tensor_chunks[tensor_key]
if value.tensor.tensor_content:
chunks[chunk_index] = value.tensor
elif value.tensor.string_val:
chunks[chunk_index] = event
if None not in chunks:
if value.tensor.tensor_content:
event.summary.value[0].tensor.tensor_content = b"".join(
chunk.tensor_content for chunk in chunks)
del tensor_chunks[tensor_key]
return event
elif value.tensor.string_val:
merged_event = chunks[0]
for chunk in chunks[1:]:
merged_event.summary.value[0].tensor.string_val.extend(
list(chunk.summary.value[0].tensor.string_val))
return merged_event
def _process_encoded_graph_def_in_chunks(self,
event,
graph_def_chunks):
"""Process an Event proto containing a chunk of encoded GraphDef.
Args:
event: the Event proto containing the chunk of encoded GraphDef.
graph_def_chunks: A dict mapping keys for GraphDefs (i.e.,
"<graph_def_hash>,<device_name>,<wall_time>") to a list of chunks of
encoded GraphDefs.
Returns:
If all chunks of the GraphDef have arrived,
return decoded GraphDef proto, device name, wall_time.
Otherwise,
return None, None, None.
"""
graph_def = graph_pb2.GraphDef()
index_bar_0 = event.graph_def.find(b"|")
index_bar_1 = event.graph_def.find(b"|", index_bar_0 + 1)
index_bar_2 = event.graph_def.find(b"|", index_bar_1 + 1)
graph_def_hash_device_timestamp = event.graph_def[:index_bar_0]
chunk_index = int(event.graph_def[index_bar_0 + 1 : index_bar_1])
num_chunks = int(event.graph_def[index_bar_1 + 1 : index_bar_2])
if graph_def_hash_device_timestamp not in graph_def_chunks:
graph_def_chunks[graph_def_hash_device_timestamp] = [None] * num_chunks
graph_def_chunks[graph_def_hash_device_timestamp][
chunk_index] = event.graph_def[index_bar_2 + 1:]
if all(graph_def_chunks[graph_def_hash_device_timestamp]):
device_name = graph_def_hash_device_timestamp.split(b",")[1]
wall_time = int(graph_def_hash_device_timestamp.split(b",")[2])
graph_def.ParseFromString(
b"".join(graph_def_chunks[graph_def_hash_device_timestamp]))
del graph_def_chunks[graph_def_hash_device_timestamp]
self._process_graph_def(graph_def)
return graph_def, device_name, wall_time
else:
return None, None, None
def _process_graph_def(self, graph_def):
for node_def in graph_def.node:
if (debug_data.is_debug_node(node_def.name) and
node_def.attr["gated_grpc"].b):
node_name, output_slot, _, debug_op = (
debug_data.parse_debug_node_name(node_def.name))
self._gated_grpc_debug_watches.add(
DebugWatch(node_name, output_slot, debug_op))
def run_server(self, blocking=True):
"""Start running the server.
Args:
blocking: If `True`, block until `stop_server()` is invoked.
Raises:
ValueError: If server stop has already been requested, or if the server
has already started running.
"""
self._server_lock.acquire()
try:
if self._stop_requested:
raise ValueError("Server has already stopped")
if self._server_started:
raise ValueError("Server has already started running")
self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
debug_service_pb2_grpc.add_EventListenerServicer_to_server(self,
self.server)
self.server.add_insecure_port("[::]:%d" % self._server_port)
self.server.start()
self._server_started = True
finally:
self._server_lock.release()
if blocking:
while not self._stop_requested:
time.sleep(1.0)
def stop_server(self, grace=1.0):
"""Request server stopping.
    Once stopped, the server cannot be stopped or started again. This method is
non-blocking. Call `wait()` on the returned event to block until the server
has completely stopped.
Args:
grace: Grace period in seconds to be used when calling `server.stop()`.
Raises:
ValueError: If server stop has already been requested, or if the server
has not started running yet.
Returns:
A threading.Event that will be set when the server has completely stopped.
"""
self._server_lock.acquire()
try:
if not self._server_started:
raise ValueError("Server has not started running")
if self._stop_requested:
raise ValueError("Server has already stopped")
self._stop_requested = True
return self.server.stop(grace=grace)
finally:
self._server_lock.release()
def request_watch(self, node_name, output_slot, debug_op, breakpoint=False):
"""Request enabling a debug tensor watchpoint or breakpoint.
    This will let the server send an `EventReply` to the client side
(i.e., the debugged TensorFlow runtime process) to request adding a watch
key (i.e., <node_name>:<output_slot>:<debug_op>) to the list of enabled
watch keys. The list applies only to debug ops with the attribute
gated_grpc=True.
The request will take effect on the next debugged `Session.run()` call.
To disable the watch, use `request_unwatch()`.
Args:
node_name: (`str`) name of the node that the to-be-watched tensor belongs
to, e.g., "hidden/Weights".
output_slot: (`int`) output slot index of the tensor to watch.
debug_op: (`str`) name of the debug op to enable. This should not include
any attribute substrings.
breakpoint: (`bool`) Iff `True`, the debug op will block and wait until it
receives an `EventReply` response from the server. The `EventReply`
proto may carry a TensorProto that modifies the value of the debug op's
output tensor.
"""
self._event_reply_queue.put(
_watch_key_event_reply(
debug_service_pb2.EventReply.DebugOpStateChange.READ_WRITE
if breakpoint else debug_service_pb2.EventReply.DebugOpStateChange.
READ_ONLY, node_name, output_slot, debug_op))
def request_unwatch(self, node_name, output_slot, debug_op):
"""Request disabling a debug tensor watchpoint or breakpoint.
The request will take effect on the next debugged `Session.run()` call.
This is the opposite of `request_watch()`.
Args:
node_name: (`str`) name of the node that the to-be-watched tensor belongs
to, e.g., "hidden/Weights".
output_slot: (`int`) output slot index of the tensor to watch.
debug_op: (`str`) name of the debug op to enable. This should not include
any attribute substrings.
"""
self._event_reply_queue.put(
_watch_key_event_reply(debug_service_pb2.EventReply.DebugOpStateChange.
DISABLED, node_name, output_slot, debug_op))
@property
def breakpoints(self):
"""Get a set of the currently-activated breakpoints.
Returns:
A `set` of 3-tuples: (node_name, output_slot, debug_op), e.g.,
{("MatMul", 0, "DebugIdentity")}.
"""
return self._breakpoints
def gated_grpc_debug_watches(self):
"""Get the list of debug watches with attribute gated_grpc=True.
    Since the server receives `GraphDef` from the debugged runtime, it can only
    return the debug watches it has received so far.
Returns:
A `list` of `DebugWatch` `namedtuples` representing the debug watches with
gated_grpc=True. Each `namedtuple` element has the attributes:
`node_name` as a `str`,
`output_slot` as an `int`,
`debug_op` as a `str`.
"""
return list(self._gated_grpc_debug_watches)
| pavelchristof/gomoku-ai | tensorflow/python/debug/lib/grpc_debug_server.py | Python | apache-2.0 | 16,574 | 0.004827 |
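A minimal sketch of wiring the base classes above together; the handler bodies and the port number are placeholders:

class _PrintingStreamHandler(EventListenerBaseStreamHandler):
  """Placeholder handler that just logs what arrives on the stream."""

  def on_core_metadata_event(self, event):
    print("core metadata: %s" % event.log_message.message)

  def on_graph_def(self, graph_def, device_name, wall_time):
    print("graph with %d nodes from %s" % (len(graph_def.node), device_name))

  def on_value_event(self, event):
    print("value event for %s" % event.summary.value[0].node_name)

server = EventListenerBaseServicer(6064, _PrintingStreamHandler)
server.run_server(blocking=False)
# ... the debugged TensorFlow process streams events to port 6064 ...
server.stop_server().wait()  # stop_server() returns an event; wait for shutdown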
"""ShutIt module. See http://shutit.tk
"""
from shutit_module import ShutItModule
class less(ShutItModule):
def build(self, shutit):
shutit.send('mkdir -p /tmp/build/less')
shutit.send('cd /tmp/build/less')
shutit.send('wget -qO- http://www.greenwoodsoftware.com/less/less-458.tar.gz | tar -zxf -')
shutit.send('cd less*')
shutit.send('./configure --prefix=/usr --sysconfdir=/etc')
shutit.send('make')
shutit.send('make install')
return True
#def get_config(self, shutit):
# shutit.get_config(self.module_id,'item','default')
# return True
def finalize(self, shutit):
shutit.send('rm -rf /tmp/build/less')
return True
#def remove(self, shutit):
# return True
#def test(self, shutit):
# return True
def module():
return less(
'shutit.tk.sd.less.less', 158844782.0056,
description='',
maintainer='',
depends=['shutit.tk.setup']
)
| ianmiell/shutit-distro | less/less.py | Python | gpl-2.0 | 877 | 0.039909 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from opus_core.opus_package import OpusPackage
class package(OpusPackage):
name = 'psrc_parcel'
required_opus_packages = ["opus_core", "opus_emme2", "urbansim", "urbansim_parcel"]
| christianurich/VIBe2UrbanSim | 3rdparty/opus/src/psrc_parcel/opus_package_info.py | Python | gpl-2.0 | 318 | 0.009434 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
from setuptools import setup, find_packages
import codecs
import os
import re
import sys
def read(*parts):
path = os.path.join(os.path.dirname(__file__), *parts)
with codecs.open(path, encoding='utf-8') as fobj:
return fobj.read()
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
install_requires = [
'docker-compose >= 1.6.0, < 1.8'
]
setup(
name='tug',
version=find_version("tug", "__init__.py"),
description='Describe your infrastructure with yaml files',
url='https://github.com/metocean/tugboat-py',
author='MetOcean Solutions Ltd.',
author_email='[email protected]',
license='Apache License 2.0',
packages=find_packages(exclude=['tests.*', 'tests']),
include_package_data=True,
install_requires=install_requires,
entry_points={
'console_scripts': [
'tug=tug.main:main',
],
},
)
| metocean/tugboat-py | setup.py | Python | apache-2.0 | 1,239 | 0.003228 |
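The `find_version` helper above expects the package's `__init__.py` to define a module-level version string, along these lines:

# tug/__init__.py (sketch)
__version__ = '0.1.0'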
import re
import socket
import threading
import time
from chat.message import Message
from chat.user import User
from interfaces.chat import Chat
class TwitchChat(Chat):
host = 'irc.twitch.tv'
port = 6667
rate = 1.5
def __init__(self, username, passwd, channel):
'''
Creates IRC client for Twitch chat at specified channel.
:param username: Username to log in with.
:param passwd: Password to authenticate with.
:param channel: Channel to connect to.
'''
if not channel.startswith('#'):
channel = '#' + channel
self._nick = username
self._pass = passwd
self._chan = channel
self._sock = None
self._sock_lock = threading.Lock()
self.__connect()
def __sock_send(self, message, encoding='utf-8'):
'''
Sends message over socket in bytes format.
:param message: Message to send.
:param encoding: Encoding of string.
'''
with self._sock_lock:
self._sock.send(bytes(message, encoding))
def __authenticate(self, auth_type, authentication):
'''
Sends authentication message to server.
:param auth_type: One of three types: NICK, PASS, or JOIN
:param authentication: Content corresponding to auth_type.
'''
if auth_type not in ('NICK', 'PASS', 'JOIN'):
raise ValueError('Invalid auth type.')
message = '{} {}\r\n'.format(auth_type, authentication)
self.__sock_send(message)
    def __connect(self):
        '''
        Connects client to server, closing any existing connection first.
        '''
if self._sock:
self.close()
self._sock = socket.socket()
self._sock.setblocking(True)
self._sock.connect((TwitchChat.host, TwitchChat.port))
self.__authenticate('PASS', self._pass)
self.__authenticate('NICK', self._nick)
self.__authenticate('JOIN', self._chan)
def __get_raw_message(self, timeout):
'''
Gets a UTF-8 decoded message from the server, responding to pings as needed
'''
while timeout:
raw_message = self._sock.recv(1024).decode('utf-8')
if raw_message.startswith('PING'):
self.__sock_send(raw_message.replace('PING', 'PONG'))
print('Ping received.')
else:
return raw_message
    @staticmethod
    def _parse_message(raw_message):
'''
Parses raw message from server and returns a Message object.
:param raw_message: UTF-8 encoded message from server.
'''
        result = re.search(r'^:(\w+)!\w+@[\w.]+ [A-Z]+ #\w+ :(.+)\r\n', raw_message)
if not result:
return
author, content = result.groups()
author = User(author)
return Message(author, content)
def send_message(self, content, max_attempts=2):
'''
Sends message to server and sleeps.
:param content: The message to send.
:param max_attempts: The maximum number of failed attempts to allow when sending message.
'''
message = 'PRIVMSG {} :{}\r\n'.format(self._chan, content)
for _ in range(max_attempts):
try:
self.__sock_send(message)
time.sleep(TwitchChat.rate)
break
except socket.error:
self.__connect() # re-establish connection and try again
def get_message(self, timeout=-1):
'''
Returns next message from server.
'''
start = time.time()
no_timeout = timeout < 0
while no_timeout or (time.time() - start) < timeout:
try:
raw_message = self.__get_raw_message(timeout)
message = TwitchChat._parse_message(raw_message)
if message:
return message
except socket.error:
self.__connect()
except ValueError:
pass
def close(self):
'''
Closes server connection.
'''
self._sock.shutdown(socket.SHUT_RDWR)
        self._sock.close()
| jk977/twitch-plays | bot/chat/twitchchat.py | Python | gpl-3.0 | 4,289 | 0.002098 |
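A usage sketch for the client above; the credentials and channel are placeholders (Twitch IRC passwords are OAuth tokens):

chat = TwitchChat('mybot', 'oauth:xxxxxxxx', 'somechannel')
chat.send_message('Hello, chat!')
while True:
    message = chat.get_message(timeout=60)  # None once the timeout elapses
    if message is None:
        break
    print('%s: %s' % (message.author, message.content))
chat.close()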
# markdown is released under the BSD license
# Copyright 2007, 2008 The Python Markdown Project (v. 1.7 and later)
# Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b)
# Copyright 2004 Manfred Stienstra (the original version)
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE PYTHON MARKDOWN PROJECT ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ANY CONTRIBUTORS TO THE PYTHON MARKDOWN PROJECT
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import unicode_literals
from __future__ import absolute_import
from . import util
from copy import deepcopy
# Name of the dict item-iteration method, as in `six`; restored here because
# iteritems_compat() below references it.
if util.PY3:  # pragma: no cover
    _iteritems = "items"
else:  # pragma: no cover
    _iteritems = "iteritems"
def iteritems_compat(d):
    """Return an iterator over the (key, value) pairs of a dictionary.
    Copied from `six` module."""
    return iter(getattr(d, _iteritems)())
class OrderedDict(dict):
"""
A dictionary that keeps its keys in the order in which they're inserted.
Copied from Django's SortedDict with some modifications.
"""
def __new__(cls, *args, **kwargs):
instance = super(OrderedDict, cls).__new__(cls, *args, **kwargs)
instance.keyOrder = []
return instance
def __init__(self, data=None):
if data is None or isinstance(data, dict):
data = data or []
super(OrderedDict, self).__init__(data)
self.keyOrder = list(data) if data else []
else:
super(OrderedDict, self).__init__()
super_set = super(OrderedDict, self).__setitem__
for key, value in data:
# Take the ordering from first key
if key not in self:
self.keyOrder.append(key)
# But override with last value in data (dict() does this)
super_set(key, value)
def __deepcopy__(self, memo):
return self.__class__([(key, deepcopy(value, memo))
for key, value in self.items()])
def __copy__(self):
# The Python's default copy implementation will alter the state
# of self. The reason for this seems complex but is likely related to
# subclassing dict.
return self.copy()
def __setitem__(self, key, value):
if key not in self:
self.keyOrder.append(key)
super(OrderedDict, self).__setitem__(key, value)
def __delitem__(self, key):
super(OrderedDict, self).__delitem__(key)
self.keyOrder.remove(key)
def __iter__(self):
return iter(self.keyOrder)
def __reversed__(self):
return reversed(self.keyOrder)
def pop(self, k, *args):
result = super(OrderedDict, self).pop(k, *args)
try:
self.keyOrder.remove(k)
except ValueError:
# Key wasn't in the dictionary in the first place. No problem.
pass
return result
def popitem(self):
result = super(OrderedDict, self).popitem()
self.keyOrder.remove(result[0])
return result
def _iteritems(self):
for key in self.keyOrder:
yield key, self[key]
def _iterkeys(self):
for key in self.keyOrder:
yield key
def _itervalues(self):
for key in self.keyOrder:
yield self[key]
if util.PY3:
items = _iteritems
keys = _iterkeys
values = _itervalues
else:
iteritems = _iteritems
iterkeys = _iterkeys
itervalues = _itervalues
def items(self):
return [(k, self[k]) for k in self.keyOrder]
def keys(self):
return self.keyOrder[:]
def values(self):
return [self[k] for k in self.keyOrder]
def update(self, dict_):
for k, v in iteritems_compat(dict_):
self[k] = v
def setdefault(self, key, default):
if key not in self:
self.keyOrder.append(key)
return super(OrderedDict, self).setdefault(key, default)
def value_for_index(self, index):
"""Returns the value of the item at the given zero-based index."""
return self[self.keyOrder[index]]
def insert(self, index, key, value):
"""Inserts the key, value pair before the item with the given index."""
if key in self.keyOrder:
n = self.keyOrder.index(key)
del self.keyOrder[n]
if n < index:
index -= 1
self.keyOrder.insert(index, key)
super(OrderedDict, self).__setitem__(key, value)
def copy(self):
"""Returns a copy of this object."""
# This way of initializing the copy means it works for subclasses, too.
return self.__class__(self)
def __repr__(self):
"""
Replaces the normal dict.__repr__ with a version that returns the keys
in their Ordered order.
"""
return '{%s}' % ', '.join(['%r: %r' % (k, v) for k, v in iteritems_compat(self)])
def clear(self):
super(OrderedDict, self).clear()
self.keyOrder = []
def index(self, key):
""" Return the index of a given key. """
try:
return self.keyOrder.index(key)
except ValueError:
raise ValueError("Element '%s' was not found in OrderedDict" % key)
def index_for_location(self, location):
""" Return index or None for a given location. """
if location == '_begin':
i = 0
elif location == '_end':
i = None
elif location.startswith('<') or location.startswith('>'):
i = self.index(location[1:])
if location.startswith('>'):
if i >= len(self):
# last item
i = None
else:
i += 1
else:
raise ValueError('Not a valid location: "%s". Location key '
'must start with a ">" or "<".' % location)
return i
def add(self, key, value, location):
""" Insert by key location. """
i = self.index_for_location(location)
if i is not None:
self.insert(i, key, value)
else:
self.__setitem__(key, value)
def link(self, key, location):
""" Change location of an existing item. """
n = self.keyOrder.index(key)
del self.keyOrder[n]
try:
i = self.index_for_location(location)
if i is not None:
self.keyOrder.insert(i, key)
else:
self.keyOrder.append(key)
except Exception as e:
# restore to prevent data loss and reraise
self.keyOrder.insert(n, key)
raise e
| Teamxrtc/webrtc-streaming-node | third_party/webrtc/src/chromium/src/third_party/markdown/odict.py | Python | mit | 7,934 | 0.001008 |
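A short sketch of the location syntax understood by `add` and `link` above ('_begin', '_end', '<key', '>key'):

od = OrderedDict([('a', 1), ('c', 3)])
od.add('b', 2, '>a')          # insert right after 'a'
od.add('start', 0, '_begin')  # insert at the front
od.link('c', '<a')            # move existing 'c' to just before 'a'
print(list(od))               # ['start', 'c', 'a', 'b']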
a = "hello"
a += " "
a += "world"
print a
| jplevyak/pyc | tests/t39.py | Python | bsd-3-clause | 42 | 0 |
from django.forms import ModelForm, ModelChoiceField
from django.utils.translation import ugettext_lazy as _
from apps.task.models import Task
class FormChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
return obj.name
class TaskForm(ModelForm):
"""
Task form used to add or update a task in the Chronos platform.
TODO: Develop this form
"""
parenttask = FormChoiceField(
queryset=Task.objects.all().order_by('name'),
empty_label=_('Please select an option'),
required=False,
)
class Meta:
model = Task
fields = ['name', 'description', 'comments', 'price', 'parenttask', 'is_visible']
| hgpestana/chronos | apps/task/forms.py | Python | mit | 630 | 0.025397 |
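A sketch of the form in a classic create view; the URL name and template path are hypothetical:

from django.shortcuts import redirect, render

def task_create(request):
    form = TaskForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        form.save()
        return redirect('task_list')  # hypothetical URL name
    return render(request, 'task/task_form.html', {'form': form})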
"""Tests for functions and classes in data/processing.py."""
import glob
import os
from absl.testing import absltest
import heatnet.data.processing as hdp
import heatnet.file_util as file_util
import heatnet.test.test_util as test_util
import numpy as np
import xarray as xr
class CDSPreprocessorTest(absltest.TestCase):
"""Tests for CDSPreprocesor."""
def test_init(self):
"""Tests CDSPreprocessor initialization."""
with file_util.mkdtemp() as tmp_dir:
data_paths = [
os.path.join(tmp_dir, 'temp_data.nc'),
os.path.join(tmp_dir, 'temp_data_2.nc')
]
proc_path = os.path.join(tmp_dir, 'temp_proc_data.nc')
variables = ['swvl1', 't2m']
for path, var in zip(data_paths, variables):
test_util.write_dummy_dataset(path, var)
pp = hdp.CDSPreprocessor(data_paths, base_out_path=proc_path, mode='ext')
self.assertEqual(pp.raw_files, data_paths)
self.assertEqual(pp.base_out_path, proc_path)
self.assertEqual(pp.lead_times, [1])
self.assertEqual(pp.past_times, [0])
pp.close()
pp = hdp.CDSPreprocessor(
data_paths[0], base_out_path=proc_path, mode='ext')
self.assertEqual(pp.raw_files, data_paths[0])
self.assertEqual(pp.base_out_path, proc_path)
self.assertEqual(pp.lead_times, [1])
self.assertEqual(pp.past_times, [0])
pp.close()
for path in data_paths:
os.remove(path)
def test_raw_to_batched_samples(self):
"""Tests default raw_to_batched_samples call."""
tol = 1.0e-4
with file_util.mkdtemp() as tmp_dir:
path = os.path.join(tmp_dir, 'temp_data.nc')
proc_path = os.path.join(tmp_dir, 'temp_proc_data.nc')
proc_path1 = os.path.join(tmp_dir, 'temp_proc_data.000000.nc')
test_util.write_dummy_dataset(path, 'swvl1')
pp = hdp.CDSPreprocessor(path, base_out_path=proc_path, mode='ext')
pp.raw_to_batched_samples()
self.assertEqual(pp.pred_varlev_time, ['swvl1/0'])
self.assertEqual(pp.tgt_varlev_time, ['swvl1/0/+1D'])
with xr.open_dataset(path) as ds, xr.open_dataset(proc_path1) as proc_ds:
self.assertTrue(
np.allclose(
ds.isel(time=0).swvl1.values,
proc_ds.isel(sample=0).sel(
pred_varlev='swvl1/0').predictors.values,
rtol=tol,
atol=tol))
os.remove(path)
for f in glob.glob(os.path.join(tmp_dir, 'temp_proc*')):
os.remove(f)
pp.close()
def test_offsets(self):
"""Tests correctness of time offsets from raw to processed data."""
tol = 1.0e-4
with file_util.mkdtemp() as tmp_dir:
data_paths = [
os.path.join(tmp_dir, 'temp_data.nc'),
os.path.join(tmp_dir, 'temp_data_3.nc'),
os.path.join(tmp_dir, 'temp_data_2.nc'),
]
variables = ['t2m', 'swvl1', 't2m_anom']
proc_path_1 = os.path.join(tmp_dir, 'temp_proc_data.000000.nc')
for path, var in zip(data_paths, variables):
test_util.write_dummy_dataset(path, var)
pp = hdp.CDSPreprocessor(
data_paths,
past_times=[1, 2],
lead_times=[1, 2],
base_out_path=os.path.join(tmp_dir, 'temp_proc_data.nc'),
mode='ext')
pp.raw_to_batched_samples()
with xr.open_dataset(proc_path_1) as proc_ds:
with xr.open_dataset(data_paths[0]) as ds:
# First possible target with lead time = 2
raw_data_slice = (ds.isel(time=4).t2m.values)
tgt_data_slice = (
proc_ds.sel(tgt_varlev='t2m/0/+1D').isel(sample=1).targets.values)
tgt2_data_slice = (
proc_ds.sel(tgt_varlev='t2m/0/+2D').isel(sample=0).targets.values)
pred0_data_slice = (
proc_ds.sel(pred_varlev='t2m/0').isel(sample=2).predictors.values)
pred1_data_slice = (
proc_ds.sel(pred_varlev='t2m/0/-1D').isel(
sample=3).predictors.values)
pred2_data_slice = (
proc_ds.sel(pred_varlev='t2m/0/-2D').isel(
sample=4).predictors.values)
self.assertTrue(
np.allclose(raw_data_slice, tgt_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, tgt2_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred0_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred1_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred2_data_slice, rtol=tol, atol=tol))
self.assertEqual(ds.time.values[2], proc_ds.sample.values[0])
with xr.open_dataset(data_paths[2]) as ds:
# First possible target with lead time = 2
raw_data_slice = (ds.isel(time=4).t2m_anom.values)
tgt_data_slice = (
proc_ds.sel(tgt_varlev='t2m_anom/0/+1D').isel(
sample=1).targets.values)
tgt2_data_slice = (
proc_ds.sel(tgt_varlev='t2m_anom/0/+2D').isel(
sample=0).targets.values)
pred0_data_slice = (
proc_ds.sel(pred_varlev='t2m_anom/0').isel(
sample=2).predictors.values)
pred1_data_slice = (
proc_ds.sel(pred_varlev='t2m_anom/0/-1D').isel(
sample=3).predictors.values)
pred2_data_slice = (
proc_ds.sel(pred_varlev='t2m_anom/0/-2D').isel(
sample=4).predictors.values)
self.assertTrue(
np.allclose(raw_data_slice, tgt_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, tgt2_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred0_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred1_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred2_data_slice, rtol=tol, atol=tol))
pp.close()
for path in data_paths:
os.remove(path)
for f in glob.glob(os.path.join(tmp_dir, 'temp_proc*')):
os.remove(f)
def test_mean_std_recovery(self):
"""Tests recovery of dimensional data from processed normalized data."""
tol = 1.0e-4
with file_util.mkdtemp() as tmp_dir:
data_paths = [
os.path.join(tmp_dir, 'temp_data.nc'),
os.path.join(tmp_dir, 'temp_data_3.nc'),
os.path.join(tmp_dir, 'temp_data_2.nc'),
]
variables = ['t2m', 'swvl1', 't2m_anom']
proc_path_1 = os.path.join(tmp_dir, 'temp_proc_data.000000.nc')
for path, var in zip(data_paths, variables):
test_util.write_dummy_dataset(path, var)
pp = hdp.CDSPreprocessor(
data_paths,
base_out_path=os.path.join(tmp_dir, 'temp_proc_data.nc'),
past_times=[1, 2],
lead_times=[1, 2],
mode='ext')
pp.raw_to_batched_samples(scale_variables=True)
with xr.open_dataset(proc_path_1) as proc_ds:
with xr.open_dataset(os.path.join(
tmp_dir, 'temp_proc_data.scales.nc')) as scale_ds:
with xr.open_dataset(data_paths[1]) as ds:
raw_values = ds.isel(time=4).swvl1.values
proc_values = proc_ds.isel(sample=2).sel(
pred_varlev='swvl1/0').predictors.values
proc_scaled_values = np.add(
np.multiply(
proc_values,
scale_ds.sel(pred_varlev='swvl1/0').pred_std.values),
scale_ds.sel(pred_varlev='swvl1/0').pred_mean.values)
self.assertTrue(
np.allclose(raw_values, proc_scaled_values, rtol=tol, atol=tol))
proc_values = proc_ds.isel(sample=4).sel(
pred_varlev='swvl1/0/-2D').predictors.values
proc_scaled_values = np.add(
np.multiply(
proc_values,
scale_ds.sel(pred_varlev='swvl1/0').pred_std.values),
scale_ds.sel(pred_varlev='swvl1/0').pred_mean.values)
self.assertTrue(
np.allclose(raw_values, proc_scaled_values, rtol=tol, atol=tol))
with xr.open_dataset(data_paths[2]) as ds:
raw_values = ds.isel(time=4).t2m_anom.values
proc_values = proc_ds.isel(sample=2).sel(
pred_varlev='t2m_anom/0').predictors.values
proc_scaled_values = np.add(
np.multiply(
proc_values,
scale_ds.sel(pred_varlev='t2m_anom/0').pred_std.values),
scale_ds.sel(pred_varlev='t2m_anom/0').pred_mean.values)
self.assertTrue(
np.allclose(raw_values, proc_scaled_values, rtol=tol, atol=tol))
proc_values = proc_ds.isel(sample=3).sel(
pred_varlev='t2m_anom/0/-1D').predictors.values
proc_scaled_values = np.add(
np.multiply(
proc_values,
scale_ds.sel(pred_varlev='t2m_anom/0').pred_std.values),
scale_ds.sel(pred_varlev='t2m_anom/0').pred_mean.values)
self.assertTrue(
np.allclose(raw_values, proc_scaled_values, rtol=tol, atol=tol))
pp.close()
for path in data_paths:
os.remove(path)
for f in glob.glob(os.path.join(tmp_dir, 'temp_proc*')):
os.remove(f)
if __name__ == '__main__':
absltest.main()
| google-research/heatnet | test/test_processing.py | Python | gpl-3.0 | 9,568 | 0.010033 |
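Distilled from the tests above, the typical preprocessing call sequence (file paths are placeholders):

pp = hdp.CDSPreprocessor(
    ['raw_t2m.nc', 'raw_swvl1.nc'],  # placeholder raw CDS files
    base_out_path='proc.nc',
    past_times=[1, 2],               # predictors at t-1 and t-2
    lead_times=[1, 2],               # targets at t+1 and t+2
    mode='ext')
pp.raw_to_batched_samples(scale_variables=True)  # also writes proc.scales.nc
pp.close()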
import base64
import httplib
import logging
import unittest
import testsetup
import transferclient
import moreasserts
def clean_up():
hostname = testsetup.HOST
testsetup.clean_host(hostname)
class GetRecordTest(unittest.TestCase):
def assertRecordFields(self, record, fields):
for field in fields:
self.assert_(field in record.keys())
self.assert_(len(str(record[field])) > 0)
def assertStandardFields(self, record):
self.assertRecordFields(
record,
['vdi_uuid', 'status', 'transfer_mode', 'ip', 'port', 'use_ssl', 'username', 'password'])
def assertVdiStatus(self, record, vdi_uuid, status):
self.assertEqual(vdi_uuid, record['vdi_uuid'])
self.assertEqual(status, record['status'])
def testGetRecordRaisesArgumentErrorIfVdiUuidIsMissing(self):
hostname, network, vdi = testsetup.setup_host_and_network(templates=1, vdi_mb=10)
moreasserts.assertRaisesXenapiFailure(self, 'ArgumentError', transferclient.get_record, hostname)
clean_up()
def testGetRecordRaisesVDINotFoundIfThereIsNoSuchVDIOnTheHost(self):
hostname, network, vdi = testsetup.setup_host_and_network(templates=1, vdi_mb=10)
invalidvdi = vdi[:-6] + 'abcdef'
moreasserts.assertRaisesXenapiFailure(self, 'VDINotFound', transferclient.get_record,
hostname, vdi_uuid=invalidvdi)
clean_up()
def testGetRecordWithUnusedVDI(self):
hostname, network, vdi = testsetup.setup_host_and_network(templates=1, vdi_mb=10)
# No expose called.
record = transferclient.get_record(hostname, vdi_uuid=vdi)
self.assertRecordFields(record, ['status', 'vdi_uuid'])
self.assertVdiStatus(record, vdi, 'unused')
clean_up()
def testGetRecordWithHTTPExposedVDI(self):
hostname, network, vdi = testsetup.setup_host_and_network(templates=1, vdi_mb=10)
transferclient.expose(hostname, vdi_uuid=vdi, network_uuid=network, transfer_mode='http')
record = transferclient.get_record(hostname, vdi_uuid=vdi)
self.assertStandardFields(record)
self.assertVdiStatus(record, vdi, 'exposed')
self.assertRecordFields(record, ['url_path', 'url_full'])
self.assertEqual('http', record['transfer_mode'])
self.assertEqual('80', record['port']) # Standard HTTP port
clean_up()
def testGetRecordWithHTTPSExposedVDI(self):
hostname, network, vdi = testsetup.setup_host_and_network(templates=1, vdi_mb=10)
transferclient.expose(hostname, vdi_uuid=vdi, network_uuid=network, transfer_mode='http', use_ssl='true')
record = transferclient.get_record(hostname, vdi_uuid=vdi)
self.assertStandardFields(record)
self.assertVdiStatus(record, vdi, 'exposed')
self.assertRecordFields(record, ['url_path', 'url_full', 'ssl_cert'])
self.assertEqual('http', record['transfer_mode'])
self.assertEqual('443', record['port']) # Standard HTTPS port
clean_up()
def testGetRecordWithBITSExposedVDI(self):
hostname, network, vdi = testsetup.setup_host_and_network(templates=1, vdi_mb=10)
transferclient.expose(hostname, vdi_uuid=vdi, network_uuid=network, transfer_mode='bits')
record = transferclient.get_record(hostname, vdi_uuid=vdi)
self.assertStandardFields(record)
self.assertVdiStatus(record, vdi, 'exposed')
self.assertRecordFields(record, ['url_path', 'url_full'])
self.assertEqual('bits', record['transfer_mode'])
self.assertEqual('80', record['port']) # Standard HTTP port
clean_up()
def testGetRecordWithISCSIExposedVDI(self):
hostname, network, vdi = testsetup.setup_host_and_network(templates=1, vdi_mb=10)
transferclient.expose(hostname, vdi_uuid=vdi, network_uuid=network, transfer_mode='iscsi')
record = transferclient.get_record(hostname, vdi_uuid=vdi)
self.assertStandardFields(record)
self.assertVdiStatus(record, vdi, 'exposed')
self.assertRecordFields(record, ['iscsi_iqn', 'iscsi_lun', 'iscsi_sn'])
self.assertEqual('iscsi', record['transfer_mode'])
self.assertEqual('3260', record['port']) # Standard iSCSI port
clean_up()
def testGetRecordWorksWhenMultipleVDIsAreExposed(self):
hostname, network, vdi1 = testsetup.setup_host_and_network(templates=1, vdi_mb=10)
vdi2 = transferclient.create_vdi(hostname, 'Second Test VDI', 12 * 1024 * 1024)
vdi3 = transferclient.create_vdi(hostname, 'Third Test VDI', 14 * 1024 * 1024)
vdi4 = transferclient.create_vdi(hostname, 'Fourth Test VDI', 16 * 1024 * 1024)
transferclient.expose(hostname, vdi_uuid=vdi2, network_uuid=network, transfer_mode='http')
transferclient.expose(hostname, vdi_uuid=vdi3, network_uuid=network, transfer_mode='http')
record1 = transferclient.get_record(hostname, vdi_uuid=vdi1)
record2 = transferclient.get_record(hostname, vdi_uuid=vdi2)
record3 = transferclient.get_record(hostname, vdi_uuid=vdi3)
record4 = transferclient.get_record(hostname, vdi_uuid=vdi4)
self.assertVdiStatus(record1, vdi1, 'unused')
self.assertVdiStatus(record2, vdi2, 'exposed')
self.assertVdiStatus(record3, vdi3, 'exposed')
self.assertVdiStatus(record4, vdi4, 'unused')
clean_up()
def testGetRecordWorksWhenReexposingVDIMultipleTimes(self):
hostname, network, vdi = testsetup.setup_host_and_network(templates=1, vdi_mb=10)
transferclient.expose(hostname, vdi_uuid=vdi, network_uuid=network, transfer_mode='http')
retval = transferclient.unexpose(hostname, vdi_uuid=vdi)
        self.assertEqual(retval, 'OK', 'Unexpose failed, never got to get_record testing.')
transferclient.expose(hostname, vdi_uuid=vdi, network_uuid=network, transfer_mode='http')
record = transferclient.get_record(hostname, vdi_uuid=vdi)
self.assertVdiStatus(record, vdi, 'exposed')
clean_up()
| xenserver/transfervm | transfertests/getrecord_test.py | Python | gpl-2.0 | 6,109 | 0.00442 |
from __future__ import unicode_literals
from .request import Request
from .response import Response
from .stat import Stat
from .primitives import Bool, UString, Vector
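# Each class's `parts` tuple lists the wire-format fields, in order, for
# the corresponding ZooKeeper request or response.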
class GetChildrenRequest(Request):
"""
"""
opcode = 8
parts = (
("path", UString),
("watch", Bool),
)
class GetChildrenResponse(Response):
"""
"""
opcode = 8
parts = (
("children", Vector.of(UString)),
)
class GetChildren2Request(Request):
"""
"""
opcode = 12
parts = (
("path", UString),
("watch", Bool),
)
class GetChildren2Response(Response):
"""
"""
opcode = 12
parts = (
("children", Vector.of(UString)),
("stat", Stat),
)
| wglass/zoonado | zoonado/protocol/children.py | Python | apache-2.0 | 746 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-09-06 17:16
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('students', '0001_add_WhitelistedUsername_model'),
]
operations = [
migrations.CreateModel(
name='Booking',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('start_time', models.DateTimeField()),
('end_time', models.DateTimeField()),
('user', models.ForeignKey(default=None, editable=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| muhummadPatel/raspied | students/migrations/0002_add_Booking_model.py | Python | mit | 923 | 0.002167 |
import sys
def render(node, strict=False):
"""Recipe to render a given FST node.
The FST is composed of branch nodes which are either lists or dicts
and of leaf nodes which are strings. Branch nodes can have other
list, dict or leaf nodes as childs.
To render a string, simply output it. To render a list, render each
of its elements in order. To render a dict, you must follow the
node's entry in the nodes_rendering_order dictionnary and its
dependents constraints.
This function hides all this algorithmic complexity by returning
a structured rendering recipe, whatever the type of node. But even
better, you should subclass the RenderWalker which simplifies
drastically working with the rendered FST.
The recipe is a list of steps, each step correspond to a child and is actually a 3-uple composed of the following fields:
- `key_type` is a string determining the type of the child in the second field (`item`) of the tuple. It can be one of:
- 'constant': the child is a string
- 'node': the child is a dict
- 'key': the child is an element of a dict
- 'list': the child is a list
- 'formatting': the child is a list specialized in formatting
- `item` is the child itself: either a string, a dict or a list.
- `render_key` gives the key used to access this child from the parent node. It's a string if the node is a dict or a number if its a list.
Please note that "bool" `key_types` are never rendered, that's why
they are not shown here.
"""
if isinstance(node, list):
return render_list(node)
elif isinstance(node, dict):
return render_node(node, strict=strict)
else:
raise NotImplementedError("You tried to render a %s. Only list and dicts can be rendered." % node.__class__.__name__)
def render_list(node):
for pos, child in enumerate(node):
yield ('node', child, pos)
def render_node(node, strict=False):
for key_type, render_key, dependent in nodes_rendering_order[node['type']]:
if not dependent:
continue
elif key_type == "bool":
raise NotImplementedError("Bool keys are only used for dependency, they cannot be rendered. Please set the \"%s\"'s dependent key in \"%s\" node to False" % ((key_type, render_key, dependent), node['type']))
elif isinstance(dependent, str) and not node.get(dependent):
continue
elif isinstance(dependent, list) and not all([node.get(x) for x in dependent]):
continue
if strict:
try:
if key_type == "key":
assert isinstance(node[render_key], (dict, type(None)))
elif key_type == "string":
assert isinstance(node[render_key], str)
elif key_type in ("list", "formatting"):
assert isinstance(node[render_key], list)
elif key_type == "constant":
pass
else:
raise Exception("Invalid key_type '%s', should be one of those: key, string, list, formatting" % key_type)
if dependent is True:
pass
elif isinstance(dependent, str):
assert dependent in node
elif isinstance(dependent, list):
assert all([x in node for x in dependent])
except AssertionError as e:
sys.stdout.write("Where node.type == '%s', render_key == '%s' and node == %s\n" % (node["type"], render_key, node))
raise e
        if key_type in ['key', 'string', 'list', 'formatting']:
            yield (key_type, node[render_key], render_key)
        elif key_type == 'constant':
            # Constants (keywords, punctuation) come from the recipe
            # itself, not from the node.
            yield (key_type, render_key, render_key)
        else:
            raise NotImplementedError("Unknown key type \"%s\" in \"%s\" node" % (key_type, node['type']))
node_types = set(['node', 'list', 'key', 'formatting', 'constant', 'bool', 'string'])
def node_keys(node):
return [key for (_, key, _) in nodes_rendering_order[node['type']]]
def child_by_key(node, key):
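    """Return the child of `node` addressed by `key`.
    `key` is an index for lists and a key name for dicts; for constant
    recipe entries, the key itself is returned.
    """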
if isinstance(node, list):
return node[key]
if key in node:
return node[key]
if key in node_keys(node):
return key
raise AttributeError("Cannot access key \"%s\" in node \"%s\"" % (key, node))
nodes_rendering_order = {
"int": [("string", "value", True)],
"long": [("string", "value", True)],
"name": [("string", "value", True)],
"hexa": [("string", "value", True)],
"octa": [("string", "value", True)],
"float": [("string", "value", True)],
"space": [("string", "value", True)],
"binary": [("string", "value", True)],
"complex": [("string", "value", True)],
"float_exponant": [("string", "value", True)],
"left_parenthesis": [("string", "value", True)],
"right_parenthesis": [("string", "value", True)],
"break": [("string", "type", True)],
"continue": [("string", "type", True)],
"pass": [("string", "type", True)],
"dotted_name": [("list", "value", True)],
"ifelseblock": [("list", "value", True)],
"atomtrailers": [("list", "value", True)],
"string_chain": [("list", "value", True)],
"endl": [
("formatting", "formatting", True),
("string", "value", True),
("string", "indent", True),
],
"star": [
("formatting", "first_formatting", True),
("string", "value", True),
("formatting", "second_formatting", True),
],
"string": [
("formatting", "first_formatting", True),
("string", "value", True),
("formatting", "second_formatting", True),
],
"raw_string": [
("formatting", "first_formatting", True),
("string", "value", True),
("formatting", "second_formatting", True),
],
"binary_string": [
("formatting", "first_formatting", True),
("string", "value", True),
("formatting", "second_formatting", True),
],
"unicode_string": [
("formatting", "first_formatting", True),
("string", "value", True),
("formatting", "second_formatting", True),
],
"binary_raw_string": [
("formatting", "first_formatting", True),
("string", "value", True),
("formatting", "second_formatting", True),
],
"unicode_raw_string": [
("formatting", "first_formatting", True),
("string", "value", True),
("formatting", "second_formatting", True),
],
# FIXME ugly, comment can end up in formatting of another
# node or being standalone, this is bad
"comment": [
("formatting", "formatting", "formatting"),
("string", "value", True),
],
"ternary_operator": [
("key", "first", True),
("formatting", "first_formatting", True),
("constant", "if", True),
("formatting", "second_formatting", True),
("key", "value", True),
("formatting", "third_formatting", True),
("constant", "else", True),
("formatting", "fourth_formatting", True),
("key", "second", True),
],
"ellipsis": [
("constant", ".", True),
("formatting", "first_formatting", True),
("constant", ".", True),
("formatting", "second_formatting", True),
("constant", ".", True),
],
"dot": [
("formatting", "first_formatting", True),
("constant", ".", True),
("formatting", "second_formatting", True),
],
"semicolon": [
("formatting", "first_formatting", True),
("constant", ";", True),
("formatting", "second_formatting", True),
],
"comma": [
("formatting", "first_formatting", True),
("constant", ",", True),
("formatting", "second_formatting", True),
],
"call": [
("formatting", "first_formatting", True),
("constant", "(", True),
("formatting", "second_formatting", True),
("list", "value", True),
("formatting", "third_formatting", True),
("constant", ")", True),
("formatting", "fourth_formatting", True),
],
"decorator": [
("constant", "@", True),
("key", "value", True),
("key", "call", "call"),
],
"class": [
("list", "decorators", True),
("constant", "class", True),
("formatting", "first_formatting", True),
("string", "name", True),
("formatting", "second_formatting", True),
("constant", "(", "parenthesis"),
("formatting", "third_formatting", True),
("list", "inherit_from", True),
("formatting", "fourth_formatting", True),
("constant", ")", "parenthesis"),
("formatting", "fifth_formatting", True),
("constant", ":", True),
("formatting", "sixth_formatting", True),
("list", "value", True),
("bool", "parenthesis", False),
],
"repr": [
("constant", "`", True),
("formatting", "first_formatting", True),
("list", "value", True),
("formatting", "second_formatting", True),
("constant", "`", True),
],
"list": [
("formatting", "first_formatting", True),
("constant", "[", True),
("formatting", "second_formatting", True),
("list", "value", True),
("formatting", "third_formatting", True),
("constant", "]", True),
("formatting", "fourth_formatting", True),
],
"associative_parenthesis": [
("formatting", "first_formatting", True),
("constant", "(", True),
("formatting", "second_formatting", True),
("key", "value", True),
("formatting", "third_formatting", True),
("constant", ")", True),
("formatting", "fourth_formatting", True),
],
"tuple": [
("formatting", "first_formatting", "with_parenthesis"),
("constant", "(", "with_parenthesis"),
("formatting", "second_formatting", "with_parenthesis"),
("list", "value", True),
("formatting", "third_formatting", "with_parenthesis"),
("constant", ")", "with_parenthesis"),
("formatting", "fourth_formatting", "with_parenthesis"),
("bool", "with_parenthesis", False),
],
"def": [
("list", "decorators", True),
("constant", "def", True),
("formatting", "first_formatting", True),
("string", "name", True),
("formatting", "second_formatting", True),
("constant", "(", True),
("formatting", "third_formatting", True),
("list", "arguments", True),
("formatting", "fourth_formatting", True),
("constant", ")", True),
("formatting", "fifth_formatting", True),
("constant", ":", True),
("formatting", "sixth_formatting", True),
("list", "value", True),
],
"call_argument": [
("key", "target", "target"),
("formatting", "first_formatting", "target"),
("constant", "=", "target"),
("formatting", "second_formatting", "target"),
("key", "value", True),
],
"def_argument": [
("key", "target", True),
("formatting", "first_formatting", "value"),
("constant", "=", "value"),
("formatting", "second_formatting", "value"),
("key", "value", "value"),
],
"list_argument": [
("constant", "*", True),
("formatting", "formatting", True),
("key", "value", True),
],
"dict_argument": [
("constant", "**", True),
("formatting", "formatting", True),
("key", "value", True),
],
"return": [
("constant", "return", True),
("formatting", "formatting", True),
("key", "value", "value"),
],
"raise": [
("constant", "raise", True),
("formatting", "first_formatting", True),
("key", "value", "value"),
("formatting", "second_formatting", "instance"),
("constant", ",", "instance"),
("formatting", "third_formatting", "instance"),
("key", "instance", "instance"),
("formatting", "fourth_formatting", "traceback"),
("constant", ",", "traceback"),
("formatting", "fifth_formatting", "traceback"),
("key", "traceback", "traceback"),
],
"assert": [
("constant", "assert", True),
("formatting", "first_formatting", True),
("key", "value", True),
("formatting", "second_formatting", "message"),
("constant", ",", "message"),
("formatting", "third_formatting", "message"),
("key", "message", "message"),
],
"set_comprehension": [
("formatting", "first_formatting", True),
("constant", "{", True),
("formatting", "second_formatting", True),
("key", "result", True),
("list", "generators", True),
("formatting", "third_formatting", True),
("constant", "}", True),
("formatting", "fourth_formatting", True),
],
"dict_comprehension": [
("formatting", "first_formatting", True),
("constant", "{", True),
("formatting", "second_formatting", True),
("key", "result", True),
("list", "generators", True),
("formatting", "third_formatting", True),
("constant", "}", True),
("formatting", "fourth_formatting", True),
],
"argument_generator_comprehension": [
("key", "result", True),
("list", "generators", True),
],
"generator_comprehension": [
("formatting", "first_formatting", True),
("constant", "(", True),
("formatting", "second_formatting", True),
("key", "result", True),
("list", "generators", True),
("formatting", "third_formatting", True),
("constant", ")", True),
("formatting", "fourth_formatting", True),
],
"list_comprehension": [
("formatting", "first_formatting", True),
("constant", "[", True),
("formatting", "second_formatting", True),
("key", "result", True),
("list", "generators", True),
("formatting", "third_formatting", True),
("constant", "]", True),
("formatting", "fourth_formatting", True),
],
"comprehension_loop": [
("formatting", "first_formatting", True),
("constant", "for", True),
("formatting", "second_formatting", True),
("key", "iterator", True),
("formatting", "third_formatting", True),
("constant", "in", True),
("formatting", "fourth_formatting", True),
("key", "target", True),
("list", "ifs", True),
],
"comprehension_if": [
("formatting", "first_formatting", True),
("constant", "if", True),
("formatting", "second_formatting", True),
("key", "value", True),
],
"getitem": [
("formatting", "first_formatting", True),
("constant", "[", True),
("formatting", "second_formatting", True),
("key", "value", True),
("formatting", "third_formatting", True),
("constant", "]", True),
("formatting", "fourth_formatting", True),
],
"slice": [
("key", "lower", "lower"),
("formatting", "first_formatting", True),
("constant", ":", True),
("formatting", "second_formatting", True),
("key", "upper", "upper"),
("formatting", "third_formatting", "has_two_colons"),
("constant", ":", "has_two_colons"),
("formatting", "fourth_formatting", "has_two_colons"),
("key", "step", ["has_two_colons", "step"]),
("bool", "has_two_colons", False),
],
"assignment": [
("key", "target", True),
("formatting", "first_formatting", True),
# FIXME should probably be a different node type
("string", "operator", "operator"),
("constant", "=", True),
("formatting", "second_formatting", True),
("key", "value", True),
],
"unitary_operator": [
("string", "value", True),
("formatting", "formatting", True),
("key", "target", True),
],
"binary_operator": [
("key", "first", True),
("formatting", "first_formatting", True),
("string", "value", True),
("formatting", "second_formatting", True),
("key", "second", True),
],
"boolean_operator": [
("key", "first", True),
("formatting", "first_formatting", True),
("string", "value", True),
("formatting", "second_formatting", True),
("key", "second", True),
],
"comparison_operator": [
("string", "first", True),
("formatting", "formatting", True),
("string", "second", "second"),
],
"comparison": [
("key", "first", True),
("formatting", "first_formatting", True),
("key", "value", True),
("formatting", "second_formatting", True),
("key", "second", True),
],
"with": [
("constant", "with", True),
("formatting", "first_formatting", True),
("list", "contexts", True),
("formatting", "second_formatting", True),
("constant", ":", True),
("formatting", "third_formatting", True),
("list", "value", True),
],
"with_context_item": [
("key", "value", True),
("formatting", "first_formatting", "as"),
("constant", "as", "as"),
("formatting", "second_formatting", "as"),
("key", "as", "as"),
],
"del": [
("constant", "del", True),
("formatting", "formatting", True),
("key", "value", True),
],
"yield": [
("constant", "yield", True),
("formatting", "formatting", True),
("key", "value", "value"),
],
"yield_atom": [
("constant", "(", True),
("formatting", "first_formatting", True),
("constant", "yield", True),
("formatting", "second_formatting", True),
("key", "value", "value"),
("formatting", "third_formatting", True),
("constant", ")", True),
],
"exec": [
("constant", "exec", True),
("formatting", "first_formatting", True),
("key", "value", True),
("formatting", "second_formatting", "globals"),
("constant", "in", "globals"),
("formatting", "third_formatting", "globals"),
("key", "globals", "globals"),
("formatting", "fourth_formatting", "locals"),
("constant", ",", "locals"),
("formatting", "fifth_formatting", "locals"),
("key", "locals", "locals"),
],
"global": [
("constant", "global", True),
("formatting", "formatting", True),
("list", "value", True),
],
"while": [
("constant", "while", True),
("formatting", "first_formatting", True),
("key", "test", True),
("formatting", "second_formatting", True),
("constant", ":", True),
("formatting", "third_formatting", True),
("list", "value", True),
("key", "else", "else"),
],
"for": [
("constant", "for", True),
("formatting", "first_formatting", True),
("key", "iterator", True),
("formatting", "second_formatting", True),
("constant", "in", True),
("formatting", "third_formatting", True),
("key", "target", True),
("formatting", "fourth_formatting", True),
("constant", ":", True),
("formatting", "fifth_formatting", True),
("list", "value", True),
("key", "else", "else"),
],
"if": [
("constant", "if", True),
("formatting", "first_formatting", True),
("key", "test", True),
("formatting", "second_formatting", True),
("constant", ":", True),
("formatting", "third_formatting", True),
("list", "value", True),
],
"elif": [
("constant", "elif", True),
("formatting", "first_formatting", True),
("key", "test", True),
("formatting", "second_formatting", True),
("constant", ":", True),
("formatting", "third_formatting", True),
("list", "value", True),
],
"else": [
("constant", "else", True),
("formatting", "first_formatting", True),
("constant", ":", True),
("formatting", "second_formatting", True),
("list", "value", True),
],
"lambda": [
("constant", "lambda", True),
("formatting", "first_formatting", True),
("list", "arguments", True),
("formatting", "second_formatting", True),
("constant", ":", True),
("formatting", "third_formatting", True),
("key", "value", True),
],
"try": [
("constant", "try", True),
("formatting", "first_formatting", True),
("constant", ":", True),
("formatting", "second_formatting", True),
("list", "value", True),
("list", "excepts", True),
("key", "else", "else"),
("key", "finally", "finally"),
],
"except": [
("constant", "except", True),
("formatting", "first_formatting", True),
("key", "exception", "exception"),
("formatting", "second_formatting", "delimiter"),
("string", "delimiter", "delimiter"),
("formatting", "third_formatting", "delimiter"),
("key", "target", "delimiter"),
("formatting", "fourth_formatting", True),
("constant", ":", True),
("formatting", "fifth_formatting", True),
("list", "value", True),
],
"finally": [
("constant", "finally", True),
("formatting", "first_formatting", True),
("constant", ":", True),
("formatting", "second_formatting", True),
("list", "value", True),
],
"dict": [
("formatting", "first_formatting", True),
("constant", "{", True),
("formatting", "second_formatting", True),
("list", "value", True),
("formatting", "third_formatting", True),
("constant", "}", True),
("formatting", "fourth_formatting", True),
],
"set": [
("formatting", "first_formatting", True),
("constant", "{", True),
("formatting", "second_formatting", True),
("list", "value", True),
("formatting", "third_formatting", True),
("constant", "}", True),
("formatting", "fourth_formatting", True),
],
"dictitem": [
("key", "key", True),
("formatting", "first_formatting", True),
("constant", ":", True),
("formatting", "second_formatting", True),
("key", "value", True),
],
"import": [
("formatting", "first_formatting", True),
("constant", "import", True),
("formatting", "second_formatting", True),
("list", "value", True),
],
"from_import": [
("constant", "from", True),
("formatting", "first_formatting", True),
("list", "value", True),
("formatting", "second_formatting", True),
("constant", "import", True),
("formatting", "third_formatting", True),
("list", "targets", True),
],
"dotted_as_name": [
("list", "value", True),
("formatting", "first_formatting", "target"),
("constant", "as", "target"),
("formatting", "second_formatting", "target"),
("string", "target", "target"),
],
"name_as_name": [
("string", "value", True),
("formatting", "first_formatting", "target"),
("constant", "as", "target"),
("formatting", "second_formatting", "target"),
("string", "target", "target"),
],
"print": [
("constant", "print", True),
("formatting", "formatting", True),
("constant", ">>", "destination"),
("formatting", "destination_formatting", "destination"),
("key", "destination", "destination"),
("list", "value", "value"),
],
}
class RenderWalker(object):
"""Inherit me and overload the methods you want.
When calling walk() on a FST node, this class will traverse all the
node's subtree by following the recipe given by the `render`
function for the node and recursively for all its childs. At each
recipe step, it will call methods that you can override to make a
specific process.
For every "node", "key", "list", "formatting" and "constant" childs,
it will call the `before` method when going down the tree and the
`after` method when going up. There are also specific
`before_[node,key,list,formatting,constant]` and
`after_[node,key,list,formatting,constant]` methods provided for
convenience.
The latter are called on specific steps:
* before_list: called before encountering a list of nodes
* after_list: called after encountering a list of nodes
* before_formatting: called before encountering a formatting list
* after_formatting: called after encountering a formatting list
* before_node: called before encountering a node
* after_node: called after encountering a node
* before_key: called before encountering a key type entry
* after_key: called after encountering a key type entry
* before_leaf: called before encountering a leaf of the FST (can be a constant (like "def" in a function definition) or an actual value like the value a name node)
* after_leaf: called after encountering a leaf of the FST (can be a constant (like "def" in a function definition) or an actual value like the value a name node)
Every method has the same signature: (self, node, render_pos, render_key).
"""
STOP = True
def __init__(self, strict=False):
self.strict = strict
def before_list(self, node, render_key):
pass
def after_list(self, node, render_key):
pass
def before_formatting(self, node, render_key):
pass
def after_formatting(self, node, render_key):
pass
def before_node(self, node, render_key):
pass
def after_node(self, node, render_key):
pass
def before_key(self, node, render_key):
pass
def after_key(self, node, render_key):
pass
def before_constant(self, node, render_key):
pass
def after_constant(self, node, render_key):
pass
def before_string(self, node, render_key):
pass
def after_string(self, node, render_key):
pass
def before(self, key_type, item, render_key):
if key_type not in node_types:
            raise NotImplementedError("Unknown key type: %s" % key_type)
to_call = getattr(self, 'before_' + key_type)
return to_call(item, render_key)
def after(self, key_type, item, render_key):
if key_type not in node_types:
            raise NotImplementedError("Unknown key type: %s" % key_type)
to_call = getattr(self, 'after_' + key_type)
return to_call(item, render_key)
def walk(self, node):
return self._walk(node)
def _walk(self, node):
for key_type, item, render_key in render(node, strict=getattr(self, "strict", False)):
stop = self._walk_on_item(key_type, item, render_key)
if stop == self.STOP:
return self.STOP
def _walk_on_item(self, key_type, item, render_key):
stop_before = self.before(key_type, item, render_key)
if stop_before:
return self.STOP
stop = self._walk(item) if key_type not in ['constant', 'string'] else False
stop_after = self.after(key_type, item, render_key)
if stop or stop_after:
return self.STOP
| cbonoz/codehealth | dependencies/baron/render.py | Python | mit | 34,151 | 0.000439 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Intangible()
result.template = "object/draft_schematic/vehicle/component/shared_structural_reinforcement_heavy.iff"
result.attribute_template_id = -1
result.stfName("string_id_table","")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | anhstudios/swganh | data/scripts/templates/object/draft_schematic/vehicle/component/shared_structural_reinforcement_heavy.py | Python | mit | 477 | 0.046122 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sqlite3 as lite
import sys
con = lite.connect('Database.db')
with con:
cur = con.cursor()
#Create data for the user table
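    # Password values below are 32-character hex digests (MD5-style hashes).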
cur.execute("CREATE TABLE users(UserId INT, UserName TEXT, Password TEXT)")
cur.execute("INSERT INTO users VALUES(1,'Admin','0cef1fb10f60529028a71f58e54ed07b')")
cur.execute("INSERT INTO users VALUES(2,'User','022b5ac7ea72a5ee3bfc6b3eb461f2fc')")
cur.execute("INSERT INTO users VALUES(3,'Guest','94ca112be7fc3f3934c45c6809875168')")
cur.execute("INSERT INTO users VALUES(4,'Plebian','0cbdc7572ff7d07cc6807a5b102a3b93')")
#Create some data for pageinformation
cur.execute("CREATE TABLE pages(pageId INT, title TEXT, content TEXT)")
cur.execute("INSERT INTO pages VALUES(1,'The welcome page','Some text about the welcome page is inserted here')")
cur.execute("INSERT INTO pages VALUES(2,'About','Some text about the about page!')")
cur.execute("INSERT INTO pages VALUES(3,'Contact','Some contact information is found here')")
con.commit()
#con.close()
| blabla1337/defdev | demos/input-validation/SQLI/config/initializer.py | Python | gpl-3.0 | 1,100 | 0.012727 |
# engine/__init__.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""SQL connections, SQL execution and high-level DB-API interface.
The engine package defines the basic components used to interface
DB-API modules with higher-level statement construction,
connection-management, execution and result contexts. The primary
"entry point" class into this package is the Engine and its public
constructor ``create_engine()``.
This package includes:
base.py
Defines interface classes and some implementation classes which
comprise the basic components used to interface between a DB-API,
constructed and plain-text statements, connections, transactions,
and results.
default.py
Contains default implementations of some of the components defined
in base.py. All current database dialects use the classes in
default.py as base classes for their own database-specific
implementations.
strategies.py
The mechanics of constructing ``Engine`` objects are represented
here. Defines the ``EngineStrategy`` class which represents how
to go from arguments specified to the ``create_engine()``
function, to a fully constructed ``Engine``, including
initialization of connection pooling, dialects, and specific
subclasses of ``Engine``.
threadlocal.py
The ``TLEngine`` class is defined here, which is a subclass of
the generic ``Engine`` and tracks ``Connection`` and
``Transaction`` objects against the identity of the current
thread. This allows certain programming patterns based around
the concept of a "thread-local connection" to be possible.
The ``TLEngine`` is created by using the "threadlocal" engine
strategy in conjunction with the ``create_engine()`` function.
url.py
Defines the ``URL`` class which represents the individual
components of a string URL passed to ``create_engine()``. Also
defines a basic module-loading strategy for the dialect specifier
within a URL.
"""
from .interfaces import (
Connectable,
CreateEnginePlugin,
Dialect,
ExecutionContext,
ExceptionContext,
# backwards compat
Compiled,
TypeCompiler
)
from .base import (
Connection,
Engine,
NestedTransaction,
RootTransaction,
Transaction,
TwoPhaseTransaction,
)
from .result import (
BaseRowProxy,
BufferedColumnResultProxy,
BufferedColumnRow,
BufferedRowResultProxy,
FullyBufferedResultProxy,
ResultProxy,
RowProxy,
)
from .util import (
connection_memoize
)
from . import util, strategies
# backwards compat
from ..sql import ddl
default_strategy = 'plain'
def create_engine(*args, **kwargs):
"""Create a new :class:`.Engine` instance.
The standard calling form is to send the URL as the
first positional argument, usually a string
that indicates database dialect and connection arguments::
engine = create_engine("postgresql://scott:tiger@localhost/test")
Additional keyword arguments may then follow it which
establish various options on the resulting :class:`.Engine`
and its underlying :class:`.Dialect` and :class:`.Pool`
constructs::
engine = create_engine("mysql://scott:tiger@hostname/dbname",
encoding='latin1', echo=True)
The string form of the URL is
``dialect[+driver]://user:password@host/dbname[?key=value..]``, where
``dialect`` is a database name such as ``mysql``, ``oracle``,
``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively,
the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.
``**kwargs`` takes a wide variety of options which are routed
towards their appropriate components. Arguments may be specific to
the :class:`.Engine`, the underlying :class:`.Dialect`, as well as the
:class:`.Pool`. Specific dialects also accept keyword arguments that
are unique to that dialect. Here, we describe the parameters
that are common to most :func:`.create_engine()` usage.
Once established, the newly resulting :class:`.Engine` will
request a connection from the underlying :class:`.Pool` once
:meth:`.Engine.connect` is called, or a method which depends on it
such as :meth:`.Engine.execute` is invoked. The :class:`.Pool` in turn
will establish the first actual DBAPI connection when this request
is received. The :func:`.create_engine` call itself does **not**
establish any actual DBAPI connections directly.
.. seealso::
:doc:`/core/engines`
:doc:`/dialects/index`
:ref:`connections_toplevel`
:param case_sensitive=True: if False, result column names
will match in a case-insensitive fashion, that is,
``row['SomeColumn']``.
.. versionchanged:: 0.8
By default, result row names match case-sensitively.
In version 0.7 and prior, all matches were case-insensitive.
:param connect_args: a dictionary of options which will be
passed directly to the DBAPI's ``connect()`` method as
additional keyword arguments. See the example
at :ref:`custom_dbapi_args`.
:param convert_unicode=False: if set to True, sets
the default behavior of ``convert_unicode`` on the
:class:`.String` type to ``True``, regardless
of a setting of ``False`` on an individual
:class:`.String` type, thus causing all :class:`.String`
-based columns
to accommodate Python ``unicode`` objects. This flag
is useful as an engine-wide setting when using a
DBAPI that does not natively support Python
``unicode`` objects and raises an error when
one is received (such as pyodbc with FreeTDS).
See :class:`.String` for further details on
what this flag indicates.
:param creator: a callable which returns a DBAPI connection.
This creation function will be passed to the underlying
connection pool and will be used to create all new database
connections. Usage of this function causes connection
parameters specified in the URL argument to be bypassed.
:param echo=False: if True, the Engine will log all statements
as well as a repr() of their parameter lists to the engines
logger, which defaults to sys.stdout. The ``echo`` attribute of
``Engine`` can be modified at any time to turn logging on and
off. If set to the string ``"debug"``, result rows will be
printed to the standard output as well. This flag ultimately
controls a Python logger; see :ref:`dbengine_logging` for
information on how to configure logging directly.
:param echo_pool=False: if True, the connection pool will log
all checkouts/checkins to the logging stream, which defaults to
sys.stdout. This flag ultimately controls a Python logger; see
:ref:`dbengine_logging` for information on how to configure logging
directly.
:param encoding: Defaults to ``utf-8``. This is the string
encoding used by SQLAlchemy for string encode/decode
operations which occur within SQLAlchemy, **outside of
the DBAPI.** Most modern DBAPIs feature some degree of
direct support for Python ``unicode`` objects,
what you see in Python 2 as a string of the form
``u'some string'``. For those scenarios where the
DBAPI is detected as not supporting a Python ``unicode``
object, this encoding is used to determine the
source/destination encoding. It is **not used**
for those cases where the DBAPI handles unicode
directly.
To properly configure a system to accommodate Python
``unicode`` objects, the DBAPI should be
configured to handle unicode to the greatest
degree as is appropriate - see
the notes on unicode pertaining to the specific
target database in use at :ref:`dialect_toplevel`.
Areas where string encoding may need to be accommodated
outside of the DBAPI include zero or more of:
* the values passed to bound parameters, corresponding to
the :class:`.Unicode` type or the :class:`.String` type
when ``convert_unicode`` is ``True``;
* the values returned in result set columns corresponding
to the :class:`.Unicode` type or the :class:`.String`
type when ``convert_unicode`` is ``True``;
* the string SQL statement passed to the DBAPI's
``cursor.execute()`` method;
* the string names of the keys in the bound parameter
dictionary passed to the DBAPI's ``cursor.execute()``
as well as ``cursor.setinputsizes()`` methods;
* the string column names retrieved from the DBAPI's
``cursor.description`` attribute.
When using Python 3, the DBAPI is required to support
*all* of the above values as Python ``unicode`` objects,
which in Python 3 are just known as ``str``. In Python 2,
the DBAPI does not specify unicode behavior at all,
so SQLAlchemy must make decisions for each of the above
values on a per-DBAPI basis - implementations are
completely inconsistent in their behavior.
:param execution_options: Dictionary execution options which will
be applied to all connections. See
:meth:`~sqlalchemy.engine.Connection.execution_options`
:param implicit_returning=True: When ``True``, a RETURNING-
compatible construct, if available, will be used to
fetch newly generated primary key values when a single row
INSERT statement is emitted with no existing returning()
clause. This applies to those backends which support RETURNING
or a compatible construct, including Postgresql, Firebird, Oracle,
Microsoft SQL Server. Set this to ``False`` to disable
the automatic usage of RETURNING.
:param isolation_level: this string parameter is interpreted by various
dialects in order to affect the transaction isolation level of the
database connection. The parameter essentially accepts some subset of
these string arguments: ``"SERIALIZABLE"``, ``"REPEATABLE_READ"``,
``"READ_COMMITTED"``, ``"READ_UNCOMMITTED"`` and ``"AUTOCOMMIT"``.
Behavior here varies per backend, and
individual dialects should be consulted directly.
Note that the isolation level can also be set on a per-:class:`.Connection`
basis as well, using the
:paramref:`.Connection.execution_options.isolation_level`
feature.
.. seealso::
:attr:`.Connection.default_isolation_level` - view default level
:paramref:`.Connection.execution_options.isolation_level`
- set per :class:`.Connection` isolation level
:ref:`SQLite Transaction Isolation <sqlite_isolation_level>`
:ref:`Postgresql Transaction Isolation <postgresql_isolation_level>`
:ref:`MySQL Transaction Isolation <mysql_isolation_level>`
:ref:`session_transaction_isolation` - for the ORM
:param label_length=None: optional integer value which limits
the size of dynamically generated column labels to that many
characters. If less than 6, labels are generated as
"_(counter)". If ``None``, the value of
``dialect.max_identifier_length`` is used instead.
:param listeners: A list of one or more
:class:`~sqlalchemy.interfaces.PoolListener` objects which will
receive connection pool events.
:param logging_name: String identifier which will be used within
the "name" field of logging records generated within the
"sqlalchemy.engine" logger. Defaults to a hexstring of the
object's id.
:param max_overflow=10: the number of connections to allow in
connection pool "overflow", that is connections that can be
opened above and beyond the pool_size setting, which defaults
to five. this is only used with :class:`~sqlalchemy.pool.QueuePool`.
:param module=None: reference to a Python module object (the module
itself, not its string name). Specifies an alternate DBAPI module to
be used by the engine's dialect. Each sub-dialect references a
specific DBAPI which will be imported before first connect. This
parameter causes the import to be bypassed, and the given module to
be used instead. Can be used for testing of DBAPIs as well as to
inject "mock" DBAPI implementations into the :class:`.Engine`.
:param paramstyle=None: The `paramstyle <http://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
to use when rendering bound parameters. This style defaults to the
one recommended by the DBAPI itself, which is retrieved from the
``.paramstyle`` attribute of the DBAPI. However, most DBAPIs accept
more than one paramstyle, and in particular it may be desirable
to change a "named" paramstyle into a "positional" one, or vice versa.
When this attribute is passed, it should be one of the values
``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
``"pyformat"``, and should correspond to a parameter style known
to be supported by the DBAPI in use.
:param pool=None: an already-constructed instance of
:class:`~sqlalchemy.pool.Pool`, such as a
:class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this
pool will be used directly as the underlying connection pool
for the engine, bypassing whatever connection parameters are
present in the URL argument. For information on constructing
connection pools manually, see :ref:`pooling_toplevel`.
:param poolclass=None: a :class:`~sqlalchemy.pool.Pool`
subclass, which will be used to create a connection pool
instance using the connection parameters given in the URL. Note
this differs from ``pool`` in that you don't actually
instantiate the pool in this case, you just indicate what type
of pool to be used.
:param pool_logging_name: String identifier which will be used within
the "name" field of logging records generated within the
"sqlalchemy.pool" logger. Defaults to a hexstring of the object's
id.
:param pool_size=5: the number of connections to keep open
inside the connection pool. This used with
:class:`~sqlalchemy.pool.QueuePool` as
well as :class:`~sqlalchemy.pool.SingletonThreadPool`. With
:class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting
of 0 indicates no limit; to disable pooling, set ``poolclass`` to
:class:`~sqlalchemy.pool.NullPool` instead.
:param pool_recycle=-1: this setting causes the pool to recycle
connections after the given number of seconds has passed. It
defaults to -1, or no timeout. For example, setting to 3600
means connections will be recycled after one hour. Note that
MySQL in particular will disconnect automatically if no
activity is detected on a connection for eight hours (although
this is configurable with the MySQLDB connection itself and the
server configuration as well).
:param pool_reset_on_return='rollback': set the "reset on return"
behavior of the pool, which is whether ``rollback()``,
``commit()``, or nothing is called upon connections
being returned to the pool. See the docstring for
``reset_on_return`` at :class:`.Pool`.
.. versionadded:: 0.7.6
:param pool_timeout=30: number of seconds to wait before giving
up on getting a connection from the pool. This is only used
with :class:`~sqlalchemy.pool.QueuePool`.
:param strategy='plain': selects alternate engine implementations.
Currently available are:
* the ``threadlocal`` strategy, which is described in
:ref:`threadlocal_strategy`;
* the ``mock`` strategy, which dispatches all statement
execution to a function passed as the argument ``executor``.
See `example in the FAQ
<http://docs.sqlalchemy.org/en/latest/faq/metadata_schema.html#how-can-i-get-the-create-table-drop-table-output-as-a-string>`_.
:param executor=None: a function taking arguments
``(sql, *multiparams, **params)``, to which the ``mock`` strategy will
dispatch all statement execution. Used only by ``strategy='mock'``.
"""
strategy = kwargs.pop('strategy', default_strategy)
strategy = strategies.strategies[strategy]
return strategy.create(*args, **kwargs)
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
"""Create a new Engine instance using a configuration dictionary.
The dictionary is typically produced from a config file.
The keys of interest to ``engine_from_config()`` should be prefixed, e.g.
``sqlalchemy.url``, ``sqlalchemy.echo``, etc. The 'prefix' argument
indicates the prefix to be searched for. Each matching key (after the
prefix is stripped) is treated as though it were the corresponding keyword
argument to a :func:`.create_engine` call.
The only required key is (assuming the default prefix) ``sqlalchemy.url``,
which provides the :ref:`database URL <database_urls>`.
A select set of keyword arguments will be "coerced" to their
expected type based on string values. The set of arguments
is extensible per-dialect using the ``engine_config_types`` accessor.
:param configuration: A dictionary (typically produced from a config file,
but this is not a requirement). Items whose keys start with the value
of 'prefix' will have that prefix stripped, and will then be passed to
:ref:`create_engine`.
:param prefix: Prefix to match and then strip from keys
in 'configuration'.
:param kwargs: Each keyword argument to ``engine_from_config()`` itself
overrides the corresponding item taken from the 'configuration'
dictionary. Keyword arguments should *not* be prefixed.
"""
options = dict((key[len(prefix):], configuration[key])
for key in configuration
if key.startswith(prefix))
options['_coerce_config'] = True
options.update(kwargs)
url = options.pop('url')
return create_engine(url, **options)
__all__ = (
'create_engine',
'engine_from_config',
)
| MarkWh1te/xueqiu_predict | python3_env/lib/python3.4/site-packages/sqlalchemy/engine/__init__.py | Python | mit | 18,857 | 0.000159 |
import re
from versions.software.utils import get_command_stdout, get_soup, \
get_text_between
def name():
"""Return the precise name for the software."""
return '7-Zip'
def installed_version():
"""Return the installed version of 7-Zip, or None if not installed."""
try:
version_string = get_command_stdout('7z')
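        # The banner's first line begins like "7-Zip 19.00 ..." (assumed
        # format), so the version is the second whitespace-separated token.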
return version_string.split()[1]
except FileNotFoundError:
pass
def latest_version():
"""Return the latest version of 7-Zip available for download."""
soup = get_soup('http://www.7-zip.org/')
if soup:
tag = soup.find('b', string=re.compile('^Download'))
if tag:
return tag.text.split()[2]
return 'Unknown'
| mchung94/latest-versions | versions/software/sevenzip.py | Python | mit | 718 | 0 |
# -*- coding: UTF-8 -*-
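# Maps each ASCII character to visually similar (homoglyph) Unicode code
# points; characters with no known confusables map to an empty string.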
HMAP = {
' ': u'\u00A0\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200A\u202F\u205F',
'!': u'\uFF01\u01C3\u2D51\uFE15\uFE57',
'"': u'\uFF02',
'#': u'\uFF03\uFE5F',
'$': u'\uFF04\uFE69',
'%': u'\uFF05\u066A\u2052\uFE6A',
'&': u'\uFF06\uFE60',
"'": u'\uFF07\u02B9\u0374',
'(': u'\uFF08\uFE59',
')': u'\uFF09\uFE5A',
'*': u'\uFF0A\u22C6\uFE61',
'+': u'\uFF0B\u16ED\uFE62',
',': u'\uFF0C\u02CF\u16E7\u201A',
'-': u'\uFF0D\u02D7\u2212\u23BC\u2574\uFE63',
'.': u'\uFF0E\u2024',
'/': u'\uFF0F\u1735\u2044\u2215\u29F8',
'0': u'',
'1': u'',
'2': u'\u14BF',
'3': u'\u01B7\u2128',
'4': u'\u13CE',
'5': u'',
'6': u'\u13EE',
'7': u'',
'8': u'',
'9': u'\u13ED',
':': u'\uFF1A\u02D0\u02F8\u0589\u1361\u16EC\u205A\u2236\u2806\uFE13\uFE55',
';': u'\uFF1B\u037E\uFE14\uFE54',
'<': u'\uFF1C\u02C2\u2039\u227A\u276E\u2D66\uFE64',
'=': u'\uFF1D\u2550\u268C\uFE66',
'>': u'\uFF1E\u02C3\u203A\u227B\u276F\uFE65',
'?': u'\uFF1F\uFE16\uFE56',
'@': u'\uFF20\uFE6B',
'A': u'\u0391\u0410\u13AA',
'B': u'\u0392\u0412\u13F4\u15F7\u2C82',
'C': u'\u03F9\u0421\u13DF\u216D\u2CA4',
'D': u'\u13A0\u15EA\u216E',
'E': u'\u0395\u0415\u13AC',
'F': u'\u15B4',
'G': u'\u050C\u13C0',
'H': u'\u0397\u041D\u12D8\u13BB\u157C\u2C8E',
'I': u'\u0399\u0406\u2160',
'J': u'\u0408\u13AB\u148D',
'K': u'\u039A\u13E6\u16D5\u212A\u2C94',
'L': u'\u13DE\u14AA\u216C',
'M': u'\u039C\u03FA\u041C\u13B7\u216F',
'N': u'\u039D\u2C9A',
'O': u'\u039F\u041E\u2C9E',
'P': u'\u03A1\u0420\u13E2\u2CA2',
'Q': u'\u051A\u2D55',
'R': u'\u13A1\u13D2\u1587',
'S': u'\u0405\u13DA',
'T': u'\u03A4\u0422\u13A2',
'U': u'',
'V': u'\u13D9\u2164',
'W': u'\u13B3\u13D4',
'X': u'\u03A7\u0425\u2169\u2CAC',
'Y': u'\u03A5\u2CA8',
'Z': u'\u0396\u13C3',
'[': u'\uFF3B',
'\\': u'\uFF3C\u2216\u29F5\u29F9\uFE68',
']': u'\uFF3D',
'^': u'\uFF3E\u02C4\u02C6\u1DBA\u2303',
'_': u'\uFF3F\u02CD\u268A',
'`': u'\uFF40\u02CB\u1FEF\u2035',
'a': u'\u0251\u0430',
'b': u'',
'c': u'\u03F2\u0441\u217D',
'd': u'\u0501\u217E',
'e': u'\u0435\u1971',
'f': u'',
'g': u'\u0261',
'h': u'\u04BB',
'i': u'\u0456\u2170',
'j': u'\u03F3\u0458',
'k': u'',
'l': u'\u217C',
'm': u'\u217F',
'n': u'\u1952',
'o': u'\u03BF\u043E\u0D20\u2C9F',
'p': u'\u0440\u2CA3',
'q': u'',
'r': u'',
's': u'\u0455',
't': u'',
'u': u'\u1959\u222A',
'v': u'\u1D20\u2174\u2228\u22C1',
'w': u'\u1D21',
'x': u'\u0445\u2179\u2CAD',
'y': u'\u0443\u1EFF',
'z': u'\u1D22',
'{': u'\uFF5B\uFE5B',
'|': u'\uFF5C\u01C0\u16C1\u239C\u239F\u23A2\u23A5\u23AA\u23AE\uFFE8',
'}': u'\uFF5D\uFE5C',
'~': u'\uFF5E\u02DC\u2053\u223C',
}
| JoshuaRLi/notquite | notquite/constants.py | Python | mit | 2,903 | 0.000344 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyThreadpoolctl(PythonPackage):
"""Python helpers to limit the number of threads used in the
threadpool-backed of common native libraries used for scientific
computing and data science (e.g. BLAS and OpenMP)."""
homepage = "https://github.com/joblib/threadpoolctl"
pypi = "threadpoolctl/threadpoolctl-2.0.0.tar.gz"
version('3.0.0', sha256='d03115321233d0be715f0d3a5ad1d6c065fe425ddc2d671ca8e45e9fd5d7a52a')
version('2.0.0', sha256='48b3e3e9ee079d6b5295c65cbe255b36a3026afc6dde3fb49c085cd0c004bbcf')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'), when='@3.0.0:')
| LLNL/spack | var/spack/repos/builtin/packages/py-threadpoolctl/package.py | Python | lgpl-2.1 | 857 | 0.002334 |
import json
import numpy as np
import os
import requests
from datetime import datetime
from flask_security import UserMixin, RoleMixin
from .app import db
DOWNLOAD_BASE_URL = 'https://archive.org/download/'
class Instance(db.EmbeddedDocument):
text = db.StringField(required=True)
source_id = db.ObjectIdField(required=True)
figurative = db.BooleanField(default=False)
include = db.BooleanField(default=False)
conceptual_metaphor = db.StringField(default='')
objects = db.StringField(default='')
subjects = db.StringField(default='')
active_passive = db.StringField(default='')
tense = db.StringField(default='')
description = db.StringField(default='')
spoken_by = db.StringField(default='')
# Has this particular quote been coded already?
repeat = db.BooleanField(default=False)
# If so, what is the index of the instance in the facet?
repeat_index = db.IntField()
# Is this a re-run (repeat of exact same episode)?
# If so, it should be excluded, but mark to keep track
rerun = db.BooleanField(default=False)
reviewed = db.BooleanField(default=False)
reference_url = db.URLField()
class Facet(db.Document):
instances = db.ListField(db.EmbeddedDocumentField(Instance))
word = db.StringField()
total_count = db.IntField(default=0)
number_reviewed = db.IntField(default=0)
class Project(db.Document):
name = db.StringField(required=True)
# corpus = db.ReferenceField(IatvCorpus)
facets = db.ListField(db.ReferenceField(Facet))
created = db.DateTimeField(default=datetime.now)
last_modified = db.DateTimeField(default=datetime.now)
def add_facet_from_search_results(self, facet_label, search_results):
instances = []
for res in search_results:
doc = IatvDocument.from_search_result(res)
doc.save()
new_instance = Instance(doc.document_data, doc.id)
# new_instance.save()
instances.append(new_instance)
new_facet = Facet(instances, facet_label, len(instances))
new_facet.save()
self.facets.append(new_facet)
self.save()
@classmethod
def from_search_results(cls, faceted_search_results, project_name):
'''
Arguments:
faceted_search_results (dict): e.g.
{
'epa/kill': [instance0, instance1, ...],
'epa/strangle': [instance0, ...],
'regulations/rob': [...]
}
'''
facets = []
for facet_label, search_results in faceted_search_results.items():
instances = []
for res in search_results:
doc = IatvDocument.from_search_result(res)
doc.save()
new_instance = Instance(doc.document_data, doc.id)
# new_instance.save()
instances.append(new_instance)
new_facet = Facet(instances, facet_label, len(instances))
new_facet.save()
facets.append(new_facet)
instances = []
return cls(project_name, facets)
class IatvDocument(db.Document):
document_data = db.StringField(required=True)
raw_srt = db.StringField()
iatv_id = db.StringField(required=True)
iatv_url = db.URLField(required=True)
network = db.StringField()
program_name = db.StringField()
# somewhat redundant in case localtime is missing or other issues
start_localtime = db.DateTimeField()
start_time = db.DateTimeField()
stop_time = db.DateTimeField()
runtime_seconds = db.FloatField()
utc_offset = db.StringField()
    # Pass the callable (not its result) so the timestamp is evaluated
    # per-document rather than once at import time.
    datetime_added = db.DateTimeField(default=datetime.now)
@classmethod
def from_search_result(cls, search_result):
'''
New document from iatv search results. See
https://archive.org/details/tv?q=epa+kill&time=20151202-20170516&rows=10&output=json
for an example search result that is parsed
'''
sr = search_result
document_data = sr['snip']
# eg WHO_20160108_113000_Today_in_Iowa_at_530
iatv_id = sr['identifier']
iatv_url = 'https://archive.org/details/' + iatv_id
id_spl = iatv_id.split('_')
network = id_spl[0]
program_name = ' '.join(id_spl[3:])
# eg 20160108
air_date_str = id_spl[1]
# eg 113000; UTC
air_time_str = id_spl[2]
start_localtime = datetime(
int(air_date_str[:4]),
int(air_date_str[4:6]),
int(air_date_str[6:]),
int(air_time_str[:2]),
int(air_time_str[2:4])
)
return cls(document_data, iatv_id=iatv_id, iatv_url=iatv_url,
network=network, program_name=program_name,
start_localtime=start_localtime)
def download_video(self, download_dir):
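        # Archive.org serves clips in bounded time ranges, so fetch the
        # program as one-minute segments, each written to its own file.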
segments = int(np.ceil(self.runtime_seconds / 60.0))
for i in range(segments):
start_time = i * 60
stop_time = (i + 1) * 60
download_url = DOWNLOAD_BASE_URL + self.iatv_id + '/' +\
self.iatv_id + '.mp4?t=' + str(start_time) + '/' +\
str(stop_time) + '&exact=1&ignore=x.mp4'
res = requests.get(download_url)
download_path = os.path.join(
download_dir, '{}_{}.mp4'.format(self.iatv_id, i))
with open(download_path, 'wb') as handle:
handle.write(res.content)
class IatvCorpus(db.Document):
name = db.StringField()
documents = db.ListField(db.ReferenceField(IatvDocument))
class Role(db.Document, RoleMixin):
name = db.StringField(max_length=80, unique=True)
description = db.StringField(max_length=255)
class User(db.Document, UserMixin):
email = db.StringField(max_length=255)
password = db.StringField(max_length=255)
active = db.BooleanField(default=True)
confirmed_at = db.DateTimeField()
roles = db.ListField(db.ReferenceField(Role), default=[])
class Log(db.Document):
time_posted = db.DateTimeField(default=datetime.now)
user_email = db.StringField()
message = db.StringField()
| mtpain/metacorps | app/models.py | Python | bsd-3-clause | 6,238 | 0 |
# -*- coding: utf8 -*-
"""
This is part of shot detector.
Produced by w495 at 2017.05.04 04:18:27
"""
from __future__ import absolute_import, division, print_function
import datetime
from collections import Iterable
from enum import Enum
from types import BuiltinFunctionType, FunctionType
from uuid import UUID
from multipledispatch import dispatch
from numpy import ndarray
from six import (
text_type,
binary_type,
integer_types,
)
from .repr_dict import ReprDict
class ReprHash(ReprDict):
"""
...
"""
__slots__ = [
'logger',
'obj_type',
'obj',
'indent',
]
hashable_types = (
integer_types,
text_type,
binary_type,
bool,
float,
type(None),
ndarray,
Enum,
BuiltinFunctionType,
binary_type,
FunctionType,
UUID,
datetime.datetime,
datetime.timedelta
)
def to_hashable(self):
"""
:return:
"""
repr_hash = self.item(self.obj)
return repr_hash
def object_repr(self, obj):
"""
:param obj:
:return:
"""
var_dict = self.object_fields(obj)
repr_tuple = tuple(var_dict)
return repr_tuple
def object_fields(self, obj):
"""
:param obj:
:return:
"""
tuple_seq = self.object_field_seq(obj)
repr_tuple = tuple(tuple_seq)
return repr_tuple
@dispatch(dict)
def raw_item(self, value):
"""
:param value:
:return:
"""
tuple_seq = self.raw_item_seq(value)
repr_tuple = tuple(tuple_seq)
return repr_tuple
@dispatch(list)
def raw_item(self, value):
"""
:param value:
:return:
"""
repr_seq = self.raw_item_seq(value)
repr_tuple = tuple(repr_seq)
return repr_tuple
@dispatch(hashable_types)
def raw_item(self, value):
"""
:param value:
:return:
"""
return value
@dispatch(Iterable)
def raw_item(self, value):
"""
:param value:
:return:
"""
return tuple(value)
| w495/python-video-shot-detector | shot_detector/utils/repr_hash.py | Python | bsd-3-clause | 2,296 | 0.00784 |
#class SVNRepo:
#    @classmethod
#    def isBadVersion(cls, id):
#        # Run unit tests to check whether version `id` is a bad version
#        # return true if unit tests passed else false.
# You can use SVNRepo.isBadVersion(10) to check whether version 10 is a
# bad version.
class Solution:
"""
    @param n: An integer.
@return: An integer which is the first bad version.
"""
    def findFirstBadVersion(self, n):
        # Binary search: versions before `start` are known good,
        # versions after `end` are known bad.
        start, end = 1, n
        if n == 1:
            return 1
        while start <= end:
            mid = (start + end) // 2  # floor division keeps `mid` an int
            if not SVNRepo.isBadVersion(mid):
                start = mid + 1
            else:
                end = mid - 1
        # `start` now points at the first bad version.
        return start
| Rhadow/leetcode | lintcode/Medium/074_First_Bad_Version.py | Python | mit | 742 | 0.001348 |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import sys
cur_path = os.path.realpath(__file__)
for i in range(4):
cur_path = os.path.dirname(cur_path)
print(f'fate_path: {cur_path}')
sys.path.append(cur_path)
from examples.pipeline.hetero_feature_binning import common_tools
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
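    # Binning configuration: chi-square optimal binning over all feature
    # columns (bin_indexes=-1), transforming values to bin indices.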
param = {
"name": "hetero_feature_binning_0",
"method": "optimal",
"optimal_binning_param": {
"metric_method": "chi-square",
"min_bin_pct": 0.05,
"max_bin_pct": 0.8,
"init_bucket_method": "bucket",
"init_bin_nums": 100,
"mixture": True
},
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": None,
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
pipeline = common_tools.make_normal_dsl(config, namespace, param, host_dense_output=False)
pipeline.fit()
# common_tools.prettify(pipeline.get_component("hetero_feature_binning_0").get_summary())
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
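# Hedged usage note: this script is meant to be run against a deployed FATE
# environment; the config path below mirrors the argparse default.
#   python pipeline-hetero-binning-sparse-optimal-chi-square.py -config ../../config.yaml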
| FederatedAI/FATE | examples/pipeline/hetero_feature_binning/pipeline-hetero-binning-sparse-optimal-chi-square.py | Python | apache-2.0 | 2,362 | 0.00127 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import requests
import yaml
PARTS_URI = 'https://wiki.ubuntu.com/Snappy/Parts'
PARTS_URI_PARAMS = {'action': 'raw'}
_WIKI_OPEN = '{{{'
_WIKI_CLOSE = '}}}'
logging.getLogger("urllib3").setLevel(logging.CRITICAL)
class Wiki:
wiki_parts = None
def _fetch(self):
if self.wiki_parts is None:
raw_content = requests.get(PARTS_URI, params=PARTS_URI_PARAMS)
content = raw_content.text.strip()
if content.startswith(_WIKI_OPEN):
content = content[len(_WIKI_OPEN):].strip()
if content.endswith(_WIKI_CLOSE):
content = content[:-len(_WIKI_CLOSE)]
            # safe_load avoids constructing arbitrary Python objects from
            # the fetched wiki content.
            self.wiki_parts = yaml.safe_load(content)
def get_part(self, name):
self._fetch()
        if name in self.wiki_parts:
            # 'plugin' supersedes 'type'; drop 'type' when both are present.
            if 'plugin' in self.wiki_parts[name] and 'type' in self.wiki_parts[name]:
                del self.wiki_parts[name]['type']
            return self.wiki_parts[name]
def compose(self, name, properties):
"""Return properties composed with the ones from part name in the wiki.
:param str name: The name of the part to query from the wiki
:param dict properties: The current set of properties
:return: Part properties from the wiki composed with the properties
passed as a parameter. If there is no wiki part named name,
properties will be returned.
:rtype: dict
:raises KeyError: if the part named name is not found in the wiki.
"""
self._fetch()
wiki_properties = self.wiki_parts[name]
for key in wiki_properties:
properties[key] = properties.get(key, wiki_properties[key])
properties['plugin'] = wiki_properties.get('plugin', None)
return properties
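# Hedged usage sketch (part names depend on the live wiki content, so
# 'some-part' is illustrative):
#   wiki = Wiki()
#   props = wiki.compose('some-part', {'source': '.'})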
| rbreitenmoser/snapcraft | snapcraft/wiki.py | Python | gpl-3.0 | 2,447 | 0 |
from flask import Flask
# flask.ext.* was removed in Flask 1.0; import the extension package directly.
from flask_sqlalchemy import SQLAlchemy
from default_config import config
db = SQLAlchemy()
def create_app(config_name):
# Define the WSGI application object
app = Flask(__name__)
# Configurations
app.config.from_object(config[config_name])
config[config_name].init_app(app)
# Define the database object which is imported
# by modules and controllers
db.init_app(app)
return app
# Import modules/components using their blueprint handler variables
# TODO: try this at the beginning and see if it still works (circular deps?!)
# from app.auth.controllers import auth as auth
# ..
# Register blueprint(s)
# app.register_blueprint(auth)
# ..
# Build the database:
# This is being done in manage.py
#db.create_all()
# TODO: We also need a manager (from flask-script)
# for setting up the database with persistent data (independent from the app-session),
# e.g. creating admins, standard roles, etc.
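# Hedged usage sketch (assumes default_config.config defines a 'development'
# key, which is not shown here):
#   app = create_app('development')
#   app.run(debug=True)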
| nbbl/ger-trek | app/__init__.py | Python | gpl-2.0 | 1,012 | 0.008893 |
"""Let's Encrypt client crypto utility functions.
.. todo:: Make the transition to use PSS rather than PKCS1_v1_5 when the server
is capable of handling the signatures.
"""
import logging
import os
import OpenSSL
import zope.component
from acme import crypto_util as acme_crypto_util
from acme import jose
from letsencrypt import errors
from letsencrypt import interfaces
from letsencrypt import le_util
logger = logging.getLogger(__name__)
# High level functions
def init_save_key(key_size, key_dir, keyname="key-letsencrypt.pem"):
"""Initializes and saves a privkey.
Inits key and saves it in PEM format on the filesystem.
.. note:: keyname is the attempted filename, it may be different if a file
already exists at the path.
:param int key_size: RSA key size in bits
:param str key_dir: Key save directory.
:param str keyname: Filename of key
:returns: Key
:rtype: :class:`letsencrypt.le_util.Key`
:raises ValueError: If unable to generate the key given key_size.
"""
try:
key_pem = make_key(key_size)
    except ValueError as err:
        logger.exception(err)
        raise
config = zope.component.getUtility(interfaces.IConfig)
# Save file
le_util.make_or_verify_dir(key_dir, 0o700, os.geteuid(),
config.strict_permissions)
key_f, key_path = le_util.unique_file(
os.path.join(key_dir, keyname), 0o600)
key_f.write(key_pem)
key_f.close()
logger.info("Generating key (%d bits): %s", key_size, key_path)
return le_util.Key(key_path, key_pem)
def init_save_csr(privkey, names, path, csrname="csr-letsencrypt.pem"):
"""Initialize a CSR with the given private key.
:param privkey: Key to include in the CSR
:type privkey: :class:`letsencrypt.le_util.Key`
:param set names: `str` names to include in the CSR
:param str path: Certificate save directory.
:returns: CSR
:rtype: :class:`letsencrypt.le_util.CSR`
"""
csr_pem, csr_der = make_csr(privkey.pem, names)
config = zope.component.getUtility(interfaces.IConfig)
# Save CSR
le_util.make_or_verify_dir(path, 0o755, os.geteuid(),
config.strict_permissions)
csr_f, csr_filename = le_util.unique_file(
os.path.join(path, csrname), 0o644)
csr_f.write(csr_pem)
csr_f.close()
logger.info("Creating CSR: %s", csr_filename)
return le_util.CSR(csr_filename, csr_der, "der")
# Lower level functions
def make_csr(key_str, domains):
"""Generate a CSR.
:param str key_str: PEM-encoded RSA key.
:param list domains: Domains included in the certificate.
.. todo:: Detect duplicates in `domains`? Using a set doesn't
preserve order...
:returns: new CSR in PEM and DER form containing all domains
:rtype: tuple
"""
assert domains, "Must provide one or more hostnames for the CSR."
pkey = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, key_str)
req = OpenSSL.crypto.X509Req()
req.get_subject().CN = domains[0]
# TODO: what to put into req.get_subject()?
# TODO: put SAN if len(domains) > 1
req.add_extensions([
OpenSSL.crypto.X509Extension(
"subjectAltName",
critical=False,
value=", ".join("DNS:%s" % d for d in domains)
),
])
req.set_pubkey(pkey)
req.sign(pkey, "sha256")
return tuple(OpenSSL.crypto.dump_certificate_request(method, req)
for method in (OpenSSL.crypto.FILETYPE_PEM,
OpenSSL.crypto.FILETYPE_ASN1))
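# Hedged usage sketch of the two helpers above (the functions are real,
# the domain values are illustrative):
#   key_pem = make_key(2048)
#   csr_pem, csr_der = make_csr(key_pem, ['example.com', 'www.example.com'])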
# WARNING: the csr and private key file are possible attack vectors for TOCTOU
# We should either...
# A. Do more checks to verify that the CSR is trusted/valid
# B. Audit the parsing code for vulnerabilities
def valid_csr(csr):
"""Validate CSR.
    Check if `csr` is well-formed and its signature verifies against its
    own public key (no domain-specific checks are performed).
:param str csr: CSR in PEM.
:returns: Validity of CSR.
:rtype: bool
"""
try:
req = OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_PEM, csr)
return req.verify(req.get_pubkey())
except OpenSSL.crypto.Error as error:
logger.debug(error, exc_info=True)
return False
def csr_matches_pubkey(csr, privkey):
"""Does private key correspond to the subject public key in the CSR?
:param str csr: CSR in PEM.
:param str privkey: Private key file contents (PEM)
:returns: Correspondence of private key to CSR subject public key.
:rtype: bool
"""
req = OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_PEM, csr)
pkey = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, privkey)
try:
return req.verify(pkey)
except OpenSSL.crypto.Error as error:
logger.debug(error, exc_info=True)
return False
def make_key(bits):
"""Generate PEM encoded RSA key.
:param int bits: Number of bits, at least 1024.
:returns: new RSA key in PEM form with specified number of bits
:rtype: str
"""
assert bits >= 1024 # XXX
key = OpenSSL.crypto.PKey()
key.generate_key(OpenSSL.crypto.TYPE_RSA, bits)
return OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key)
def valid_privkey(privkey):
"""Is valid RSA private key?
:param str privkey: Private key file contents in PEM
:returns: Validity of private key.
:rtype: bool
"""
try:
return OpenSSL.crypto.load_privatekey(
OpenSSL.crypto.FILETYPE_PEM, privkey).check()
except (TypeError, OpenSSL.crypto.Error):
return False
def pyopenssl_load_certificate(data):
"""Load PEM/DER certificate.
:raises errors.Error:
"""
openssl_errors = []
for file_type in (OpenSSL.crypto.FILETYPE_PEM, OpenSSL.crypto.FILETYPE_ASN1):
try:
return OpenSSL.crypto.load_certificate(file_type, data), file_type
except OpenSSL.crypto.Error as error: # TODO: other errors?
openssl_errors.append(error)
raise errors.Error("Unable to load: {0}".format(",".join(
str(error) for error in openssl_errors)))
def _get_sans_from_cert_or_req(cert_or_req_str, load_func,
typ=OpenSSL.crypto.FILETYPE_PEM):
try:
cert_or_req = load_func(typ, cert_or_req_str)
except OpenSSL.crypto.Error as error:
logger.exception(error)
raise
# pylint: disable=protected-access
return acme_crypto_util._pyopenssl_cert_or_req_san(cert_or_req)
def get_sans_from_cert(cert, typ=OpenSSL.crypto.FILETYPE_PEM):
"""Get a list of Subject Alternative Names from a certificate.
:param str cert: Certificate (encoded).
:param typ: `OpenSSL.crypto.FILETYPE_PEM` or `OpenSSL.crypto.FILETYPE_ASN1`
:returns: A list of Subject Alternative Names.
:rtype: list
"""
return _get_sans_from_cert_or_req(
cert, OpenSSL.crypto.load_certificate, typ)
def get_sans_from_csr(csr, typ=OpenSSL.crypto.FILETYPE_PEM):
"""Get a list of Subject Alternative Names from a CSR.
:param str csr: CSR (encoded).
:param typ: `OpenSSL.crypto.FILETYPE_PEM` or `OpenSSL.crypto.FILETYPE_ASN1`
:returns: A list of Subject Alternative Names.
:rtype: list
"""
return _get_sans_from_cert_or_req(
csr, OpenSSL.crypto.load_certificate_request, typ)
def dump_pyopenssl_chain(chain, filetype=OpenSSL.crypto.FILETYPE_PEM):
"""Dump certificate chain into a bundle.
:param list chain: List of `OpenSSL.crypto.X509` (or wrapped in
`acme.jose.ComparableX509`).
"""
# XXX: returns empty string when no chain is available, which
# shuts up RenewableCert, but might not be the best solution...
def _dump_cert(cert):
if isinstance(cert, jose.ComparableX509):
# pylint: disable=protected-access
cert = cert._wrapped
return OpenSSL.crypto.dump_certificate(filetype, cert)
# assumes that OpenSSL.crypto.dump_certificate includes ending
# newline character
return "".join(_dump_cert(cert) for cert in chain)
| g1franc/lets-encrypt-preview | letsencrypt/crypto_util.py | Python | apache-2.0 | 8,163 | 0.000123 |
from django.test import TestCase
from django.test.utils import override_settings
from util.testing import UrlResetMixin
class FaviconTestCase(UrlResetMixin, TestCase):
"""
Tests of the courseware favicon.
"""
shard = 1
def test_favicon_redirect(self):
resp = self.client.get("/favicon.ico")
self.assertEqual(resp.status_code, 301)
self.assertRedirects(
resp,
"/static/images/favicon.ico",
status_code=301, target_status_code=404 # @@@ how to avoid 404?
)
@override_settings(FAVICON_PATH="images/foo.ico")
def test_favicon_redirect_with_favicon_path_setting(self):
self.reset_urls()
resp = self.client.get("/favicon.ico")
self.assertEqual(resp.status_code, 301)
self.assertRedirects(
resp,
"/static/images/foo.ico",
status_code=301, target_status_code=404 # @@@ how to avoid 404?
)
| teltek/edx-platform | lms/djangoapps/courseware/tests/test_favicon.py | Python | agpl-3.0 | 961 | 0 |
# -*- encoding: utf-8 -*-
# Pilas engine - A video game framework.
#
# Copyright 2010 - Hugo Ruscitti
# License: LGPLv3 (see http://www.gnu.org/licenses/lgpl.html)
#
# Website - http://www.pilas-engine.com.ar
from pilas.actores import Actor
import pilas
DEMORA = 14
class Menu(Actor):
"""Un actor que puede mostrar una lista de opciones a seleccionar."""
def __init__(self, opciones, x=0, y=0, fuente=None,
color_normal=pilas.colores.gris,
color_resaltado=pilas.colores.blanco):
"""Inicializa el menú.
:param opciones: Tupla con al menos dos elementos obligatorios (:texto:, :funcion:) y :argumentos: opcionales
:param x: Posicion en el eje x
:param y: Posicion en el eje y
"""
self.opciones_como_actores = []
self.iconos_de_opciones = []
self.demora_al_responder = 0
Actor.__init__(self, "invisible.png", x=x, y=y)
self._verificar_opciones(opciones)
self.crear_texto_de_las_opciones(opciones, fuente, color_normal, color_resaltado)
self.opciones = opciones
self.seleccionar_primer_opcion()
self.opcion_actual = 0
        # Counter used to avoid key repetition.
self.activar()
        # Map some keys to move through the menu.
teclas = {pilas.simbolos.IZQUIERDA: 'izquierda',
pilas.simbolos.DERECHA: 'derecha',
pilas.simbolos.ARRIBA: 'arriba',
pilas.simbolos.ABAJO: 'abajo',
pilas.simbolos.SELECCION: 'boton'}
        # Create a custom control.
self.control_menu = pilas.control.Control(pilas.escena_actual(), teclas)
def activar(self):
"""Se ejecuta para activar el comportamiento del menú."""
self.escena.mueve_mouse.conectar(self.cuando_mueve_el_mouse)
self.escena.click_de_mouse.conectar(self.cuando_hace_click_con_el_mouse)
def desactivar(self):
"""Deshabilita toda la funcionalidad del menú."""
self.escena.mueve_mouse.desconectar(self.cuando_mueve_el_mouse)
self.escena.click_de_mouse.desconectar(self.cuando_hace_click_con_el_mouse)
def crear_texto_de_las_opciones(self, opciones, fuente, color_normal, color_resaltado):
"""Genera un actor por cada opcion del menu.
:param opciones: Una lista con todas las opciones que tendrá el menú.
"""
for indice, opcion in enumerate(opciones):
y = self.y - indice * 50
if len(opcion) == 2:
                texto, funcion, argumentos = opcion[0], opcion[1], opcion[2:]  # Should not receive extra arguments here.
else:
if isinstance(opcion[2], list):
texto, funcion, argumentos = opcion[1], opcion[2][0], opcion[2][1:]
icono = pilas.actores.Actor(imagen=opcion[0], x=-120, y=y)
self.iconos_de_opciones.append(icono)
else:
texto, funcion, argumentos = opcion[0], opcion[1], opcion[2:]
opciones = pilas.actores.Opcion(texto, x=0, y=y, funcion_a_invocar=funcion, argumentos=argumentos, fuente=fuente,
color_normal=color_normal, color_resaltado=color_resaltado)
self.opciones_como_actores.append(opciones)
def seleccionar_primer_opcion(self):
"""Destaca la primer opción del menú."""
if self.opciones_como_actores:
self.opciones_como_actores[0].resaltar()
try:
self.iconos_de_opciones[0].escala = [self.escala * 2], .2
        except IndexError:
pass
def _verificar_opciones(self, opciones):
"""Se asegura de que la lista este bien definida.
:param opciones: La lista de opciones a inspeccionar.
"""
for x in opciones:
if not isinstance(x, tuple) or len(x)<2:
raise Exception("Opciones incorrectas, cada opcion tiene que ser una tupla.")
def actualizar(self):
"Se ejecuta de manera periodica."
if self.demora_al_responder < 0:
if self.control_menu.boton:
self.control_menu.limpiar()
self.seleccionar_opcion_actual()
self.demora_al_responder = DEMORA
if self.control_menu.abajo:
self.mover_cursor(1)
self.demora_al_responder = DEMORA
elif self.control_menu.arriba:
self.mover_cursor(-1)
self.demora_al_responder = DEMORA
self.demora_al_responder -= 1
def seleccionar_opcion_actual(self):
"""Se ejecuta para activar y lanzar el item actual."""
opcion = self.opciones_como_actores[self.opcion_actual]
opcion.seleccionar()
def mover_cursor(self, delta):
"""Realiza un movimiento del cursor que selecciona opciones.
:param delta: El movimiento a realizar (+1 es avanzar y -1 retroceder).
"""
        # Unselect the current option.
self._deshabilitar_opcion_actual()
        # Keep the index between 0 and the number of options.
self.opcion_actual += delta
self.opcion_actual %= len(self.opciones_como_actores)
        # Select the new option.
self.opciones_como_actores[self.opcion_actual].resaltar()
try:
self.iconos_de_opciones[self.opcion_actual].escala = [self.escala * 2],.3
        except IndexError:
pass
def __setattr__(self, atributo, valor):
        # Try to propagate the action to the actors of the group.
try:
for x in self.opciones_como_actores:
setattr(x, atributo, valor)
for x in self.iconos_de_opciones:
setattr(x , atributo, valor)
except AttributeError:
pass
Actor.__setattr__(self, atributo, valor)
def cuando_mueve_el_mouse(self, evento):
"""Permite cambiar la opcion actual moviendo el mouse. Retorna True si el mouse esta sobre alguna opcion.
:param evento: El evento que representa el movimiento del mouse.
"""
for indice, opcion in enumerate(self.opciones_como_actores):
if opcion.colisiona_con_un_punto(evento.x, evento.y):
if indice != self.opcion_actual:
self._deshabilitar_opcion_actual()
self.opcion_actual = indice
self.opciones_como_actores[indice].resaltar()
try:
self.iconos_de_opciones[self.opcion_actual].escala = [self.escala * 2],.3
                except IndexError:
pass
return True
def _deshabilitar_opcion_actual(self):
"""Le quita el foco o resaltado a la opción del menú actual."""
self.opciones_como_actores[self.opcion_actual].resaltar(False)
try:
self.iconos_de_opciones[self.opcion_actual].escala = [self.escala],.3
        except IndexError:
pass
def cuando_hace_click_con_el_mouse(self, evento):
"""Se ejecuta cuando se hace click con el mouse.
:param evento: objeto que representa el evento click de mouse.
"""
if self.cuando_mueve_el_mouse(evento):
self.seleccionar_opcion_actual()
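# Hedged usage sketch (assumes the usual pilas bootstrap; 'jugar' is a
# hypothetical callback):
#   import pilas
#   pilas.iniciar()
#   def jugar():
#       pass
#   menu = pilas.actores.Menu([('Jugar', jugar), ('Salir', pilas.terminar)])
#   pilas.ejecutar()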
| fsalamero/pilas | pilas/actores/menu.py | Python | lgpl-3.0 | 7,370 | 0.004755 |
#!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets a team by its ID.
To determine which teams exist, run get_all_teams.py.
Tags: TeamService.getTeam
"""
__author__ = '[email protected] (Jeff Sham)'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
# Initialize client object.
client = DfpClient(path=os.path.join('..', '..', '..', '..'))
# Initialize appropriate service.
team_service = client.GetService('TeamService', version='v201306')
# Set the ID of the team to get.
team_id = 'INSERT_TEAM_ID_HERE'
# Get team.
team = team_service.GetTeam(team_id)[0]
# Display results.
print ('Team with ID \'%s\' and name \'%s\' was found.'
% (team['id'], team['name']))
| caioserra/apiAdwords | examples/adspygoogle/dfp/v201306/get_team.py | Python | apache-2.0 | 1,511 | 0.001985 |
# -*- coding: utf-8 -*-
# Copyright 2020 Green Valley Belgium NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.7@@
from rogerthat.dal import generator
from rogerthat.models import ActivationLog
from mcfw.rpc import returns, arguments
@returns([ActivationLog])
@arguments(min_timestamp=int, max_timestamp=int)
def get_activation_log(min_timestamp, max_timestamp):
qry = ActivationLog.gql("WHERE timestamp > :min_timestamp AND timestamp < :max_timestamp ORDER BY timestamp DESC")
qry.bind(min_timestamp=min_timestamp, max_timestamp=max_timestamp)
return generator(qry.run())
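# Hedged usage sketch (timestamps are illustrative UNIX epoch seconds):
#   logs = get_activation_log(1577836800, 1580515200)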
| our-city-app/oca-backend | src/rogerthat/dal/activation.py | Python | apache-2.0 | 1,112 | 0.000899 |
from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
        response = w.get_match_list(id)
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
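# Hedged worked example: if summoners A and B share two past games and C
# shares none, computeFriendship returns {A: {B: 3}, B: {A: 3}, C: {}}
# (a pair's counter starts at 1 and gains 1 per shared game, so the value
# is shared_games + 1), and computePremades then merges {A, B} into one group.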
def getPremades( summonerName, lolAPIKey, past10 ):
w = riotwatcher.RiotWatcher(lolAPIKey, default_region=riotwatcher.EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames | DenBaum/lolm8guesser | friendship.py | Python | mit | 3,046 | 0.043664 |
try:
from . import i18n
from . import connect_core
from . import screens
from . import exceptions
from . import command
except ModuleNotFoundError:
import i18n
import connect_core
import screens
import exceptions
import command
def fast_post_step0(
api: object,
board: str,
title: str,
content: str,
post_type: int) -> None:
api._goto_board(board)
cmd_list = list()
cmd_list.append(command.Ctrl_P)
cmd_list.append(str(post_type))
cmd_list.append(command.Enter)
cmd_list.append(str(title))
cmd_list.append(command.Enter)
cmd_list.append(str(content))
cmd_list.append(command.Ctrl_X)
cmd_list.append('s')
cmd = ''.join(cmd_list)
target_list = [
connect_core.TargetUnit(
i18n.HasPostPermission,
'發表文章於【',
break_detect=True,
),
connect_core.TargetUnit(
i18n.NoPermission,
'使用者不可發言',
break_detect=True,
),
connect_core.TargetUnit(
i18n.NoPermission,
'無法發文: 未達看板要求權限',
break_detect=True
),
connect_core.TargetUnit(
i18n.AnyKeyContinue,
'任意鍵繼續',
break_detect=True,
),
connect_core.TargetUnit(
i18n.SaveFile,
'確定要儲存檔案嗎',
break_detect=True,
)
]
index = api.connect_core.fast_send(cmd, target_list)
if index < 0:
screens.show(api.config, api.connect_core.get_screen_queue())
raise exceptions.UnknownError(i18n.UnknownError)
if index == 1 or index == 2:
raise exceptions.NoPermission(i18n.NoPermission)
def fast_post_step1(api: object, sign_file) -> None:
cmd = '\r'
target_list = [
connect_core.TargetUnit(
i18n.HasPostPermission,
'發表文章於【',
break_detect=True,
),
connect_core.TargetUnit(
i18n.NoPermission,
'使用者不可發言',
break_detect=True,
),
connect_core.TargetUnit(
i18n.NoPermission,
'無法發文: 未達看板要求權限',
break_detect=True
),
connect_core.TargetUnit(
i18n.AnyKeyContinue,
'任意鍵繼續',
break_detect=True,
),
connect_core.TargetUnit(
i18n.SaveFile,
'確定要儲存檔案嗎',
break_detect=True,
),
connect_core.TargetUnit(
i18n.SelectSignature,
'x=隨機',
response=str(sign_file) + '\r',
),
]
index = api.connect_core.fast_send(cmd, target_list)
if index < 0:
screens.show(api.config, api.connect_core.get_screen_queue())
raise exceptions.UnknownError(i18n.UnknownError)
def fast_post(
api: object,
board: str,
title: str,
content: str,
post_type: int,
sign_file) -> None:
api._goto_board(board)
cmd_list = list()
cmd_list.append(command.Ctrl_P)
cmd_list.append(str(post_type))
cmd_list.append(command.Enter)
cmd_list.append(str(title))
cmd_list.append(command.Enter)
cmd_list.append(str(content))
cmd_list.append(command.Ctrl_X)
cmd = ''.join(cmd_list)
target_list = [
connect_core.TargetUnit(
i18n.HasPostPermission,
'發表文章於【',
break_detect=True,
),
connect_core.TargetUnit(
i18n.NoPermission,
'使用者不可發言',
break_detect=True,
),
connect_core.TargetUnit(
i18n.NoPermission,
'無法發文: 未達看板要求權限',
break_detect=True
),
connect_core.TargetUnit(
i18n.AnyKeyContinue,
'任意鍵繼續',
break_detect=True,
),
connect_core.TargetUnit(
i18n.SaveFile,
'確定要儲存檔案嗎',
response='s' + command.Enter,
),
connect_core.TargetUnit(
i18n.SelectSignature,
'x=隨機',
response=str(sign_file) + command.Enter,
),
]
index = api.connect_core.fast_send(cmd, target_list)
if index < 0:
screens.show(api.config, api.connect_core.get_screen_queue())
raise exceptions.UnknownError(i18n.UnknownError)
if index == 1 or index == 2:
raise exceptions.NoPermission(i18n.NoPermission)
def post(
api: object,
board: str,
title: str,
content: str,
post_type: int,
sign_file) -> None:
api._goto_board(board)
cmd_list = list()
cmd_list.append(command.Ctrl_P)
cmd = ''.join(cmd_list)
target_list = [
connect_core.TargetUnit(
i18n.HasPostPermission,
'發表文章於【',
break_detect=True,
),
connect_core.TargetUnit(
i18n.NoPermission,
'使用者不可發言',
break_detect=True,
),
connect_core.TargetUnit(
i18n.NoPermission,
'無法發文: 未達看板要求權限',
break_detect=True
),
]
index = api.connect_core.send(cmd, target_list)
if index < 0:
screens.show(api.config, api.connect_core.get_screen_queue())
raise exceptions.UnknownError(i18n.UnknownError)
if index == 1 or index == 2:
raise exceptions.NoPermission(i18n.NoPermission)
screens.show(api.config, api.connect_core.get_screen_queue())
cmd_list = list()
cmd_list.append(str(post_type))
cmd_list.append(command.Enter)
cmd_list.append(str(title))
cmd_list.append(command.Enter)
cmd_list.append(command.Ctrl_Y * 40)
cmd_list.append(str(content))
cmd_list.append(command.Ctrl_X)
cmd = ''.join(cmd_list)
target_list = [
connect_core.TargetUnit(
i18n.AnyKeyContinue,
'任意鍵繼續',
break_detect=True,
),
connect_core.TargetUnit(
i18n.SaveFile,
'確定要儲存檔案嗎',
response='s' + command.Enter,
),
connect_core.TargetUnit(
i18n.SelectSignature,
'x=隨機',
response=str(sign_file) + command.Enter,
),
]
index = api.connect_core.send(
cmd,
target_list,
screen_timeout=api.config.screen_post_timeout
)
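# Hedged usage note: these helpers are internal building blocks of the PyPtt
# API object; callers normally go through its public post method, roughly
# (the exact public signature is an assumption based on the parameters above):
#   api.post(board='Test', title='some title', content='some content',
#            post_type=1, sign_file=0)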
| Truth0906/PTTCrawlerLibrary | PyPtt/_api_post.py | Python | lgpl-3.0 | 6,653 | 0 |
import libtcodpy as libtcod
import math
import shelve
import textwrap
#############################################
# Constants and Big Vars
#############################################
# Testing State
TESTING = True
# Size of the window
SCREEN_WIDTH = 100
SCREEN_HEIGHT = 70
# Size of the Map
MAP_WIDTH = SCREEN_WIDTH
MAP_HEIGHT = SCREEN_HEIGHT - 7
# GUI Constants
BAR_WIDTH = 20
PANEL_HEIGHT = 7
PANEL_Y = SCREEN_HEIGHT - PANEL_HEIGHT
MSG_X = BAR_WIDTH + 2
MSG_WIDTH = SCREEN_WIDTH - BAR_WIDTH - 2
MSG_HEIGHT = PANEL_HEIGHT - 1
LEVEL_SCREEN_WIDTH = 40
CHARACTER_SCREEN_WIDTH = 30
# Rooms
ROOM_MAX_SIZE = 13
ROOM_MIN_SIZE = 6
MAX_ROOMS = 200
# Inventory
INVENTORY_WIDTH = 50
# Player Stats
LEVEL_UP_BASE = 200
LEVEL_UP_FACTOR = 150
# Magic
CONFUSE_NUM_TURNS = 10
CONFUSE_RANGE = 8
FIREBALL_RADIUS = 3
FIREBALL_DAMAGE = 25
HEAL_AMOUNT = 40
LIGHTNING_DAMAGE = 40
LIGHTNING_RANGE = 5
# Field of Vision
FOV_ALGO = 0
FOV_LIGHT_WALLS = True
LIMIT_FPS = 20 # 20 frames-per-second maximum
# Colors of Terrain
color_dark_wall = libtcod.Color(0, 0, 100)
color_light_wall = libtcod.Color(130, 110, 50)
color_dark_ground = libtcod.Color(50, 50, 150)
color_light_ground = libtcod.Color(200, 180, 50)
# Python 3 Global Vars
map = []
gameobjects = []
game_msgs = []
stairs = None
dungeon_level = 1
torch_bonus = 0
#############################################
# Classes
#############################################
class AI_BasicMonster:
# AI for a Basic Monster
def __init__(self, owner):
self.owner = owner
owner.ai = self
def take_turn(self):
# A basic monster takes its turn. If you can see it, it can see you.
monster = self.owner
if libtcod.map_is_in_fov(fov_map, monster.x, monster.y):
# Move towards player if non-adjacent.
if monster.distance_to(player) >= 2:
monster.move_towards(player.x, player.y)
# Adjacent? Attack if the player is still alive.
elif player.fighter.hp > 0:
monster.fighter.attack(player)
class AI_ConfusedMonster:
# AI for a temporarily Confused Monster
    def __init__(self, owner, old_ai, num_turns = CONFUSE_NUM_TURNS):
        self.owner = owner
        self.old_ai = old_ai
        self.num_turns = num_turns
        owner.ai = self
def take_turn(self):
if self.num_turns > 0: # Monster still confused
# Move in a random direction, and decrease num_turns confused.
self.owner.move(libtcod.random_get_int(0, -1, 1), libtcod.random_get_int(0, -1, 1))
self.num_turns -= 1
else: # Restore the previous AI and destroy this one.
self.owner.ai = self.old_ai
message('The ' + self.owner.name + ' is no longer confused!')
class Equipment:
# An object that can be equipped, yielding bonuses. Automatically adds the Item component.
def __init__(self, owner, slot, power_bonus = 0, defense_bonus = 0, max_hp_bonus = 0, torch_bonus = 0, dodge_bonus = 0):
self.power_bonus = power_bonus
self.defense_bonus = defense_bonus
self.max_hp_bonus = max_hp_bonus
self.torch_bonus = torch_bonus
self.dodge_bonus = dodge_bonus
self.slot = slot
self.is_equipped = False
self.owner = owner
owner.equipment = self
        if owner.item is None:
owner.item = Item(owner)
def toggle_equip(self):
# Toggle equip/dequip status.
if self.is_equipped:
self.dequip()
else:
self.equip()
def equip(self):
global fov_recompute
# If the slot is already being used, dequip whatever is there first.
old_equipment = get_equipped_in_slot(self.slot)
if old_equipment is not None:
old_equipment.dequip()
# Equip object and show a message about it.
self.is_equipped = True
message('Equipped ' + self.owner.name + ' on ' + self.slot + '.', libtcod.light_green)
fov_recompute = True
    def dequip(self):
        # Dequip object and show a message about it.
        global fov_recompute
        if not self.is_equipped:
            return
        self.is_equipped = False
        message('Dequipped ' + self.owner.name + ' from ' + self.slot + '.', libtcod.light_yellow)
        fov_recompute = True
def check_equip(self):
return self.is_equipped
class Fighter:
# A composite class for combat-related properties.
def __init__(self, owner, hp, defense, power, xp, death_function = None, to_hit = 80, dodge = 0):
self.owner = owner
self.owner.fighter = self
self.base_max_hp = hp
self.hp = hp
self.base_defense = defense
self.base_power = power
self.base_dodge = dodge
self.xp = xp
self.to_hit = to_hit
self.death_function = death_function
@property
def power(self):
# Returns dynamic power value.
bonus = sum(equipment.power_bonus for equipment in get_all_equipped(self.owner))
return self.base_power + bonus
@property
def defense(self):
# Returns dynamic defense value.
bonus = sum(equipment.defense_bonus for equipment in get_all_equipped(self.owner))
return self.base_defense + bonus
@property
def dodge(self):
# Returns dynamic dodge value.
bonus = sum(equipment.dodge_bonus for equipment in get_all_equipped(self.owner))
return self.base_dodge + bonus
@property
def max_hp(self):
# Returns dynamic max_hp value.
bonus = sum(equipment.max_hp_bonus for equipment in get_all_equipped(self.owner))
return self.base_max_hp + bonus
def attack(self, target):
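        # Roll 1-101; the hit lands only if to_hit meets or beats the roll plus
        # the target's dodge (to_hit 80 vs. dodge 0 hits about 79% of the time).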
chance_hit = libtcod.random_get_int(0, 1, 101)
if self.to_hit < (chance_hit + target.fighter.dodge):
message(self.owner.name.capitalize() + ' swings and misses!')
return
# A simple formula for attack damage.
damage = self.power - target.fighter.defense
if damage > 0:
# Make the target take some damageself.
message(self.owner.name.capitalize() + ' attacks ' + target.name + ' for ' + str(damage) + ' hit points.')
target.fighter.take_damage(damage)
else:
message(self.owner.name.capitalize() + ' attacks ' + target.name + ' but it has no effect!')
def heal(self, amount):
# Heal by the given amount, without going over the maximum.
self.hp += amount
if self.hp > self.max_hp:
self.hp = self.max_hp
def take_damage(self, damage):
# Apply damage if possible.
if damage > 0:
self.hp -= damage
# Check for death. If there's a death function, call it.
if self.hp <= 0:
function = self.death_function
if function is not None:
function(self.owner)
if self.owner != player: # Yield experience to the player
player.fighter.xp += self.xp
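# Hedged worked example of the composed stats above: a player with
# base_power 2 wielding the starting dagger (power_bonus 2) reports
# fighter.power == 4; attack() then deals power minus the target's defense.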
class Item:
# An item that can be picked up and used.
def __init__(self, owner, use_function = None):
self.use_function = use_function
self.owner = owner
owner.item = self
def drop(self):
# Add item to the map @ player's coordinates, and remove from the player's inventory.
gameobjects.append(self.owner)
inventory.remove(self.owner)
self.owner.x = player.x
self.owner.y = player.y
message('You dropped a ' + self.owner.name + '.', libtcod.yellow)
# Special Case: If the object has the Equipment component, dequip it before dropping.
if self.owner.equipment:
self.owner.equipment.dequip()
def pick_up(self):
# Add to the player's inventory and remove from the map.
if len(inventory) >= 26:
message('Your inventory is full, cannot pick up ' + self.owner.name + '.', libtcod.red)
else:
inventory.append(self.owner)
gameobjects.remove(self.owner)
message('You picked up a ' + self.owner.name + '!', libtcod.green)
# Special Case: Automatically equip, if the corresponding equipment slot is unused.
equipment = self.owner.equipment
if equipment and get_equipped_in_slot(equipment.slot) is None:
equipment.equip()
def use(self):
# Special case: If the object has the Equipment component, the "use" action is to equip/dequip the object.
if self.owner.equipment:
self.owner.equipment.toggle_equip()
return
# Just call the "use_function" if it is defined.
if self.use_function is None:
message('The ' + self.owner.name + ' cannot be used.')
else:
if self.use_function() != 'cancelled':
# Destroy after use, unless it was cancelled for some reason.
inventory.remove(self.owner)
class Light:
def __init__(self):
self.base_light_radius = 8
@property
def TORCH_RADIUS(self):
# Returns dynamic light value. Only works for items equipped by player.
torch_bonus = sum(equipment.torch_bonus for equipment in get_all_equipped(player))
return self.base_light_radius + torch_bonus
class GameObject:
# This object is a generic item in game: player, monster, item, tile feature
# An object is always represented as a symbol on screen.
def __init__(self, x, y, char, name, color, blocks = False, always_visible = False):
self.x = x
self.y = y
self.char = char
self.name = name
self.color = color
self.blocks = blocks
self.always_visible = always_visible
# Components which may be created later, but must exist to be tested.
self.fighter = None
self.ai = None
self.status_effect = None
self.item = None
self.equipment = None
def clear(self):
# Erase the character that represents this object.
libtcod.console_put_char(con, self.x, self.y, ' ', libtcod.BKGND_NONE)
def distance(self, x, y):
# Return the distance to some coordinates.
return math.sqrt((x - self.x) ** 2 + (y - self.y) ** 2)
def distance_to(self, other):
# Return the distance between self and another object.
dx = other.x - self.x
dy = other.y - self.y
return math.sqrt(dx ** 2 + dy ** 2)
def draw(self):
# Check to see if the object is in the player's FOV
if libtcod.map_is_in_fov(fov_map, self.x, self.y) or (self.always_visible and map[self.x][self.y].explored):
# Set the color and then draw the corresponding character of the object in that color.
libtcod.console_set_default_foreground(con, self.color)
libtcod.console_put_char(con, self.x, self.y, self.char, libtcod.BKGND_NONE)
def move(self, dx, dy):
# Move object by the param amount.
global map
if not is_blocked(self.x + dx, self.y + dy):
self.x += dx
self.y += dy
def move_towards(self, target_x, target_y):
# Generate vector from this object to the target, and distance.
dx = target_x - self.x
dy = target_y - self.y
distance = math.sqrt(dx ** 2 + dy ** 2)
# Normalize distance to length 1 (preserving direction), then round it and convert it to integer so the movement is restricted to the map grid
dx = int(round(dx / distance))
dy = int(round(dy / distance))
self.move(dx, dy)
def send_to_back(self):
# Make this object be drawn first, so all others appear above it if they're in the same tile.
global gameobjects
gameobjects.remove(self)
gameobjects.insert(0, self)
class Rect:
# A rectangle used on a map, namely for the creation of rooms.
def __init__(self, x, y, w, h):
self.x1 = x
self.y1 = y
self.x2 = x + w
self.y2 = y + h
def center(self):
# Returns the center of the Rect object.
center_x = (self.x1 + self.x2) // 2
center_y = (self.y1 + self.y2) // 2
return center_x, center_y
def intersect(self, other):
# Returns True if this rect intersects with another one.
return (self.x1 <= other.x2 and self.x2 >= other.x1 and self.y1 <= other.y2 and self.y2 >= other.y1)
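# Hedged worked example: Rect(0, 0, 4, 4) spans x1..x2 = 0..4, so it
# intersects Rect(3, 3, 4, 4) (they share the 3..4 band on both axes)
# but not Rect(10, 10, 2, 2).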
class Status_Item_Regen:
# A class for item-based status effects that regenerate the player.
def __init__(self, owner, amount = 1, chance = 100):
self.amount = amount
self.chance = chance
self.owner = owner
owner.status_effect = self
def take_turn(self):
        if self.owner.equipment.check_equip():
            randint = libtcod.random_get_int(0, 1, 100)
if randint <= self.chance:
player.fighter.hp += self.amount
if player.fighter.hp > player.fighter.max_hp:
player.fighter.hp = player.fighter.max_hp
class Tile:
# A tile on the map
def __init__(self, blocked, block_sight = None):
self.blocked = blocked
# By default, if a tile is blocked, it also blocks sight.
        if block_sight is None:
block_sight = blocked
self.block_sight = block_sight
self.explored = False
#############################################
# Functions
#############################################
def cast_confuse():
# Ask the player for a target to confuse.
message('Left-click an enemy to confuse it, or right-click to cancel.', libtcod.light_cyan)
monster = target_monster(CONFUSE_RANGE)
if monster is None:
return 'cancelled'
# Replace the monster's AI with a "confused" one; after some turns it will restore the old AI.
confused_ai = AI_ConfusedMonster(owner = monster, old_ai = monster.ai)
message('The eyes of the ' + monster.name + ' look vacant, as it starts to stumble around!', libtcod.light_green)
def cast_fireball():
# Ask the player for a target tile to throw a fireball at.
message('Left-click a target tile for the fireball, or right-click to cancel.', libtcod.light_cyan)
(x, y) = target_tile()
if x is None:
return 'cancelled'
message('The fireball explodes, burning everything within ' + str(FIREBALL_RADIUS) + ' tiles!', libtcod.orange)
# Damage every fighter-object in range, including the player.
for obj in gameobjects:
if obj.distance(x, y) <= FIREBALL_RADIUS and obj.fighter:
message('The ' + obj.name + ' gets burned for ' + str(FIREBALL_DAMAGE) + ' hit points.', libtcod.orange)
obj.fighter.take_damage(FIREBALL_DAMAGE)
def cast_heal():
# Heal the player
if player.fighter.hp == player.fighter.max_hp:
message('You are already at full health.', libtcod.red)
return 'cancelled'
message('Your wounds start to feel better!', libtcod.light_violet)
player.fighter.heal(HEAL_AMOUNT)
def cast_lightning():
# Find closest enemy (inside a maximum range) and damage it.
monster = closest_monster(LIGHTNING_RANGE)
if monster is None: # No enemy found within maximum range.
message('No enemy is close enough to strike.', libtcod.red)
return 'cancelled'
# Zap it!
    message('A lightning bolt strikes the ' + monster.name + ' with a loud thunder! The damage is ' + str(LIGHTNING_DAMAGE) + ' hit points.', libtcod.light_blue)
monster.fighter.take_damage(LIGHTNING_DAMAGE)
def check_level_up():
# See if the player's experience is enough to level-up.
level_up_xp = LEVEL_UP_BASE + player.level * LEVEL_UP_FACTOR
if player.fighter.xp >= level_up_xp:
# Level up.
player.level += 1
player.fighter.xp -= level_up_xp
message('Your battle skills grow stronger! You reached level ' + str(player.level) + '!', libtcod.yellow)
choice = None
while choice == None: # keep asking until a choice is made
choice = menu('Level up! Choose a stat to raise:\n', ['Constitution (+20 HP, from ' + str(player.fighter.max_hp) + ')', 'Strength (+1 attack, from ' + str(player.fighter.power) + ')', 'Toughness (+1 defense, from ' + str(player.fighter.defense) + ')', 'Agility (+1 dodge, from ' + str(player.fighter.dodge) + ')'], LEVEL_SCREEN_WIDTH)
if choice == 0:
player.fighter.base_max_hp += 20
player.fighter.hp += 20
elif choice == 1:
player.fighter.base_power += 1
elif choice == 2:
player.fighter.base_defense += 1
elif choice == 3:
player.fighter.base_dodge += 1
def closest_monster(max_range):
# Find closest enemy, up to a maximum range, and in the player's FOV.
closest_enemy = None
closest_dist = max_range + 1 # Start with (slightly more than) maximum range.
for object in gameobjects:
if object.fighter and not object == player and libtcod.map_is_in_fov(fov_map, object.x, object.y):
# Calculate distance between this object and the player.
dist = player.distance_to(object)
if dist < closest_dist: # It's closer, so remember it.
closest_enemy = object
closest_dist = dist
return closest_enemy
def create_h_tunnel(x1, x2, y):
global map
for x in range(min(x1, x2), max(x1, x2) + 1):
map[x][y].blocked = False
map[x][y].block_sight = False
def create_room(room):
global map, gameobjects
# Create passable areas in rooms, carved out via rects from map.
for x in range(room.x1 + 1, room.x2):
for y in range(room.y1 + 1, room.y2):
map[x][y].blocked = False
map[x][y].block_sight = False
def create_v_tunnel(y1, y2, x):
global map
for y in range(min(y1, y2), max(y1, y2) + 1):
map[x][y].blocked = False
map[x][y].block_sight = False
def from_dungeon_level(table):
# Returns a value that depends on level. The table specifies what value occurs after each level, default is 0.
for (value, level) in reversed(table):
if dungeon_level >= level:
return value
return 0
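# Hedged worked example: from_dungeon_level([[2, 1], [3, 4], [5, 6]]) yields
# 2 on dungeon levels 1-3, 3 on levels 4-5, and 5 from level 6 onwards.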
def get_all_equipped(obj):
# Returns a list of equipped items.
if obj == player:
equipped_list = []
for item in inventory:
if item.equipment and item.equipment.is_equipped:
equipped_list.append(item.equipment)
return equipped_list
else:
return [] # Other gameobjects have no equipment
def get_equipped_in_slot(slot):
global inventory
# Returns the equipment in a slot, or None if it's empty.
for obj in inventory:
if obj.equipment and obj.equipment.slot == slot and obj.equipment.is_equipped:
return obj.equipment
return None
def get_names_under_mouse():
global mouse
# Return a string with the names of all gameobjects under the mouse
(x, y) = (mouse.cx, mouse.cy)
# Create a list with the names of all gameobjects at the mouse's coordinates and in FOV.
names = [obj.name for obj in gameobjects
if obj.x == x and obj.y == y and libtcod.map_is_in_fov(fov_map, obj.x, obj.y)]
names = ', '.join(names) # Join the names, separated by commas.
return names.capitalize()
def handle_keys():
global fov_recompute
global inventory
global key
global player
if key.vk == libtcod.KEY_ENTER and key.lalt:
#Alt + Enter: Toggle Fullscreen
libtcod.console_set_fullscreen(not libtcod.console_is_fullscreen())
elif key.vk == libtcod.KEY_ESCAPE:
return 'exit' # Exit Game
# Game State Keys
if game_state == 'playing':
# Movement keys
if key.vk == libtcod.KEY_UP:
player_move_or_attack(0, -1)
elif key.vk == libtcod.KEY_DOWN:
player_move_or_attack(0, 1)
elif key.vk == libtcod.KEY_LEFT:
player_move_or_attack(-1, 0)
elif key.vk == libtcod.KEY_RIGHT:
player_move_or_attack(1, 0)
else:
# Test for other keys.
key_char = chr(key.c)
# Pick up an item.
if key_char == 'g':
# Look for an item in the player's tile.
for object in gameobjects:
if object.x == player.x and object.y == player.y and object.item:
object.item.pick_up()
break
if key_char == 'i':
# Show the inventory.
chosen_item = inventory_menu('Press the key next to an item to use it, or any other to cancel.\n')
if chosen_item is not None:
chosen_item.use()
if key_char == 'd':
                # Show the inventory; if an item is selected, drop it.
chosen_item = inventory_menu('Press the key next to an item to drop it, or any other to cancel.\n')
if chosen_item is not None:
chosen_item.drop()
if key_char == '>':
# Go down stairs, if the player is on them
if stairs.x == player.x and stairs.y == player.y:
next_level()
if key_char == 'c':
# Show character information.
level_up_xp = LEVEL_UP_BASE + player.level * LEVEL_UP_FACTOR
msgbox('Character Information\n\nLevel: ' + str(player.level) + '\nExperience: ' + str(player.fighter.xp) + '\nExperience to level up: ' + str(level_up_xp) + '\n\nMaximum HP: ' + str(player.fighter.max_hp) + '\nAttack: ' + str(player.fighter.power) + '\nDefense: ' + str(player.fighter.defense) + '\nDodge: ' + str(player.fighter.dodge), CHARACTER_SCREEN_WIDTH)
if key_char == '?':
# Show help.
                msgbox('Press the following keys for results.\n\nArrow Keys: Move.\ng: Pick up an item.\ni: Inventory for use.\nd: Inventory for drop.\nc: Character information.\nShift + > when on stairs down: Go down.')
return 'didnt-take-turn'
def initialize_fov():
global fov_recompute, fov_map
fov_recompute = True
# Create the FOV map, in accordance with the established Map
fov_map = libtcod.map_new(MAP_WIDTH, MAP_HEIGHT)
for y in range(MAP_HEIGHT):
for x in range(MAP_WIDTH):
libtcod.map_set_properties(fov_map, x, y, not map[x][y].block_sight, not map[x][y].blocked)
# Clear Console
libtcod.console_clear(con)
def inventory_menu(header):
# Show a menu with each item of the inventory as an option.
if len(inventory) == 0:
options = ['Inventory is empty.']
else:
options = []
for item in inventory:
text = item.name
# Show additional information, in case it's equipped.
if item.equipment and item.equipment.is_equipped:
text = text + ' (on ' + item.equipment.slot + ')'
options.append(text)
index = menu(header, options, INVENTORY_WIDTH)
# If an item was chosen, return it.
if index is None or len(inventory) == 0:
return None
return inventory[index].item
def is_blocked(x, y):
global gameobjects
# First, test if the map tile is blocking.
if map[x][y].blocked:
return True
# Now check to see if there are any blocking gameobjects.
for object in gameobjects:
if object.blocks and object.x == x and object.y == y:
return True
# Otherwise, not blocked.
return False
def load_game():
# Open the previously saved shelve and load the game data.
global map, gameobjects, stairs, dungeon_level
global player, inventory
global game_msgs, game_state
file = shelve.open('savegame', 'r')
map = file['map']
gameobjects = file['objects']
player = gameobjects[file['player_index']] # Get index of player in gameobjects list and access it.
inventory = file['inventory']
game_msgs = file['game_msgs']
game_state = file['game_state']
stairs = gameobjects[file['stairs_index']]
dungeon_level = file['dungeon_level']
file.close()
# Initialize the FOV
initialize_fov()
def main_menu():
img = libtcod.image_load(b'menu_background3.png')
while not libtcod.console_is_window_closed():
# Show the background image at twice the regular resolution.
libtcod.image_blit_2x(img, 0, 0, 0)
# Show the game's title and credits.
libtcod.console_set_default_foreground(0, libtcod.light_yellow)
libtcod.console_print_ex(0, SCREEN_WIDTH//2, SCREEN_HEIGHT//2-4, libtcod.BKGND_NONE, libtcod.CENTER, 'TOMBS OF NEW BEGINNINGS')
libtcod.console_print_ex(0, SCREEN_WIDTH//2, SCREEN_HEIGHT-2, libtcod.BKGND_NONE, libtcod.CENTER, 'By Parker Harris Emerson')
# Show options and wait for the player's choice.
choice = menu('', ['Play a new game', 'Continue last game', 'Quit'], 24)
if choice == 0: # New Game
global light
light = Light()
new_game()
play_game()
elif choice == 1: #load last game
try:
load_game()
except:
msgbox('\n No saved game to load.\n', 24)
continue
play_game()
elif choice == 2: # Quit
break
def make_map():
global map, player, gameobjects, stairs
# The List of GameObjects
gameobjects = [player]
# Fill the map with "blocked" tiles.
map = [[ Tile(True)
for y in range(MAP_HEIGHT) ]
for x in range(MAP_WIDTH) ]
rooms = []
num_rooms = 0
for r in range(MAX_ROOMS):
# Random width and height for rooms.
w = libtcod.random_get_int(0, ROOM_MIN_SIZE, ROOM_MAX_SIZE)
h = libtcod.random_get_int(0, ROOM_MIN_SIZE, ROOM_MAX_SIZE)
# Random position without going out of the boundaries of the map.
x = libtcod.random_get_int(0, 0, MAP_WIDTH - w - 1)
y = libtcod.random_get_int(0, 0, MAP_HEIGHT - h - 1)
new_room = Rect(x, y, w, h)
# Run through the other rooms and see if they intersect with the new_room.
room_failed = False
for other_room in rooms:
if new_room.intersect(other_room):
room_failed = True
break
if not room_failed:
# There are no intersections, so this new_room is valid.
create_room(new_room)
# Create and place some gameobjects / monsters!
place_objects(new_room)
# Center coordinates of new room.
new_x, new_y = new_room.center()
if num_rooms == 0:
# If first room, initiate player at center tuple.
player.x = new_x
player.y = new_y
else: # If not the first room, make some tunnels.
# Center coordinates of previous room.
prev_x, prev_y = rooms[num_rooms - 1].center()
# Random 50/50 (random number that is either 0 or 1)
if libtcod.random_get_int(0, 0, 1) == 1:
# First move horizontally, then vertically.
create_h_tunnel(prev_x, new_x, prev_y)
create_v_tunnel(prev_y, new_y, new_x)
else:
# First move vertically, then horizontally.
create_v_tunnel(prev_y, new_y, prev_x)
create_h_tunnel(prev_x, new_x, new_y)
# Append the new room to the list of rooms.
rooms.append(new_room)
num_rooms += 1
# Create stairs at the center of the last room
stairs = GameObject(new_x, new_y, '>', 'stairs', libtcod.white, always_visible = True)
gameobjects.append(stairs)
stairs.send_to_back() # So it's drawn below the monsters
def message(new_msg, color = libtcod.white):
# Split the message along multiple lines if necessary.
new_msg_lines = textwrap.wrap(new_msg, MSG_WIDTH)
for line in new_msg_lines:
# If the buffer is full, remove the first line to make room for the new one
if len(game_msgs) == MSG_HEIGHT:
del game_msgs[0]
# Add the new line as a tuple, with the text and the color.
game_msgs.append( (line, color) )
def monster_death(monster):
# Transform monster into a corpse! Corpses don't block, can't be attacked and don't move.
message(monster.name.capitalize() + ' is dead! You gain ' + str(monster.fighter.xp) + ' experience points.', libtcod.orange)
monster.char = '%'
monster.color = libtcod.dark_red
monster.blocks = False
monster.fighter = None
monster.ai = None
monster.name = 'remains of ' + monster.name
monster.send_to_back()
def menu(header, options, width):
if len(options) > 26:
raise ValueError('Cannot have a menu with more than 26 options.')
# calculate total height for the header (after auto-wrap) WITH one line per option.
header_height = libtcod.console_get_height_rect(con, 0, 0, width, SCREEN_HEIGHT, header)
if header == '':
header_height = 0
height = len(options) + header_height
# Create an off-screen console that represents the menu's window.
window = libtcod.console_new(width, height)
# Print the header, with auto-wrap
libtcod.console_set_default_foreground(window, libtcod.white)
libtcod.console_print_rect_ex(window, 0, 0, width, height, libtcod.BKGND_NONE, libtcod.LEFT, header)
# Print all the options.
y = header_height
letter_index = ord('a')
for option_text in options:
text = '(' + chr(letter_index) + ') ' + option_text
libtcod.console_print_ex(window, 0, y, libtcod.BKGND_NONE, libtcod.LEFT, text)
y += 1
letter_index += 1
# Blit the contents of "window" to the root console.
x = SCREEN_WIDTH//2 - width//2
y = SCREEN_HEIGHT//2 - height//2
libtcod.console_blit(window, 0, 0, width, height, 0, x, y, 1.0, 0.7)
# Present the root console to the player and wait for a key-press.
libtcod.console_flush()
key = libtcod.console_wait_for_keypress(True)
if key.vk == libtcod.KEY_ENTER and key.lalt: #(special case) Alt+Enter: toggle fullscreen
libtcod.console_set_fullscreen(not libtcod.console_is_fullscreen())
# Convert the ASCII code to an index; if it corresponds to an option, return it.
index = key.c - ord('a')
if index >= 0 and index < len(options):
return index
return None
def msgbox(text, width = 50):
menu(text, [], width) # Use menu() as a sort of "message box".
def next_level():
global dungeon_level
# Advance to the next level
message('You take a moment to rest, and recover your strength.', libtcod.light_violet)
player.fighter.heal(player.fighter.max_hp // 2) #heal the player by 50%
message('After a rare moment of peace, you descend deeper into the heart of the dungeon...', libtcod.red)
dungeon_level += 1
make_map() # Create a fresh new level.
initialize_fov()
def new_game():
global game_msgs, game_state
global inventory, dungeon_level
global player
# Create the Player
player = GameObject(0, 0, '@', 'player', libtcod.white, blocks=True)
fighter_component = Fighter(player, hp = 100, defense = 1, power = 2, xp = 0, death_function = player_death)
player.level = 1
# Make the Map
dungeon_level = 1
make_map()
initialize_fov()
# Set Game State
game_state = 'playing'
# Create Inventory
inventory = []
# Create a list of game messages and their color.
game_msgs = []
# A warm welcoming message!
message('Welcome stranger! Prepare to perish in the Tombs of New Beginnings.', libtcod.red)
# Initial equipment: A simple dagger
obj = GameObject(0, 0, '-', 'dagger', libtcod.sky, always_visible = True)
equipment_component = Equipment(owner = obj, slot = 'right hand', power_bonus=2)
inventory.append(obj)
equipment_component.equip()
def place_objects(room):
    # Place monsters and items in the given room.
global gameobjects
# Maximum number of monsters per room.
max_monsters = from_dungeon_level([[2, 1], [3, 4], [5, 6]])
# Chance of each given monster.
monster_chances = {}
monster_chances['orc'] = 80 # Orcs always shows up, even if all other monsters have 0 chance
monster_chances['troll'] = from_dungeon_level([[15, 3], [30, 5], [60, 7]])
monster_chances['kobold'] = from_dungeon_level([[50, 1], [10, 3], [0, 5]])
monster_chances['skeleton'] = from_dungeon_level([[45, 1], [15, 3], [5, 4]])
monster_chances['blink dog'] = from_dungeon_level([[15, 2], [30, 5], [45, 8]])
# Maximum number of items per room.
max_items = from_dungeon_level([[1, 1], [2, 4]])
# Chance of each item (by default they have a chance of 0 at level 1, which then goes up)
item_chances = {}
item_chances['healing potion'] = 35 #healing potion always shows up, even if all other items have 0 chance
item_chances['lightning scroll'] = from_dungeon_level([[25, 4]])
item_chances['fireball scroll'] = from_dungeon_level([[25, 6]])
item_chances['confuse scroll'] = from_dungeon_level([[10, 2]])
item_chances['sword'] = from_dungeon_level([[5, 1], [10, 4]])
item_chances['wooden shield'] = from_dungeon_level([[5, 1], [15, 4]])
item_chances['bronze shield'] = from_dungeon_level([[5, 3], [10, 5]])
item_chances['cheap torch'] = from_dungeon_level([[15, 1], [0, 3]])
item_chances['sword of flame'] = from_dungeon_level([[10, 6]])
item_chances['wooden helm'] = from_dungeon_level([[10, 1], [5, 3]])
item_chances['amulet of health'] = from_dungeon_level([[10, 5], [15, 8]])
item_chances['leather armor'] = from_dungeon_level([[5, 1], [15, 3], [5, 5]])
item_chances['bronze armor'] = from_dungeon_level([[5, 3], [15, 5]])
#item_chances['ring of lesser regeneration'] = from_dungeon_level([[200, 1]])
# Choose random number of monsters.
num_monsters = libtcod.random_get_int(0, 0, max_monsters)
for i in range(num_monsters):
# Choose a random spot for each given monster.
x = libtcod.random_get_int(0, room.x1 + 1, room.x2 - 1)
y = libtcod.random_get_int(0, room.y1 + 1, room.y2 - 1)
# Only place object if x, y is not blocked.
if not is_blocked(x, y):
choice = random_choice(monster_chances)
# Create an orc.
if choice == 'orc':
monster = GameObject(x, y, 'o', 'orc', libtcod.desaturated_green, blocks = True)
fighter_component = Fighter(owner = monster, hp = 20, defense = 0, power = 4, xp = 35, death_function = monster_death)
ai_component = AI_BasicMonster(owner = monster)
# Create a troll.
elif choice == 'troll':
monster = GameObject(x, y, 'T', 'troll', libtcod.darker_green, blocks = True)
fighter_component = Fighter(monster, hp = 30, defense = 2, power = 8, xp = 100, death_function = monster_death)
ai_component = AI_BasicMonster(owner = monster)
# Create a kobold.
elif choice == 'kobold':
# Create more than one kobold in one monster 'slot.'
                kobold_num = libtcod.random_get_int(0, 0, max_monsters)
                for k in range(kobold_num + 1):
                    monster = GameObject(x, y, 'k', 'kobold', libtcod.darker_flame, blocks = True)
                    fighter_component = Fighter(monster, hp = 8, defense = 0, power = 3, xp = 20, death_function = monster_death)
                    ai_component = AI_BasicMonster(owner = monster)
                    # Append every kobold except the last; the last one is picked
                    # up by the shared append after this if/elif chain.
                    if k < kobold_num:
                        gameobjects.append(monster)
            # Create a skeleton.
            elif choice == 'skeleton':
                monster = GameObject(x, y, 'Z', 'skeleton', libtcod.white, blocks = True)
                fighter_component = Fighter(monster, hp = 5, defense = 3, power = 3, xp = 25, death_function = monster_death)
                ai_component = AI_BasicMonster(owner = monster)
            # Create a blink dog.
            elif choice == 'blink dog':
                monster = GameObject(x, y, 'b', 'blink dog', libtcod.dark_fuchsia, blocks = True)
                fighter_component = Fighter(monster, hp = 20, defense = 0, power = 4, xp = 55, dodge = 20, death_function = monster_death)
                ai_component = AI_BasicMonster(owner = monster)
gameobjects.append(monster)
# Choose random number of items.
num_items = libtcod.random_get_int(0, 0, max_items)
for i in range(num_items):
# Choose random spot for this item.
x = libtcod.random_get_int(0, room.x1+1, room.x2-1)
y = libtcod.random_get_int(0, room.y1+1, room.y2-1)
# Only place it if the tile is not blocked.
if not is_blocked(x, y):
choice = random_choice(item_chances)
# Create a healing potion.
if choice == 'healing potion':
item = GameObject(x, y, '!', 'healing potion', libtcod.violet)
item_component = Item(owner = item, use_function = cast_heal)
            # Create a lightning bolt scroll.
            elif choice == 'lightning scroll':
                item = GameObject(x, y, '#', 'scroll of lightning bolt', libtcod.light_yellow)
                # Assumes a cast_lightning spell function, mirroring cast_confuse
                # and cast_fireball; the original passed cast_heal here.
                item_component = Item(owner = item, use_function = cast_lightning)
# Create a confuse scroll.
elif choice == 'confuse scroll':
item = GameObject(x, y, '#', 'scroll of confusion', libtcod.light_yellow)
item_component = Item(owner = item, use_function = cast_confuse)
# Create a fireball scroll.
elif choice == 'fireball scroll':
item = GameObject(x, y, '#', 'scroll of fireball', libtcod.light_yellow)
item_component = Item(owner = item, use_function = cast_fireball)
# Create a sword.
elif choice == 'sword':
item = GameObject(x, y, '/', 'sword', libtcod.sky)
equipment_component = Equipment(owner = item, slot='right hand', power_bonus = 3)
# Create a wooden shield.
elif choice == 'wooden shield':
item = GameObject(x, y, '[', 'wooden shield', libtcod.darker_orange)
equipment_component = Equipment(owner = item, slot = 'left hand', dodge_bonus = 5)
# Create a bronze shield.
elif choice == 'bronze shield':
item = GameObject(x, y, '[', 'bronze shield', libtcod.sepia)
equipment_component = Equipment(owner = item, slot = 'left hand', dodge_bonus = 10)
# Create a torch.
elif choice == 'cheap torch':
item = GameObject(x, y, 'i', 'cheap torch', libtcod.dark_orange)
equipment_component = Equipment(owner = item, slot = 'left hand', torch_bonus = 2)
# Create a sword of flame.
elif choice == 'sword of flame':
item = GameObject(x, y, '/', 'sword of flame', libtcod.dark_orange)
equipment_component = Equipment(owner = item, slot = 'left hand', torch_bonus = 2, power_bonus = 3)
# Create a wooden helm.
elif choice == 'wooden helm':
item = GameObject(x, y, 'n', 'wooden helm', libtcod.darker_orange)
equipment_component = Equipment(owner = item, slot = 'head', defense_bonus = 1)
# Create an amulet of health.
elif choice == 'amulet of health':
                item = GameObject(x, y, '"', 'amulet of health', libtcod.darker_orange)
equipment_component = Equipment(owner = item, slot = 'neck', max_hp_bonus = 10)
# Create leather armor.
elif choice == 'leather armor':
item = GameObject(x, y, '[', 'leather armor', libtcod.desaturated_orange)
equipment_component = Equipment(owner = item, slot = 'chest', defense_bonus = 1)
# Create bronze armor.
elif choice == 'bronze armor':
item = GameObject(x, y, '[', 'bronze armor', libtcod.sepia)
equipment_component = Equipment(owner = item, slot = 'chest', defense_bonus = 3)
# Create ring of lesser regeneration.
#elif choice == 'ring of lesser regeneration':
# item = GameObject(x, y, '=', 'ring of lesser regeneration', libtcod.sepia)
# equipment_component = Equipment(owner = item, slot = 'finger')
# status_component = Status_Item_Regen(item, 1, 100)
# Add item to all gameobjects on map.
gameobjects.append(item)
item.send_to_back() # Items appear below other gameobjects.
def play_game():
global key, mouse, gameobjects
player_action = None
mouse = libtcod.Mouse()
key = libtcod.Key()
# Play Game
while not libtcod.console_is_window_closed():
# Render the screen.
libtcod.sys_check_for_event( libtcod.EVENT_KEY_PRESS | libtcod.EVENT_MOUSE, key, mouse)
render_all()
libtcod.console_flush()
check_level_up()
        for obj in gameobjects:
            obj.clear()
# Handle key input and exit game if needed.
player_action = handle_keys()
if player_action == 'exit':
save_game()
break
# Let the monsters take their turn.
if game_state == 'playing' and player_action != 'didnt-take-turn':
            for obj in gameobjects:
                if obj.ai:
                    obj.ai.take_turn()
                if obj.status_effect:
                    obj.status_effect.take_turn()
def player_death(player):
# Player dead. The game ended!
global game_state
message('You died!', libtcod.red)
game_state = 'dead'
# For added effect, transform the player into a corpse.
player.char = '%'
player.color = libtcod.dark_red
def player_move_or_attack(dx, dy):
global fov_recompute
# The coordinates the player is attempting to move to / attack.
x = player.x + dx
y = player.y + dy
# Check for attackable object at coordinates.
target = None
for chk_object in gameobjects:
if chk_object.x == x and chk_object.y == y and chk_object.fighter:
target = chk_object
break
# Attack the target if found, otherwise move player.
if target is not None:
player.fighter.attack(target)
else:
player.move(dx, dy)
fov_recompute = True
def random_choice_index(chances):
# Choose one option from list of chances, returning its index
# The dice will land on some number between 1 and the sum of the chances.
dice = libtcod.random_get_int(0, 1, sum(chances))
# Go through all chances, keeping the sum so far.
running_sum = 0
choice = 0
for w in chances:
running_sum += w
# See if the dice landed in the part that corresponds to this choice.
if dice <= running_sum:
return choice
choice += 1
def random_choice(chances_dict):
# Choose one option from dictionary of chances, returning its key.
chances = list(chances_dict.values())
strings = list(chances_dict.keys())
return strings[random_choice_index(chances)]
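# Example: random_choice({'orc': 80, 'troll': 20}) rolls a die from 1-100 and
# returns 'orc' for 1-80, 'troll' for 81-100 -- an 80%/20% weighted pick.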
def render_all():
global color_light_ground, color_light_wall
global color_dark_ground, color_dark_wall
global fov_recompute
global fov_map, map
global light
if fov_recompute:
# Recompute the FOV if needed (the player moved or something has changed the FOV)
fov_recompute = False
libtcod.map_compute_fov(fov_map, player.x, player.y, light.TORCH_RADIUS, FOV_LIGHT_WALLS, FOV_ALGO)
for y in range(MAP_HEIGHT):
for x in range(MAP_WIDTH):
visible = libtcod.map_is_in_fov(fov_map, x, y)
wall = map[x][y].block_sight
if not visible:
# This means it's outside of the player's FOV
if map[x][y].explored:
# Only render tiles outside FOV if they've been explored.
if wall:
libtcod.console_set_char_background(con, x, y, color_dark_wall, libtcod.BKGND_SET)
else:
libtcod.console_set_char_background(con, x, y, color_dark_ground, libtcod.BKGND_SET)
else:
# This means it's visible
if wall:
libtcod.console_set_char_background(con, x, y, color_light_wall, libtcod.BKGND_SET)
else:
libtcod.console_set_char_background(con, x, y, color_light_ground, libtcod.BKGND_SET)
# It is visible, and as such, has been explored.
map[x][y].explored = True
# Draw all gameobjects in the object list.
    for obj in gameobjects:
        if obj != player:
            obj.draw()
player.draw()
# Blit the contents of 'con' to the root console.
libtcod.console_blit(con, 0, 0, SCREEN_WIDTH, SCREEN_HEIGHT, 0, 0, 0)
# Prepare to render the GUI panel.
libtcod.console_set_default_background(panel, libtcod.black)
libtcod.console_clear(panel)
# Print the game messages, one line at a time.
y = 1
for (line, color) in game_msgs:
libtcod.console_set_default_foreground(panel, color)
libtcod.console_print_ex(panel, MSG_X, y, libtcod.BKGND_NONE, libtcod.LEFT, line)
y += 1
# Show the player's stats.
render_bar(1, 1, BAR_WIDTH, 'HP', player.fighter.hp, player.fighter.max_hp, libtcod.light_red, libtcod.darker_red)
libtcod.console_print_ex(panel, 1, 3, libtcod.BKGND_NONE, libtcod.LEFT, 'Dungeon level ' + str(dungeon_level))
# Display names of gameobjects under the mouse.
libtcod.console_set_default_foreground(panel, libtcod.light_gray)
libtcod.console_print_ex(panel, 1, 0, libtcod.BKGND_NONE, libtcod.LEFT, get_names_under_mouse())
# Blit the contents of "panel" to the root console.
libtcod.console_blit(panel, 0, 0, SCREEN_WIDTH, PANEL_HEIGHT, 0, 0, PANEL_Y)
def render_bar(x, y, total_width, name, value, maximum, bar_color, back_color):
# Render a bar (e.g., HP, experience, etc). First; calculate the width of the bar:
bar_width = int(float(value) / maximum * total_width)
# Render the background color.
libtcod.console_set_default_background(panel, back_color)
libtcod.console_rect(panel, x, y, total_width, 1, False, libtcod.BKGND_SCREEN)
# Now render the bar on top.
libtcod.console_set_default_background(panel, bar_color)
if bar_width > 0:
libtcod.console_rect(panel, x, y, bar_width, 1, False, libtcod.BKGND_SCREEN)
# Then, centered text with current and max values.
libtcod.console_set_default_foreground(panel, libtcod.white)
libtcod.console_print_ex(panel, x + total_width // 2, y, libtcod.BKGND_NONE, libtcod.CENTER, name + ': ' + str(value) + '/' + str(maximum))
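# Example: render_bar(1, 1, 20, 'HP', 25, 100, ...) fills 5 of the 20 cells
# (int(25 / 100 * 20)) and captions the bar 'HP: 25/100'.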
def save_game():
# Open a new empty shelve (possibly overwriting an old one) to write the game data.
file = shelve.open('savegame', 'n')
file['map'] = map
file['objects'] = gameobjects
file['player_index'] = gameobjects.index(player) #index of player in gameobjects list
file['inventory'] = inventory
file['game_msgs'] = game_msgs
file['game_state'] = game_state
file['stairs_index'] = gameobjects.index(stairs)
file['dungeon_level'] = dungeon_level
file.close()
def target_monster(max_range = None):
# Returns a clicked monster within FOV and within a range, or None if right-clicked.
while True:
(x, y) = target_tile(max_range)
if x is None: # Player cancelled
return None
# Return first clicked monster, otherwise keep looping.
for obj in gameobjects:
if obj.x == x and obj.y == y and obj.fighter and obj != player:
return obj
def target_tile(max_range = None):
# Return the position of a tile left-clicked in player's FOV (optionally in a range), or (None, None) if right-clicked.
global key, mouse
while True:
# Render the screen. This erases the inventory and shows the names of gameobjects under the mouse.
libtcod.console_flush()
libtcod.sys_check_for_event( libtcod.EVENT_KEY_PRESS | libtcod.EVENT_MOUSE, key, mouse)
render_all()
(x, y) = (mouse.cx, mouse.cy)
# Cancel if the player right-clicked or pressed Escape.
if mouse.rbutton_pressed or key.vk == libtcod.KEY_ESCAPE:
return (None, None)
# Accept if the player clicked in FOV, and in range if applicable.
if (mouse.lbutton_pressed and libtcod.map_is_in_fov(fov_map, x, y) and (max_range is None or player.distance(x, y) <= max_range)):
return (x, y)
#############################################
# Initialization of Main Loop
#############################################
libtcod.console_set_custom_font(b'arial12x12.png', libtcod.FONT_TYPE_GREYSCALE | libtcod.FONT_LAYOUT_TCOD)
libtcod.console_init_root(SCREEN_WIDTH, SCREEN_HEIGHT, b'python/libtcod tutorial', False)
libtcod.sys_set_fps(LIMIT_FPS)
con = libtcod.console_new(SCREEN_WIDTH, SCREEN_HEIGHT)
panel = libtcod.console_new(SCREEN_WIDTH, PANEL_HEIGHT)
main_menu()
| emersonp/roguelike | rl.py | Python | mit | 45,673 | 0.021085 |
#!/usr/bin/env python
# coding: utf-8
from __future__ import unicode_literals
import logging
import click
import socket
from mkdocs import __version__
from mkdocs import utils
from mkdocs import exceptions
from mkdocs import config
from mkdocs.commands import build, gh_deploy, new, serve
log = logging.getLogger(__name__)
# Disable the warning that Click displays (as of Click version 5.0) when users
# use unicode_literals in Python 2.
# See http://click.pocoo.org/dev/python3/#unicode-literals for more details.
click.disable_unicode_literals_warning = True
class State(object):
''' Maintain logging level.'''
def __init__(self, log_name='mkdocs', level=logging.INFO):
self.logger = logging.getLogger(log_name)
self.logger.propagate = False
stream = logging.StreamHandler()
formatter = logging.Formatter("%(levelname)-7s - %(message)s ")
stream.setFormatter(formatter)
self.logger.addHandler(stream)
self.logger.setLevel(level)
pass_state = click.make_pass_decorator(State, ensure=True)
def verbose_option(f):
def callback(ctx, param, value):
state = ctx.ensure_object(State)
if value:
state.logger.setLevel(logging.DEBUG)
return click.option('-v', '--verbose',
is_flag=True,
expose_value=False,
help='Enable verbose output',
callback=callback)(f)
def quiet_option(f):
def callback(ctx, param, value):
state = ctx.ensure_object(State)
if value:
state.logger.setLevel(logging.ERROR)
return click.option('-q', '--quiet',
is_flag=True,
expose_value=False,
help='Silence warnings',
callback=callback)(f)
def common_options(f):
f = verbose_option(f)
f = quiet_option(f)
return f
clean_help = "Remove old files from the site_dir before building (the default)."
config_help = "Provide a specific MkDocs config"
dev_addr_help = ("IP address and port to serve documentation locally (default: "
"localhost:8000)")
strict_help = ("Enable strict mode. This will cause MkDocs to abort the build "
"on any warnings.")
theme_dir_help = "The theme directory to use when building your documentation."
theme_help = "The theme to use when building your documentation."
theme_choices = utils.get_theme_names()
site_dir_help = "The directory to output the result of the documentation build."
reload_help = "Enable the live reloading in the development server (this is the default)"
no_reload_help = "Disable the live reloading in the development server."
dirty_reload_help = "Enable the live reloading in the development server, but only re-build files that have changed"
commit_message_help = ("A commit message to use when committing to the "
                       "GitHub Pages remote branch")
remote_branch_help = ("The remote branch to commit to for GitHub Pages. This "
                      "overrides the value specified in config")
remote_name_help = ("The remote name to commit to for GitHub Pages. This "
                    "overrides the value specified in config")
force_help = "Force the push to the repository."
@click.group(context_settings={'help_option_names': ['-h', '--help']})
@click.version_option(__version__, '-V', '--version')
@common_options
def cli():
"""
MkDocs - Project documentation with Markdown.
"""
@cli.command(name="serve")
@click.option('-f', '--config-file', type=click.File('rb'), help=config_help)
@click.option('-a', '--dev-addr', help=dev_addr_help, metavar='<IP:PORT>')
@click.option('-s', '--strict', is_flag=True, help=strict_help)
@click.option('-t', '--theme', type=click.Choice(theme_choices), help=theme_help)
@click.option('-e', '--theme-dir', type=click.Path(), help=theme_dir_help)
@click.option('--livereload', 'livereload', flag_value='livereload', help=reload_help, default=True)
@click.option('--no-livereload', 'livereload', flag_value='no-livereload', help=no_reload_help)
@click.option('--dirtyreload', 'livereload', flag_value='dirty', help=dirty_reload_help)
@common_options
def serve_command(dev_addr, config_file, strict, theme, theme_dir, livereload):
"""Run the builtin development server"""
logging.getLogger('tornado').setLevel(logging.WARNING)
# Don't override config value if user did not specify --strict flag
# Conveniently, load_config drops None values
strict = strict or None
try:
serve.serve(
config_file=config_file,
dev_addr=dev_addr,
strict=strict,
theme=theme,
theme_dir=theme_dir,
livereload=livereload
)
except (exceptions.ConfigurationError, socket.error) as e: # pragma: no cover
# Avoid ugly, unhelpful traceback
raise SystemExit('\n' + str(e))
@cli.command(name="build")
@click.option('-c', '--clean/--dirty', is_flag=True, default=True, help=clean_help)
@click.option('-f', '--config-file', type=click.File('rb'), help=config_help)
@click.option('-s', '--strict', is_flag=True, help=strict_help)
@click.option('-t', '--theme', type=click.Choice(theme_choices), help=theme_help)
@click.option('-e', '--theme-dir', type=click.Path(), help=theme_dir_help)
@click.option('-d', '--site-dir', type=click.Path(), help=site_dir_help)
@common_options
def build_command(clean, config_file, strict, theme, theme_dir, site_dir):
"""Build the MkDocs documentation"""
# Don't override config value if user did not specify --strict flag
# Conveniently, load_config drops None values
strict = strict or None
try:
build.build(config.load_config(
config_file=config_file,
strict=strict,
theme=theme,
theme_dir=theme_dir,
site_dir=site_dir
), dirty=not clean)
except exceptions.ConfigurationError as e: # pragma: no cover
# Avoid ugly, unhelpful traceback
raise SystemExit('\n' + str(e))
@cli.command(name="json")
@click.option('-c', '--clean/--dirty', is_flag=True, default=True, help=clean_help)
@click.option('-f', '--config-file', type=click.File('rb'), help=config_help)
@click.option('-s', '--strict', is_flag=True, help=strict_help)
@click.option('-d', '--site-dir', type=click.Path(), help=site_dir_help)
@common_options
def json_command(clean, config_file, strict, site_dir):
"""Build the MkDocs documentation to JSON files
Rather than building your documentation to HTML pages, this
outputs each page in a simple JSON format. This command is
useful if you want to index your documentation in an external
search engine.
"""
log.warning("The json command is deprecated and will be removed in a "
"future MkDocs release. For details on updating: "
"http://www.mkdocs.org/about/release-notes/")
# Don't override config value if user did not specify --strict flag
# Conveniently, load_config drops None values
strict = strict or None
try:
build.build(config.load_config(
config_file=config_file,
strict=strict,
site_dir=site_dir
), dump_json=True, dirty=not clean)
except exceptions.ConfigurationError as e: # pragma: no cover
# Avoid ugly, unhelpful traceback
raise SystemExit('\n' + str(e))
@cli.command(name="gh-deploy")
@click.option('-c', '--clean/--dirty', is_flag=True, default=True, help=clean_help)
@click.option('-f', '--config-file', type=click.File('rb'), help=config_help)
@click.option('-m', '--message', help=commit_message_help)
@click.option('-b', '--remote-branch', help=remote_branch_help)
@click.option('-r', '--remote-name', help=remote_name_help)
@click.option('--force', is_flag=True, help=force_help)
@common_options
def gh_deploy_command(config_file, clean, message, remote_branch, remote_name, force):
"""Deploy your documentation to GitHub Pages"""
try:
cfg = config.load_config(
config_file=config_file,
remote_branch=remote_branch,
remote_name=remote_name
)
build.build(cfg, dirty=not clean)
gh_deploy.gh_deploy(cfg, message=message, force=force)
except exceptions.ConfigurationError as e: # pragma: no cover
# Avoid ugly, unhelpful traceback
raise SystemExit('\n' + str(e))
@cli.command(name="new")
@click.argument("project_directory")
@common_options
def new_command(project_directory):
"""Create a new MkDocs project"""
new.new(project_directory)
if __name__ == '__main__': # pragma: no cover
cli()
| lukfor/mkdocs | mkdocs/__main__.py | Python | bsd-2-clause | 8,721 | 0.00172 |
# Copyright 2021 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for no_pivot_ldl."""
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.experimental.linalg.no_pivot_ldl import no_pivot_ldl
from tensorflow_probability.python.experimental.linalg.no_pivot_ldl import simple_robustified_cholesky
from tensorflow_probability.python.internal import test_util
@test_util.test_all_tf_execution_regimes
class NoPivotLDLTest(test_util.TestCase):
def _randomDiag(self, n, batch_shape, low, high, forcemin=None, seed=42):
np.random.seed(seed)
shape = batch_shape + [n]
diag = np.random.uniform(low, high, size=shape)
if forcemin:
assert forcemin < low
diag = np.where(diag == np.min(diag, axis=-1)[..., np.newaxis],
forcemin, diag)
return diag
def _randomTril(self, n, batch_shape, seed=42):
np.random.seed(seed)
unit_tril = np.random.standard_normal(batch_shape + [n, n])
unit_tril = np.tril(unit_tril)
unit_tril[..., range(n), range(n)] = 1.
return unit_tril
def _randomSymmetricMatrix(self, n, batch_shape, low, high,
forcemin=None, seed=42):
diag = self._randomDiag(n, batch_shape, low, high, forcemin, seed)
unit_tril = self._randomTril(n, batch_shape, seed)
return np.einsum('...ij,...j,...kj->...ik', unit_tril, diag, unit_tril)
def testLDLRandomPSD(self):
matrix = self._randomSymmetricMatrix(
10, [2, 1, 3], 1e-6, 10., forcemin=0., seed=42)
left, diag = self.evaluate(no_pivot_ldl(matrix))
reconstruct = np.einsum('...ij,...j,...kj->...ik', left, diag, left)
self.assertAllClose(matrix, reconstruct)
def testLDLIndefinite(self):
matrix = [[1., 2.], [2., 1.]]
left, diag = self.evaluate(no_pivot_ldl(matrix))
reconstruct = np.einsum('...ij,...j,...kj->...ik', left, diag, left)
self.assertAllClose(matrix, reconstruct)
def testSimpleIsCholeskyRandomPD(self):
matrix = self._randomSymmetricMatrix(10, [2, 1, 3], 1e-6, 10., seed=42)
chol, left = self.evaluate(
(tf.linalg.cholesky(matrix),
simple_robustified_cholesky(matrix)))
self.assertAllClose(chol, left)
def testSimpleIndefinite(self):
matrix = [[1., 2.], [2., 1.]]
left = self.evaluate(
simple_robustified_cholesky(matrix, tol=.1))
reconstruct = np.einsum('...ij,...kj->...ik', left, left)
eigv, _ = self.evaluate(tf.linalg.eigh(reconstruct))
self.assertAllTrue(eigv > 0.)
def testXlaCompileBug(self):
inp = tf.Variable([[2., 1.], [1., 2.]])
self.evaluate(inp.initializer)
alt_chol = simple_robustified_cholesky
alt_chol_nojit = tf.function(alt_chol, autograph=False, jit_compile=False)
alt_chol_jit = tf.function(alt_chol, autograph=False, jit_compile=True)
answer = np.array([[1.4142135, 0.], [0.70710677, 1.2247449]])
self.assertAllClose(self.evaluate(alt_chol(inp)), answer)
self.assertAllClose(self.evaluate(alt_chol_nojit(inp)), answer)
self.assertAllClose(self.evaluate(alt_chol_jit(inp)), answer)
with tf.GradientTape():
chol_with_grad = alt_chol(inp)
chol_nojit_with_grad = alt_chol_nojit(inp)
# Not supported by TF-XLA (WAI), see b/193584244
# chol_jit_with_grad = alt_chol_jit(inp)
self.assertAllClose(self.evaluate(chol_with_grad), answer)
self.assertAllClose(self.evaluate(chol_nojit_with_grad), answer)
# But wrapping the tape in tf.function should work.
@tf.function(autograph=False, jit_compile=True)
def jit_with_grad(mat):
with tf.GradientTape():
return alt_chol_jit(mat)
self.assertAllClose(self.evaluate(jit_with_grad(inp)), answer)
if __name__ == '__main__':
test_util.main()
| tensorflow/probability | tensorflow_probability/python/experimental/linalg/no_pivot_ldl_test.py | Python | apache-2.0 | 4,358 | 0.00413 |
# -*- Mode: Python; test-case-name: flumotion.test.test_ui_fgtk -*-
# vi:si:et:sw=4:sts=4:ts=4
#
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007 Fluendo, S.L. (www.fluendo.com).
# All rights reserved.
# This file may be distributed and/or modified under the terms of
# the GNU General Public License version 2 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.GPL" in the source distribution for more information.
# Licensees having purchased or holding a valid Flumotion Advanced
# Streaming Server license may use this file in accordance with the
# Flumotion Advanced Streaming Server Commercial License Agreement.
# See "LICENSE.Flumotion" in the source distribution for more information.
# Headers in this file shall remain intact.
"""
I am a collection of extended GTK widgets for use in Flumotion.
"""
import gobject
from kiwi.ui.widgets.checkbutton import ProxyCheckButton
from kiwi.ui.widgets.combo import ProxyComboBox
from kiwi.ui.widgets.entry import ProxyEntry
from kiwi.ui.widgets.radiobutton import ProxyRadioButton
from kiwi.ui.widgets.spinbutton import ProxySpinButton
__version__ = "$Rev$"
class FProxyComboBox(ProxyComboBox):
def set_enum(self, enum_class, value_filter=()):
"""
Set the given enum_class on the combobox.
As a side effect, this makes the combobox an enum-based one.
This also sets the combobox to the first enum value.
"""
values = []
for enum in enum_class:
# If values are specified, filter them out
if value_filter and not enum in value_filter:
continue
values.append((enum.nick, enum))
self.prefill(values)
class ProxyWidgetMapping:
# In PyGTK 2.4.0 gtk.glade.XML type_dict parameter is buggy
# If it can't find the name it raises a silent KeyError which
# will be raised at random point later (as soon some code call
# PyErr_Occurred()), to avoid this, we reimplement the function
# as it is internally, eg failback to the real GType, by doing
# this PyMapping_GetItemString will never set the error.
types = {'GtkCheckButton': ProxyCheckButton,
'GtkComboBox': FProxyComboBox,
'GtkEntry': ProxyEntry,
'GtkRadioButton': ProxyRadioButton,
'GtkSpinButton': ProxySpinButton}
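    # e.g. self['GtkEntry'] returns the kiwi ProxyEntry, while an unmapped
    # name such as 'GtkLabel' falls back to gobject.type_from_name('GtkLabel').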
def __getitem__(self, name):
if name in self.types:
return self.types[name]
else:
return gobject.type_from_name(name)
| ylatuya/Flumotion | flumotion/ui/fgtk.py | Python | gpl-2.0 | 2,667 | 0.000375 |
import sys
from .space_delimited import SpaceDelimited
try:
from nltk.stem.snowball import SnowballStemmer
stemmer = SnowballStemmer("french")
except ValueError:
raise ImportError("Could not load stemmer for {0}. ".format(__name__))
try:
from nltk.corpus import stopwords as nltk_stopwords
stopwords = set(nltk_stopwords.words('french') + ["a"])
except LookupError:
raise ImportError("Could not load stopwords for {0}. ".format(__name__) +
"You may need to install the nltk 'stopwords' " +
"corpora. See http://www.nltk.org/data.html")
try:
import enchant
dictionary = enchant.Dict("fr")
except enchant.errors.DictNotFoundError:
raise ImportError("No enchant-compatible dictionary found for 'fr'. " +
"Consider installing 'myspell-fr'.")
badwords = [
r"con",
r"fesse", r"foutre",
r"merde+", r"merdique",
r"prostituee?", r"putain", r"putes",
r"salop", r"stupide",
]
sys.modules[__name__] = SpaceDelimited(
__name__,
doc="""
french
======
revision
--------
.. autoattribute:: revision.words
.. autoattribute:: revision.content_words
.. autoattribute:: revision.badwords
.. autoattribute:: revision.misspellings
.. autoattribute:: revision.infonoise
parent_revision
---------------
.. autoattribute:: parent_revision.words
.. autoattribute:: parent_revision.content_words
.. autoattribute:: parent_revision.badwords
.. autoattribute:: parent_revision.misspellings
.. autoattribute:: parent_revision.infonoise
diff
----
.. autoattribute:: diff.words_added
.. autoattribute:: diff.words_removed
.. autoattribute:: diff.badwords_added
.. autoattribute:: diff.badwords_removed
.. autoattribute:: diff.misspellings_added
.. autoattribute:: diff.misspellings_removed
""",
badwords=badwords,
dictionary=dictionary,
stemmer=stemmer,
stopwords=stopwords
)
| ToAruShiroiNeko/revscoring | revscoring/languages/french.py | Python | mit | 1,905 | 0 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import sys
import pytest
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
from ansible.compat.tests.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.bigip_monitor_tcp_echo import Parameters
from library.bigip_monitor_tcp_echo import ModuleManager
from library.bigip_monitor_tcp_echo import ArgumentSpec
from library.bigip_monitor_tcp_echo import HAS_F5SDK
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_monitor_tcp_echo import Parameters
from ansible.modules.network.f5.bigip_monitor_tcp_echo import ModuleManager
from ansible.modules.network.f5.bigip_monitor_tcp_echo import ArgumentSpec
from ansible.modules.network.f5.bigip_monitor_tcp_echo import HAS_F5SDK
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
name='foo',
parent='parent',
ip='10.10.10.10',
interval=20,
timeout=30,
time_until_up=60,
partition='Common'
)
p = Parameters(params=args)
assert p.name == 'foo'
assert p.parent == '/Common/parent'
assert p.ip == '10.10.10.10'
assert p.type == 'tcp_echo'
assert p.destination == '10.10.10.10'
assert p.interval == 20
assert p.timeout == 30
assert p.time_until_up == 60
def test_module_parameters_ints_as_strings(self):
args = dict(
name='foo',
parent='parent',
ip='10.10.10.10',
interval='20',
timeout='30',
time_until_up='60',
partition='Common'
)
p = Parameters(params=args)
assert p.name == 'foo'
assert p.parent == '/Common/parent'
assert p.ip == '10.10.10.10'
assert p.type == 'tcp_echo'
assert p.destination == '10.10.10.10'
assert p.interval == 20
assert p.timeout == 30
assert p.time_until_up == 60
def test_api_parameters(self):
args = dict(
name='foo',
defaultsFrom='/Common/parent',
destination='10.10.10.10',
interval=20,
timeout=30,
timeUntilUp=60
)
p = Parameters(params=args)
assert p.name == 'foo'
assert p.parent == '/Common/parent'
assert p.ip == '10.10.10.10'
assert p.type == 'tcp_echo'
assert p.destination == '10.10.10.10'
assert p.interval == 20
assert p.timeout == 30
assert p.time_until_up == 60
class TestManagerEcho(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
def test_create_monitor(self, *args):
set_module_args(dict(
name='foo',
ip='10.10.10.10',
interval=20,
timeout=30,
time_until_up=60,
server='localhost',
password='password',
user='admin'
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods in the specific type of manager
mm = ModuleManager(module=module)
mm.exists = Mock(side_effect=[False, True])
mm.create_on_device = Mock(return_value=True)
results = mm.exec_module()
assert results['changed'] is True
def test_create_monitor_idempotent(self, *args):
set_module_args(dict(
name='foo',
ip='10.10.10.10',
interval=20,
timeout=30,
time_until_up=60,
server='localhost',
password='password',
user='admin'
))
current = Parameters(params=load_fixture('load_ltm_monitor_tcp_echo.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods in the specific type of manager
mm = ModuleManager(module=module)
mm.exists = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=current)
results = mm.exec_module()
assert results['changed'] is False
def test_update_interval(self, *args):
set_module_args(dict(
name='foo',
interval=10,
server='localhost',
password='password',
user='admin'
))
current = Parameters(params=load_fixture('load_ltm_monitor_tcp_echo.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods in the specific type of manager
mm = ModuleManager(module=module)
mm.exists = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=current)
mm.update_on_device = Mock(return_value=True)
results = mm.exec_module()
assert results['changed'] is True
assert results['interval'] == 10
def test_update_interval_larger_than_existing_timeout(self, *args):
set_module_args(dict(
name='foo',
interval=30,
server='localhost',
password='password',
user='admin'
))
current = Parameters(params=load_fixture('load_ltm_monitor_tcp_echo.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods in the specific type of manager
mm = ModuleManager(module=module)
mm.exists = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=current)
mm.update_on_device = Mock(return_value=True)
with pytest.raises(F5ModuleError) as ex:
mm.exec_module()
assert "must be less than" in str(ex)
def test_update_interval_larger_than_new_timeout(self, *args):
set_module_args(dict(
name='foo',
interval=10,
timeout=5,
server='localhost',
password='password',
user='admin'
))
current = Parameters(params=load_fixture('load_ltm_monitor_tcp_echo.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods in the specific type of manager
mm = ModuleManager(module=module)
mm.exists = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=current)
mm.update_on_device = Mock(return_value=True)
with pytest.raises(F5ModuleError) as ex:
mm.exec_module()
assert "must be less than" in str(ex)
def test_update_timeout(self, *args):
set_module_args(dict(
name='foo',
timeout=300,
server='localhost',
password='password',
user='admin'
))
current = Parameters(params=load_fixture('load_ltm_monitor_tcp_echo.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods in the specific type of manager
mm = ModuleManager(module=module)
mm.exists = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=current)
mm.update_on_device = Mock(return_value=True)
results = mm.exec_module()
assert results['changed'] is True
assert results['timeout'] == 300
def test_update_time_until_up(self, *args):
set_module_args(dict(
name='foo',
time_until_up=300,
server='localhost',
password='password',
user='admin'
))
current = Parameters(params=load_fixture('load_ltm_monitor_tcp_echo.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods in the specific type of manager
mm = ModuleManager(module=module)
mm.exists = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=current)
mm.update_on_device = Mock(return_value=True)
results = mm.exec_module()
assert results['changed'] is True
assert results['time_until_up'] == 300
| rahushen/ansible | test/units/modules/network/f5/test_bigip_monitor_tcp_echo.py | Python | gpl-3.0 | 10,010 | 0.001199 |
# Required environmental variables:
# * DATABASE_URL
# * MINICASH_LOCAL_DIR
# * MINICASH_SECRET_KEY
from .base import *
DEBUG = False
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
| BasicWolf/minicash | src/minicash/app/settings/heroku.py | Python | apache-2.0 | 472 | 0 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'Customer points based on invoice amounts',
'version': '1.0',
'category': 'Generic Modules',
'author': 'Rajkumar',
'website': 'http://www.openerp.com',
'depends': ['product','base','account'],
'init_xml': [ ],
'update_xml': ['customer_commission.xml','customer_commission_board_view.xml'],
'demo_xml': [ ],
'test': [ ],
'installable': True,
'active': False,
    'description': """Customer points are created based on invoice amounts;
    these points can then be used to reduce the amount of later invoices."""
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| ksrajkumar/openerp-6.1 | openerp/addons/itara_customer_commission/__openerp__.py | Python | agpl-3.0 | 1,639 | 0.005491 |
import time
from datetime import timedelta
from typing import List
from treeherder.config import settings
from treeherder.perf.sheriffing_criteria import (
EngineerTractionFormula,
FixRatioFormula,
CriteriaTracker,
TotalAlertsFormula,
)
from treeherder.perf.sheriffing_criteria import criteria_tracking
from mo_times import Duration
from django.core.management.base import BaseCommand
def pretty_enumerated(formulas: List[str]) -> str:
comma = ', '
return ' & '.join(comma.join(formulas).rsplit(comma, maxsplit=1))
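# e.g. pretty_enumerated(['engineer traction', 'fix ratio']) returns
# 'engineer traction & fix ratio'; with three items the result is 'a, b & c'.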
class Command(BaseCommand):
ENGINEER_TRACTION = 'engineer traction'
FIX_RATIO = 'fix ratio'
FORMULAS = [ENGINEER_TRACTION, FIX_RATIO] # register new formulas here
help = f'''
Compute the {pretty_enumerated(FORMULAS)} for multiple framework/suite combinations,
according to the Perf Sheriffing Criteria specification.\nRequires "{criteria_tracking.CRITERIA_FILENAME}" to be provided for both program input & output.
'''
INITIAL_PROMPT_MSG = 'Computing Perf Sheriffing Criteria... (may take some time)'
PRECISION = '.1f'
def add_arguments(self, parser):
parser.add_argument(
'--quantifying-period',
'-qp',
default=settings.QUANTIFYING_PERIOD,
type=self.parse_time_interval,
help='''How far back to look for gathering formula's input data, from now.
Expressed in a humanized form.
Examples: 1year, 6month, 2weeks etc.
More details about accepted forms: https://github.com/mozilla/ActiveData/blob/dev/docs/jx_time.md#duration''',
metavar='QUANTIFYING_PERIOD',
)
parser.add_argument(
'--bug-cooldown',
'-bc',
default=settings.BUG_COOLDOWN_TIME,
type=self.parse_time_interval,
help='''How old Bugzilla bugs should be to be taken into consideration.
Expressed in a humanized form.
Examples: 1year, 6month, 2weeks etc.
More details about accepted forms: https://github.com/mozilla/ActiveData/blob/dev/docs/jx_time.md#duration''',
metavar='BUG_COOLDOWN',
)
parser.add_argument(
'--multiprocessing',
'-mp',
action='store_true',
help='''Experimental! Whether to use a process pool instead of a thread pool''',
)
subparser = parser.add_subparsers(dest='individually')
individual_parser = subparser.add_parser(
'individually',
help='Compute perf sheriffing criteria for individual framework/suite combo (no CSV file required)',
)
individual_parser.add_argument('framework', action='store')
individual_parser.add_argument('suite', action='store')
individual_parser.add_argument('--test', default=None)
def handle(self, *args, **options):
if options.get('individually'):
return self._handle_individually(options)
quant_period = options['quantifying_period']
bug_cooldown = options['bug_cooldown']
multiprocessed = options['multiprocessing']
init_params = (None, quant_period, bug_cooldown)
formula_map = {
'EngineerTraction': EngineerTractionFormula(*init_params),
'FixRatio': FixRatioFormula(*init_params),
'TotalAlerts': TotalAlertsFormula(quant_period),
}
tracker = CriteriaTracker(formula_map, multiprocessed=multiprocessed)
tracker.load_records()
        print(self.INITIAL_PROMPT_MSG)
        start = time.time()
        tracker.update_records()
        duration = time.time() - start
for record in tracker:
print(record)
print(f"Took {duration:.1f} seconds")
def _handle_individually(self, options):
framework = options['framework']
suite = options['suite']
test = options['test']
quant_period = options['quantifying_period']
bug_cooldown = options['bug_cooldown']
init_params = (None, quant_period, bug_cooldown)
targetted_test = (framework, suite, test)
engineer_traction = EngineerTractionFormula(*init_params)
fix_ratio = FixRatioFormula(*init_params)
print(f'\r{self.INITIAL_PROMPT_MSG}', end='')
compute_start = time.time()
eng_traction_result = engineer_traction(*targetted_test)
fix_ratio_result = fix_ratio(*targetted_test)
compute_duration = time.time() - compute_start
# turn into regular percentages
eng_traction_result *= 100
fix_ratio_result *= 100
# display results (inline)
test_moniker = ' '.join(filter(None, (suite, test)))
title = f'Perf Sheriffing Criteria for {framework} - {test_moniker}'
big_underline = '-' * len(title)
# & results headers
eng_traction_head = self.ENGINEER_TRACTION.capitalize()
fix_ratio_head = self.FIX_RATIO.capitalize()
justify_head = self.__get_head_justification(eng_traction_head, fix_ratio_head)
# let's update 1st prompt line
print(f"\r{' ' * len(self.INITIAL_PROMPT_MSG)}", end='')
print(
f"\rComputing Perf Sheriffing Criteria... (took {compute_duration:{self.PRECISION}} seconds)"
)
# display title
print(big_underline)
print(title)
print(big_underline)
# & actual results
print(f'{eng_traction_head:<{justify_head}}: {eng_traction_result:{self.PRECISION}}%')
print(f'{fix_ratio_head:<{justify_head}}: {fix_ratio_result:{self.PRECISION}}%')
print(big_underline)
def __get_head_justification(self, *result_heads):
return max([len(head) for head in result_heads]) + 1
def parse_time_interval(self, interval: str) -> timedelta:
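        # e.g. parse_time_interval('2weeks') -> timedelta(days=14); parsing is
        # delegated to mo_times.Duration, which accepts humanized intervals.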
duration = Duration(interval)
return timedelta(seconds=duration.total_seconds())
| jmaher/treeherder | treeherder/perf/management/commands/compute_criteria_formulas.py | Python | mpl-2.0 | 5,956 | 0.002183 |
default_app_config = 'comet.apps.CometIndicatorConfig' | LegoStormtroopr/comet-indicator-registry | comet/__init__.py | Python | bsd-2-clause | 54 | 0.018519 |
import abc
class Container(abc.ABC):
"""
A container for exposed methods and/or event handlers
for a better modularization of the application.
Example usage
::
# in users.py
class UsersModule(gemstone.Container):
@gemstone.exposed_method("users.register")
def users_register(self, username, password):
pass
@gemstone.exposed_method("users.login")
            def users_login(self, username, password):
                pass
"""
def __init__(self):
self.microservice = None
def set_microservice(self, microservice):
self.microservice = microservice
def get_executor(self):
"""
Returns the executor instance used by the microservice.
"""
return self.microservice.get_executor()
def get_io_loop(self):
"""
Returns the current IOLoop used by the microservice.
:return:
"""
return self.microservice.get_io_loop()
def get_exposed_methods(self):
exposed = []
for item in self._iter_methods():
if getattr(item, "_exposed_public", False) or \
getattr(item, "_exposed_private", False):
exposed.append(item)
return exposed
def get_event_handlers(self):
handlers = []
for item in self._iter_methods():
if getattr(item, "_event_handler", False):
handlers.append(item)
return handlers
def _iter_methods(self):
for item_name in dir(self):
item = getattr(self, item_name)
if callable(item):
yield item
| vladcalin/gemstone | gemstone/core/container.py | Python | mit | 1,639 | 0 |
from numpy import *
from stft import *
from pvoc import *
stft = STFT(16384, 2, 4)
pvoc = PhaseVocoder(stft)
time = 0
def process(i, o):
global time
for x in stft.forward(i):
x = pvoc.forward(x)
x = pvoc.to_bin_offset(x)
x = pvoc.shift(x, lambda y: sin(y + time*0.01)*mean(y))
x = pvoc.from_bin_offset(x)
x = pvoc.backward(x)
stft.backward(x)
stft.pop(o)
time += 1
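
# `process` is presumably registered as the JACK process callback by the host
# script: `i` is the block of input samples, `o` the output buffer filled via
# stft.pop() after the phase-vocoder bin shift.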
| nwoeanhinnogaehr/live-python-jacker | examples/pvoc5.py | Python | gpl-3.0 | 431 | 0.00232 |
#!/usr/bin/env python
import telnetlib, argparse
parser = argparse.ArgumentParser(description='Firefox bookmarks backup tool')
parser.add_argument('output', metavar='FILE', type=str)
parser.add_argument('--host', metavar='host', type=str, default="localhost", help="mozrep host")
parser.add_argument('--port', metavar='port', type=int, default=4242, help="mozrep port")
args = parser.parse_args()
host = args.host
port = args.port
backup_to = args.output
print("Connecting to mozrep at %s:%s" % (host, port))
t = telnetlib.Telnet(host, port=port)
t.write(b'Components.utils.import("resource://gre/modules/XPCOMUtils.jsm");')
t.write(b'XPCOMUtils.defineLazyModuleGetter(this, "PlacesBackups", "resource://gre/modules/PlacesBackups.jsm");')
t.write(('PlacesBackups.saveBookmarksToJSONFile("%s");' % backup_to).encode('ascii'))
t.write(b'repl.quit()')
print("Done")
| hugoArregui/ff-bookmarks-backup | ff-bookmarks-backup.py | Python | bsd-3-clause | 867 | 0.005767 |
# -*- coding: utf-8 -*-
# python-holidays
# ---------------
# A fast, efficient Python library for generating country, province and state
# specific sets of holidays on the fly. It aims to make determining whether a
# specific date is a holiday as fast and flexible as possible.
#
# Authors: dr-prodigy <[email protected]> (c) 2017-2022
# ryanss <[email protected]> (c) 2014-2017
# Website: https://github.com/dr-prodigy/python-holidays
# License: MIT (see LICENSE file)
from datetime import date
from dateutil.easter import easter
from dateutil.relativedelta import relativedelta as rd, MO, SA, FR, WE, TU
from holidays.constants import JAN, MAR, APR, MAY, JUN, AUG, SEP, OCT, NOV, DEC
from holidays.constants import SAT, SUN, WEEKEND
from holidays.holiday_base import HolidayBase
class Australia(HolidayBase):
country = "AU"
PROVINCES = ["ACT", "NSW", "NT", "QLD", "SA", "TAS", "VIC", "WA"]
def __init__(self, **kwargs):
self.prov = kwargs.pop("prov", None)
HolidayBase.__init__(self, **kwargs)
def _populate(self, year):
# ACT: Holidays Act 1958
# NSW: Public Holidays Act 2010
# NT: Public Holidays Act 2013
# QLD: Holidays Act 1983
# SA: Holidays Act 1910
# TAS: Statutory Holidays Act 2000
# VIC: Public Holidays Act 1993
# WA: Public and Bank Holidays Act 1972
# TODO do more research on history of Aus holidays
# New Year's Day
name = "New Year's Day"
jan1 = date(year, JAN, 1)
self[jan1] = name
if self.observed and jan1.weekday() in WEEKEND:
self[jan1 + rd(weekday=MO)] = name + " (Observed)"
# Australia Day
jan26 = date(year, JAN, 26)
if year >= 1935:
if self.prov == "NSW" and year < 1946:
name = "Anniversary Day"
else:
name = "Australia Day"
self[jan26] = name
if self.observed and year >= 1946 and jan26.weekday() in WEEKEND:
self[jan26 + rd(weekday=MO)] = name + " (Observed)"
elif year >= 1888 and self.prov != "SA":
name = "Anniversary Day"
self[jan26] = name
# Adelaide Cup
if self.prov == "SA":
name = "Adelaide Cup"
if year >= 2006:
# subject to proclamation ?!?!
self[date(year, MAR, 1) + rd(weekday=MO(+2))] = name
else:
self[date(year, MAR, 1) + rd(weekday=MO(+3))] = name
# Canberra Day
# Info from https://www.timeanddate.com/holidays/australia/canberra-day
# and https://en.wikipedia.org/wiki/Canberra_Day
if self.prov == "ACT" and year >= 1913:
name = "Canberra Day"
if year >= 1913 and year <= 1957:
self[date(year, MAR, 12)] = name
elif year >= 1958 and year <= 2007:
self[date(year, MAR, 1) + rd(weekday=MO(+3))] = name
elif year >= 2008 and year != 2012:
self[date(year, MAR, 1) + rd(weekday=MO(+2))] = name
elif year == 2012:
self[date(year, MAR, 12)] = name
# Easter
self[easter(year) + rd(weekday=FR(-1))] = "Good Friday"
if self.prov in ("ACT", "NSW", "NT", "QLD", "SA", "VIC"):
self[easter(year) + rd(weekday=SA(-1))] = "Easter Saturday"
if self.prov in ("ACT", "NSW", "QLD", "VIC"):
self[easter(year)] = "Easter Sunday"
self[easter(year) + rd(weekday=MO)] = "Easter Monday"
# Anzac Day
if year > 1920:
name = "Anzac Day"
apr25 = date(year, APR, 25)
self[apr25] = name
if self.observed:
if apr25.weekday() == SAT and self.prov in ("WA", "NT"):
self[apr25 + rd(weekday=MO)] = name + " (Observed)"
elif apr25.weekday() == SUN and self.prov in (
"ACT",
"QLD",
"SA",
"WA",
"NT",
):
self[apr25 + rd(weekday=MO)] = name + " (Observed)"
# Western Australia Day
if self.prov == "WA" and year > 1832:
if year >= 2015:
name = "Western Australia Day"
else:
name = "Foundation Day"
self[date(year, JUN, 1) + rd(weekday=MO(+1))] = name
# Sovereign's Birthday
if year >= 1952:
name = "Queen's Birthday"
elif year > 1901:
name = "King's Birthday"
if year >= 1936:
name = "Queen's Birthday"
if self.prov == "QLD":
if year == 2012:
self[date(year, JUN, 11)] = "Queen's Diamond Jubilee"
if year < 2016 and year != 2012:
dt = date(year, JUN, 1) + rd(weekday=MO(+2))
self[dt] = name
else:
dt = date(year, OCT, 1) + rd(weekday=MO)
self[dt] = name
elif self.prov == "WA":
# by proclamation ?!?!
self[date(year, OCT, 1) + rd(weekday=MO(-1))] = name
elif self.prov in ("NSW", "VIC", "ACT", "SA", "NT", "TAS"):
dt = date(year, JUN, 1) + rd(weekday=MO(+2))
self[dt] = name
elif year > 1911:
self[date(year, JUN, 3)] = name # George V
elif year > 1901:
self[date(year, NOV, 9)] = name # Edward VII
# Picnic Day
if self.prov == "NT":
name = "Picnic Day"
self[date(year, AUG, 1) + rd(weekday=MO)] = name
# Bank Holiday
if self.prov == "NSW":
if year >= 1912:
name = "Bank Holiday"
                self[date(year, AUG, 1) + rd(weekday=MO)] = name
# Labour Day
name = "Labour Day"
if self.prov in ("NSW", "ACT", "SA"):
self[date(year, OCT, 1) + rd(weekday=MO)] = name
elif self.prov == "WA":
self[date(year, MAR, 1) + rd(weekday=MO)] = name
elif self.prov == "VIC":
self[date(year, MAR, 1) + rd(weekday=MO(+2))] = name
elif self.prov == "QLD":
if 2013 <= year <= 2015:
self[date(year, OCT, 1) + rd(weekday=MO)] = name
else:
self[date(year, MAY, 1) + rd(weekday=MO)] = name
elif self.prov == "NT":
name = "May Day"
self[date(year, MAY, 1) + rd(weekday=MO)] = name
elif self.prov == "TAS":
name = "Eight Hours Day"
self[date(year, MAR, 1) + rd(weekday=MO(+2))] = name
# Family & Community Day
if self.prov == "ACT":
name = "Family & Community Day"
if 2007 <= year <= 2009:
self[date(year, NOV, 1) + rd(weekday=TU)] = name
elif year == 2010:
# first Monday of the September/October school holidays
# moved to the second Monday if this falls on Labour day
# TODO need a formula for the ACT school holidays then
# http://www.cmd.act.gov.au/communication/holidays
self[date(year, SEP, 26)] = name
elif year == 2011:
self[date(year, OCT, 10)] = name
elif year == 2012:
self[date(year, OCT, 8)] = name
elif year == 2013:
self[date(year, SEP, 30)] = name
elif year == 2014:
self[date(year, SEP, 29)] = name
elif year == 2015:
self[date(year, SEP, 28)] = name
elif year == 2016:
self[date(year, SEP, 26)] = name
elif year == 2017:
self[date(year, SEP, 25)] = name
# Reconciliation Day
if self.prov == "ACT":
name = "Reconciliation Day"
if year >= 2018:
                self[date(year, MAY, 27) + rd(weekday=MO)] = name
if self.prov == "VIC":
# Grand Final Day
if year == 2020:
# Rescheduled due to COVID-19
self[date(year, OCT, 23)] = "Grand Final Day"
elif year == 2021:
# Rescheduled due to COVID-19
self[date(year, SEP, 24)] = "Grand Final Day"
elif year >= 2015:
self[date(year, SEP, 24) + rd(weekday=FR)] = "Grand Final Day"
# Melbourne Cup
self[date(year, NOV, 1) + rd(weekday=TU)] = "Melbourne Cup"
# The Royal Queensland Show (Ekka)
# The Show starts on the first Friday of August - providing this is
# not prior to the 5th - in which case it will begin on the second
# Friday. The Wednesday during the show is a public holiday.
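        # (In the code below, AUG 5 + rd(weekday=FR) rolls forward to the first
        # Friday on or after the 5th, and the further rd(weekday=WE) lands on
        # the following Wednesday.)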
if self.prov == "QLD":
name = "The Royal Queensland Show"
            if year == 2020:
                self[date(year, AUG, 14)] = name
            elif year == 2021:
                self[date(year, OCT, 29)] = name
else:
self[
date(year, AUG, 5) + rd(weekday=FR) + rd(weekday=WE)
] = name
# Christmas Day
name = "Christmas Day"
dec25 = date(year, DEC, 25)
self[dec25] = name
if self.observed and dec25.weekday() in WEEKEND:
self[date(year, DEC, 27)] = name + " (Observed)"
# Boxing Day
if self.prov == "SA":
name = "Proclamation Day"
else:
name = "Boxing Day"
dec26 = date(year, DEC, 26)
self[dec26] = name
if self.observed and dec26.weekday() in WEEKEND:
self[date(year, DEC, 28)] = name + " (Observed)"
class AU(Australia):
pass
class AUS(Australia):
pass
| ryanss/holidays.py | holidays/countries/australia.py | Python | mit | 9,872 | 0 |
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RLimma(RPackage):
"""Data analysis, linear models and differential expression
for microarray data."""
homepage = "https://www.bioconductor.org/packages/limma/"
url = "https://www.bioconductor.org/packages/release/bioc/src/contrib/limma_3.32.6.tar.gz"
list_url = homepage
version('3.32.6', 'df5dc2b85189a24e939efa3a8e6abc41')
| TheTimmy/spack | var/spack/repos/builtin/packages/r-limma/package.py | Python | lgpl-2.1 | 1,617 | 0.001237 |
import Utils
from Utils import printe
class CommandBuilder(object):
def __init__(self, *command_args):
self.command_args = list(command_args)
def append(self, *args):
for arg in args:
if isinstance(arg, str):
self.command_args += [arg]
elif isinstance(arg, list) or isinstance(arg, tuple):
for sub_arg in arg:
self.append(sub_arg)
else:
printe('Error appending argument of unknown type: {}'.format(
str(type(arg))), terminate=True)
return self
def debug(self):
return Utils.debug(*self.command_args)
def run(self, replaceForeground=False):
return Utils.run(*self.command_args,
replaceForeground=replaceForeground)
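# A minimal usage sketch (hypothetical command and arguments):
#   CommandBuilder('docker', 'ps').append('-a', ['--format', 'table']).run()
# append() flattens nested lists/tuples and returns self, so calls chain.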
| bytejive/lazy-docker | CommandBuilder.py | Python | apache-2.0 | 828 | 0 |
# Copyright (c) 2013-2014 Will Thames <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
import glob
import imp
import ansible.utils
from ansible.playbook.task import Task
import ansible.constants as C
from ansible.module_utils.splitter import split_args
import yaml
from yaml.composer import Composer
from yaml.constructor import Constructor
LINE_NUMBER_KEY = '__line__'
def load_plugins(directory):
result = []
fh = None
for pluginfile in glob.glob(os.path.join(directory, '[A-Za-z]*.py')):
pluginname = os.path.basename(pluginfile.replace('.py', ''))
try:
fh, filename, desc = imp.find_module(pluginname, [directory])
mod = imp.load_module(pluginname, fh, filename, desc)
obj = getattr(mod, pluginname)()
result.append(obj)
finally:
if fh:
fh.close()
return result
def tokenize(line):
result = list()
tokens = line.lstrip().split(" ")
if tokens[0] == '-':
tokens = tokens[1:]
if tokens[0] == 'action:' or tokens[0] == 'local_action:':
tokens = tokens[1:]
command = tokens[0].replace(":", "")
args = list()
kwargs = dict()
for arg in tokens[1:]:
if "=" in arg:
kv = arg.split("=", 1)
kwargs[kv[0]] = kv[1]
else:
args.append(arg)
return (command, args, kwargs)
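# For illustration (hypothetical task line), tokenize turns
#   '- shell: echo hello creates=/tmp/marker'
# into ('shell', ['echo', 'hello'], {'creates': '/tmp/marker'}).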
def _playbook_items(pb_data):
if isinstance(pb_data, dict):
return pb_data.items()
elif not pb_data:
return []
else:
return [item for play in pb_data for item in play.items()]
def find_children(playbook):
if not os.path.exists(playbook[0]):
return []
results = []
basedir = os.path.dirname(playbook[0])
pb_data = ansible.utils.parse_yaml_from_file(playbook[0])
items = _playbook_items(pb_data)
for item in items:
for child in play_children(basedir, item, playbook[1]):
if "$" in child['path'] or "{{" in child['path']:
continue
valid_tokens = list()
for token in split_args(child['path']):
if '=' in token:
break
valid_tokens.append(token)
path = ' '.join(valid_tokens)
results.append({
'path': ansible.utils.path_dwim(basedir, path),
'type': child['type']
})
return results
def play_children(basedir, item, parent_type):
delegate_map = {
'tasks': _taskshandlers_children,
'pre_tasks': _taskshandlers_children,
'post_tasks': _taskshandlers_children,
'include': _include_children,
'roles': _roles_children,
'dependencies': _roles_children,
'handlers': _taskshandlers_children,
}
(k, v) = item
if k in delegate_map:
if v:
return delegate_map[k](basedir, k, v, parent_type)
return []
def _include_children(basedir, k, v, parent_type):
return [{'path': ansible.utils.path_dwim(basedir, v), 'type': parent_type}]
def _taskshandlers_children(basedir, k, v, parent_type):
return [{'path': ansible.utils.path_dwim(basedir, th['include']),
'type': 'tasks'}
for th in v if 'include' in th]
def _roles_children(basedir, k, v, parent_type):
results = []
for role in v:
if isinstance(role, dict):
results.extend(_look_for_role_files(basedir, role['role']))
else:
results.extend(_look_for_role_files(basedir, role))
return results
def _rolepath(basedir, role):
role_path = None
possible_paths = [
# if included from a playbook
ansible.utils.path_dwim(basedir, os.path.join('roles', role)),
ansible.utils.path_dwim(basedir, role),
# if included from roles/[role]/meta/main.yml
ansible.utils.path_dwim(
basedir, os.path.join('..', '..', '..', 'roles', role)
),
ansible.utils.path_dwim(basedir,
os.path.join('..', '..', role))
]
if C.DEFAULT_ROLES_PATH:
search_locations = C.DEFAULT_ROLES_PATH.split(os.pathsep)
for loc in search_locations:
loc = os.path.expanduser(loc)
possible_paths.append(ansible.utils.path_dwim(loc, role))
for path_option in possible_paths:
if os.path.isdir(path_option):
role_path = path_option
break
return role_path
def _look_for_role_files(basedir, role):
role_path = _rolepath(basedir, role)
if not role_path:
return []
results = []
for th in ['tasks', 'handlers', 'meta']:
for ext in ('.yml', '.yaml'):
thpath = os.path.join(role_path, th, 'main' + ext)
if os.path.exists(thpath):
results.append({'path': thpath, 'type': th})
break
return results
def rolename(filepath):
idx = filepath.find('roles/')
if idx < 0:
return ''
role = filepath[idx+6:]
role = role[:role.find('/')]
return role
def _kv_to_dict(v):
(command, args, kwargs) = tokenize(v)
return (dict(module=command, module_arguments=args, **kwargs))
def normalize_task(task):
''' ensures that all tasks have an action key
and that string values are converted to python objects '''
result = dict()
for (k, v) in task.items():
if k in Task.VALID_KEYS or k.startswith('with_'):
if k == 'local_action' or k == 'action':
if not isinstance(v, dict):
v = _kv_to_dict(v)
v['module_arguments'] = v.get('module_arguments', list())
result['action'] = v
else:
result[k] = v
else:
if isinstance(v, basestring):
v = _kv_to_dict(k + ' ' + v)
elif not v:
v = dict(module=k)
else:
if isinstance(v, dict):
v.update(dict(module=k))
else:
if k == '__line__':
# Keep the line number stored
result[k] = v
continue
else:
# Should not get here!
print "Was not expecting value %s of type %s for key %s" % (str(v), type(v), k)
print "Task: %s" % str(task)
exit(1)
v['module_arguments'] = v.get('module_arguments', list())
result['action'] = v
return result
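# For example (hypothetical task), {'name': 'x', 'shell': 'echo hi'} becomes
# {'name': 'x', 'action': {'module': 'shell', 'module_arguments': ['echo', 'hi']}}.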
def task_to_str(task):
name = task.get("name")
if name:
return name
action = task.get("action")
args = " ".join(["k=v" for (k, v) in action.items() if k != "module_arguments"] +
action.get("module_arguments"))
return "{0} {1}".format(action["module"], args)
def get_action_tasks(yaml, file):
tasks = list()
if file['type'] in ['tasks', 'handlers']:
tasks = yaml
else:
for block in yaml:
for section in ['tasks', 'handlers', 'pre_tasks', 'post_tasks']:
if section in block:
block_tasks = block.get(section) or []
tasks.extend(block_tasks)
return [normalize_task(task) for task in tasks
if 'include' not in task.keys()]
def parse_yaml_linenumbers(data):
"""Parses yaml as ansible.utils.parse_yaml but with linenumbers.
The line numbers are stored in each node's LINE_NUMBER_KEY key"""
loader = yaml.Loader(data)
def compose_node(parent, index):
# the line number where the previous token has ended (plus empty lines)
line = loader.line
node = Composer.compose_node(loader, parent, index)
node.__line__ = line + 1
return node
def construct_mapping(node, deep=False):
mapping = Constructor.construct_mapping(loader, node, deep=deep)
mapping[LINE_NUMBER_KEY] = node.__line__
return mapping
loader.compose_node = compose_node
loader.construct_mapping = construct_mapping
data = loader.get_single_data()
return data
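# For illustration: parsing "- name: demo\n  debug: msg=hi" yields
# [{'name': 'demo', 'debug': 'msg=hi', '__line__': 1}] - each mapping
# carries (approximately) the line where its node begins.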
| sumara/ansible-lint-deb | deb_dist/ansible-lint-2.1.3/lib/ansiblelint/utils.py | Python | gpl-2.0 | 9,195 | 0.000218 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-04 21:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("peering", "0003_auto_20170903_1235")]
operations = [
migrations.AlterField(
model_name="autonomoussystem",
name="ipv4_as_set",
field=models.CharField(blank=True, max_length=128, null=True),
),
migrations.AlterField(
model_name="autonomoussystem",
name="ipv4_max_prefixes",
field=models.PositiveIntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name="autonomoussystem",
name="ipv6_as_set",
field=models.CharField(blank=True, max_length=128, null=True),
),
migrations.AlterField(
model_name="autonomoussystem",
name="ipv6_max_prefixes",
field=models.PositiveIntegerField(blank=True, null=True),
),
]
| respawner/peering-manager | peering/migrations/0004_auto_20171004_2323.py | Python | apache-2.0 | 1,014 | 0 |
#!/usr/bin/python3
#from __future__ import print_function
from setuptools import setup, Extension
import sys
import os
import psutil
# monkey-patch for parallel compilation
def parallelCCompile(self, sources, output_dir=None, macros=None, include_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, depends=None):
# those lines are copied from distutils.ccompiler.CCompiler directly
macros, objects, extra_postargs, pp_opts, build = self._setup_compile(output_dir, macros, include_dirs, sources, depends, extra_postargs)
cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
# parallel code
N = psutil.cpu_count(logical=False) # number of parallel compilations
import multiprocessing.pool
def _single_compile(obj):
try: src, ext = build[obj]
except KeyError: return
self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
# convert to list, imap is evaluated on-demand
list(multiprocessing.pool.ThreadPool(N).imap(_single_compile,objects))
return objects
#import distutils.ccompiler
#distutils.ccompiler.CCompiler.compile=parallelCCompile
''' Note:
to build Boost.Python on Windows with mingw
bjam target-os=windows/python=3.4 toolset=gcc variant=debug,release link=static,shared threading=multi runtime-link=shared cxxflags="-include cmath "
also insert this on top of boost/python.hpp :
#include <cmath> //fix cmath:1096:11: error: '::hypot' has not been declared
'''
def getExtensions():
platform = sys.platform
extensionsList = []
sources = ['src/Genome.cpp',
'src/Innovation.cpp',
'src/NeuralNetwork.cpp',
'src/Parameters.cpp',
'src/PhenotypeBehavior.cpp',
'src/Population.cpp',
'src/Random.cpp',
'src/Species.cpp',
'src/Substrate.cpp',
'src/Utils.cpp']
extra = ['-march=native',
'-mtune=native',
'-g',
]
if platform == 'darwin':
extra += ['-stdlib=libc++',
'-std=c++11',]
else:
extra += ['-std=gnu++11']
is_windows = 'win' in platform and platform != 'darwin'
if is_windows:
extra.append('/EHsc')
else:
extra.append('-w')
prefix = os.getenv('PREFIX')
if prefix and len(prefix) > 0:
extra += ["-I{}/include".format(prefix)]
build_sys = os.getenv('MN_BUILD')
if build_sys is None:
if os.path.exists('_MultiNEAT.cpp'):
sources.insert(0, '_MultiNEAT.cpp')
extra.append('-O3')
extensionsList.extend([Extension('MultiNEAT._MultiNEAT',
sources,
extra_compile_args=extra)],
)
else:
print('Source file is missing and MN_BUILD environment variable is not set.\n'
'Specify either \'cython\' or \'boost\'. Example to build in Linux with Cython:\n'
'\t$ export MN_BUILD=cython')
exit(1)
elif build_sys == 'cython':
from Cython.Build import cythonize
sources.insert(0, '_MultiNEAT.pyx')
extra.append('-O3')
extensionsList.extend(cythonize([Extension('MultiNEAT._MultiNEAT',
sources,
extra_compile_args=extra)],
))
elif build_sys == 'boost':
is_python_2 = sys.version_info[0] < 3
sources.insert(0, 'src/PythonBindings.cpp')
if is_windows:
if is_python_2:
raise RuntimeError("Python prior to version 3 is not supported on Windows due to limits of VC++ compiler version")
libs = ['boost_system', 'boost_serialization']
if is_python_2:
libs += ['boost_python', "boost_numpy"]
else:
# with boost 1.67 you need boost_python3x and boost_numpy3x where x is python version 3.x
libs += ['boost_python36', "boost_numpy36"] # in Ubuntu 14 there is only 'boost_python-py34'
# for Windows with mingw
# libraries= ['libboost_python-mgw48-mt-1_58',
# 'libboost_serialization-mgw48-mt-1_58'],
# include_dirs = ['C:/MinGW/include', 'C:/Users/Peter/Desktop/boost_1_58_0'],
# library_dirs = ['C:/MinGW/lib', 'C:/Users/Peter/Desktop/boost_1_58_0/stage/lib'],
extra.extend(['-DUSE_BOOST_PYTHON', '-DUSE_BOOST_RANDOM', #'-O0',
#'-DVDEBUG',
])
exx = Extension('MultiNEAT._MultiNEAT',
sources,
libraries=libs,
extra_compile_args=extra)
print(dir(exx))
print(exx)
print(exx.extra_compile_args)
extensionsList.append(exx)
else:
raise AttributeError('Unknown tool: {}'.format(build_sys))
return extensionsList
setup(name='multineat',
version='0.5', # Update version in conda/meta.yaml as well
packages=['MultiNEAT'],
ext_modules=getExtensions())
| peter-ch/MultiNEAT | setup.py | Python | lgpl-3.0 | 5,190 | 0.005202 |
#!/usr/bin/env python
import unittest
from sqlbuilder import smartsql
from ascetic import exceptions, validators
from ascetic.databases import databases
from ascetic.mappers import Mapper, mapper_registry
from ascetic.relations import ForeignKey
Author = Book = None
class TestMapper(unittest.TestCase):
maxDiff = None
create_sql = {
'postgresql': """
DROP TABLE IF EXISTS ascetic_tests_author CASCADE;
CREATE TABLE ascetic_tests_author (
id serial NOT NULL PRIMARY KEY,
first_name VARCHAR(40) NOT NULL,
last_name VARCHAR(40) NOT NULL,
bio TEXT
);
DROP TABLE IF EXISTS books CASCADE;
CREATE TABLE books (
id serial NOT NULL PRIMARY KEY,
title VARCHAR(255),
author_id integer REFERENCES ascetic_tests_author(id) ON DELETE CASCADE
);
""",
'mysql': """
DROP TABLE IF EXISTS ascetic_tests_author CASCADE;
CREATE TABLE ascetic_tests_author (
id INT(11) NOT NULL auto_increment,
first_name VARCHAR(40) NOT NULL,
last_name VARCHAR(40) NOT NULL,
bio TEXT,
PRIMARY KEY (id)
);
DROP TABLE IF EXISTS books CASCADE;
CREATE TABLE books (
id INT(11) NOT NULL auto_increment,
title VARCHAR(255),
author_id INT(11),
FOREIGN KEY (author_id) REFERENCES ascetic_tests_author(id),
PRIMARY KEY (id)
);
""",
'sqlite3': """
DROP TABLE IF EXISTS ascetic_tests_author;
CREATE TABLE ascetic_tests_author (
id INTEGER PRIMARY KEY AUTOINCREMENT,
first_name VARCHAR(40) NOT NULL,
last_name VARCHAR(40) NOT NULL,
bio TEXT
);
DROP TABLE IF EXISTS books;
CREATE TABLE books (
id INTEGER PRIMARY KEY AUTOINCREMENT,
title VARCHAR(255),
author_id INT(11),
FOREIGN KEY (author_id) REFERENCES ascetic_tests_author(id)
);
"""
}
@classmethod
def create_models(cls):
class Author(object):
def __init__(self, id=None, first_name=None, last_name=None, bio=None):
self.id = id
self.first_name = first_name
self.last_name = last_name
self.bio = bio
class AuthorMapper(Mapper):
db_table = 'ascetic_tests_author'
defaults = {'bio': 'No bio available'}
validations = {'first_name': validators.Length(),
'last_name': (validators.Length(), lambda x: x != 'BadGuy!' or 'Bad last name', )}
AuthorMapper(Author)
class Book(object):
def __init__(self, id=None, title=None, author_id=None):
self.id = id
self.title = title
self.author_id = author_id
class BookMapper(Mapper):
db_table = 'books'
relationships = {
'author': ForeignKey(Author, related_name='books')
}
BookMapper(Book)
return locals()
@classmethod
def setUpClass(cls):
db = databases['default']
db.cursor().execute(cls.create_sql[db.engine])
for model_name, model in cls.create_models().items():
globals()[model_name] = model
def setUp(self):
db = databases['default']
db.identity_map.disable()
for table in ('ascetic_tests_author', 'books'):
db.execute('DELETE FROM {0}'.format(db.qn(table)))
author_mapper = mapper_registry[Author]
book_mapper = mapper_registry[Book]
james = Author(first_name='James', last_name='Joyce')
author_mapper.save(james)
kurt = Author(first_name='Kurt', last_name='Vonnegut')
author_mapper.save(kurt)
tom = Author(first_name='Tom', last_name='Robbins')
author_mapper.save(tom)
book_mapper.save(Book(title='Ulysses', author_id=james.id))
book_mapper.save(Book(title='Slaughter-House Five', author_id=kurt.id))
book_mapper.save(Book(title='Jitterbug Perfume', author_id=tom.id))
slww = Book(title='Still Life with Woodpecker', author_id=tom.id)
book_mapper.save(slww)
self.data = {
'james': james,
'kurt': kurt,
'tom': tom,
'slww': slww,
}
def test_pk(self):
book_mapper = mapper_registry[Book]
tom, slww = self.data['tom'], self.data['slww']
pk = book_mapper.get_pk(slww)
self.assertEqual(book_mapper.get_pk(slww), slww.id)
book_mapper.set_pk(slww, tom.id)
self.assertEqual(book_mapper.get_pk(slww), tom.id)
book_mapper.set_pk(slww, pk)
self.assertEqual(book_mapper.get_pk(slww), pk)
# self.assertTrue(kurt == author_mapper.get(kurt.id))
# self.assertTrue(kurt != tom)
def test_fk(self):
kurt, tom, slww = self.data['kurt'], self.data['tom'], self.data['slww']
self.assertEqual(slww.author.first_name, 'Tom')
slww.author = kurt
self.assertEqual(slww.author.first_name, 'Kurt')
del slww.author
self.assertEqual(slww.author, None)
slww.author = None
self.assertEqual(slww.author, None)
slww.author = tom.id
self.assertEqual(slww.author.first_name, 'Tom')
def test_o2m(self):
tom = self.data['tom']
self.assertEqual(len(list(tom.books)), 2)
def test_retrieval(self):
author_mapper, book_mapper = mapper_registry[Author], mapper_registry[Book]
tom = self.data['tom']
# Test retrieval
b = book_mapper.get(title='Ulysses')
a = author_mapper.get(id=b.author_id)
self.assertEqual(a.id, b.author_id)
a = author_mapper.query.where(author_mapper.sql_table.id == b.id)[:]
# self.assert_(isinstance(a, list))
self.assert_(isinstance(a, smartsql.Q))
self.assertEqual(len(list(tom.books)), 2)
def test_update(self):
author_mapper = mapper_registry[Author]
kurt = self.data['kurt']
kid = kurt.id
new_last_name = 'Vonnegut, Jr.'
a = author_mapper.get(id=kid)
a.last_name = new_last_name
author_mapper.save(a)
a = author_mapper.get(kid)
self.assertEqual(a.last_name, new_last_name)
def test_count(self):
author_mapper, book_mapper = mapper_registry[Author], mapper_registry[Book]
self.assertEqual(author_mapper.query.count(), 3)
self.assertEqual(len(book_mapper.query.clone()), 4)
self.assertEqual(len(book_mapper.query.clone()[1:4]), 3)
def test_delete(self):
author_mapper, book_mapper = mapper_registry[Author], mapper_registry[Book]
kurt = self.data['kurt']
author_mapper.delete(kurt)
self.assertEqual(author_mapper.query.count(), 2)
self.assertEqual(len(book_mapper.query.clone()), 3)
def test_validation(self):
author_mapper = mapper_registry[Author]
a = Author(first_name='', last_name='Ted')
self.assertRaises(exceptions.ValidationError, author_mapper.validate, a)
def test_defaults(self):
author_mapper = mapper_registry[Author]
a = Author(first_name='Bill and', last_name='Ted')
author_mapper.save(a)
self.assertEqual(a.bio, 'No bio available')
a = Author(first_name='I am a', last_name='BadGuy!')
self.assertRaises(exceptions.ValidationError, author_mapper.validate, a)
def test_smartsql(self):
author_mapper, book_mapper = mapper_registry[Author], mapper_registry[Book]
slww = self.data['slww']
fields = [smartsql.compile(i)[0] for i in author_mapper.get_sql_fields()]
self.assertListEqual(
fields,
['"ascetic_tests_author"."id"',
'"ascetic_tests_author"."first_name"',
'"ascetic_tests_author"."last_name"',
'"ascetic_tests_author"."bio"', ]
)
# self.assertEqual(smartsql.compile(book_mapper.sql_table.author)[0], '"books"."author_id"')
smartsql.auto_name.counter = 0
self.assertEqual(
smartsql.compile(book_mapper.query.where(book_mapper.sql_table.author.id == 1)),
('SELECT "books"."id", "books"."title", "books"."author_id" FROM "books" INNER '
'JOIN "ascetic_tests_author" AS "_auto_1" ON ("books"."author_id" = '
'"_auto_1"."id") WHERE "_auto_1"."id" = %s',
[1])
)
smartsql.auto_name.counter = 0
self.assertEqual(
smartsql.compile(author_mapper.query.where(
(book_mapper.sql_table.author.id == 1) & (book_mapper.sql_table.author.first_name == 'Ivan')
)),
('SELECT "ascetic_tests_author"."id", "ascetic_tests_author"."first_name", '
'"ascetic_tests_author"."last_name", "ascetic_tests_author"."bio" FROM '
'"ascetic_tests_author" INNER JOIN "ascetic_tests_author" AS "_auto_1" ON '
'("books"."author_id" = "_auto_1"."id") INNER JOIN "ascetic_tests_author" AS '
'"_auto_2" ON ("books"."author_id" = "_auto_2"."id") WHERE "_auto_1"."id" = '
'%s AND "_auto_2"."first_name" = %s',
[1, 'Ivan'])
)
smartsql.auto_name.counter = 0
author_table = book_mapper.sql_table.author
self.assertEqual(
smartsql.compile(author_mapper.query.where(
(author_table.id == 1) & (author_table.first_name == 'Ivan')
)),
('SELECT "ascetic_tests_author"."id", "ascetic_tests_author"."first_name", '
'"ascetic_tests_author"."last_name", "ascetic_tests_author"."bio" FROM '
'"ascetic_tests_author" INNER JOIN "ascetic_tests_author" AS "_auto_1" ON '
'("books"."author_id" = "_auto_1"."id") WHERE "_auto_1"."id" = %s AND '
'"_auto_1"."first_name" = %s',
[1, 'Ivan'])
)
q = author_mapper.query
self.assertEqual(smartsql.compile(q)[0], '''SELECT "ascetic_tests_author"."id", "ascetic_tests_author"."first_name", "ascetic_tests_author"."last_name", "ascetic_tests_author"."bio" FROM "ascetic_tests_author"''')
self.assertEqual(len(q), 3)
for obj in q:
self.assertTrue(isinstance(obj, Author))
q = q.where(author_mapper.sql_table.id == slww.author_id)
self.assertEqual(smartsql.compile(q)[0], """SELECT "ascetic_tests_author"."id", "ascetic_tests_author"."first_name", "ascetic_tests_author"."last_name", "ascetic_tests_author"."bio" FROM "ascetic_tests_author" WHERE "ascetic_tests_author"."id" = %s""")
self.assertEqual(len(q), 1)
self.assertTrue(isinstance(q[0], Author))
def test_prefetch(self):
author_mapper, book_mapper = mapper_registry[Author], mapper_registry[Book]
q = book_mapper.query.prefetch('author').order_by(book_mapper.sql_table.id)
for obj in q:
self.assertTrue(hasattr(obj, '_cache'))
self.assertTrue('author' in obj._cache)
self.assertEqual(obj._cache['author'], obj.author)
for obj in author_mapper.query.prefetch('books').order_by(author_mapper.sql_table.id):
self.assertTrue(hasattr(obj, '_cache'))
self.assertTrue('books' in obj._cache)
self.assertEqual(len(obj._cache['books']._cache), len(obj.books))
for i in obj._cache['books']._cache:
self.assertEqual(i._cache['author'], obj)
| emacsway/ascetic | ascetic/tests/test_mappers.py | Python | mit | 11,813 | 0.00254 |
# cs.???? = currentstate, any variable on the status tab in the planner can be used.
# Script = options are
# Script.Sleep(ms)
# Script.ChangeParam(name,value)
# Script.GetParam(name)
# Script.ChangeMode(mode) - same as displayed in mode setup screen 'AUTO'
# Script.WaitFor(string,timeout)
# Script.SendRC(channel,pwm,sendnow)
#
print 'Start Script'
for chan in range(1,9):
Script.SendRC(chan,1500,False)
Script.SendRC(3,Script.GetParam('RC3_MIN'),True)
Script.Sleep(5000)
while cs.lat == 0:
print 'Waiting for GPS'
Script.Sleep(1000)
print 'Got GPS'
jo = 10 * 13
print jo
Script.SendRC(3,1000,False)
Script.SendRC(4,2000,True)
cs.messages.Clear()
Script.WaitFor('ARMING MOTORS',30000)
Script.SendRC(4,1500,True)
print 'Motors Armed!'
Script.SendRC(3,1700,True)
while cs.alt < 50:
Script.Sleep(50)
Script.SendRC(5,2000,True) # acro
Script.SendRC(1,2000,False) # roll
Script.SendRC(3,1370,True) # throttle
while cs.roll > -45: # top half 0 - 180
Script.Sleep(5)
while cs.roll < -45: # -180 - -45
Script.Sleep(5)
Script.SendRC(5,1500,False) # stabilise
Script.SendRC(1,1500,True) # level roll
Script.Sleep(2000) # 2 sec to stabilise
Script.SendRC(3,1300,True) # throttle back to land
thro = 1350 # will descend
while cs.alt > 0.1:
Script.Sleep(300)
Script.SendRC(3,1000,False)
Script.SendRC(4,1000,True)
Script.WaitFor('DISARMING MOTORS',30000)
Script.SendRC(4,1500,True)
print 'Roll complete' | vizual54/MissionPlanner | Scripts/example1.py | Python | gpl-3.0 | 1,491 | 0.031565 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Automatically generate Nagios configurations.
Copyright (C) 2015 Canux CHENG
All rights reserved
Name: __init__.py
Author: Canux [email protected]
Version: V1.0
Time: Wed 09 Sep 2015 09:20:51 PM EDT
Example:
./nagios -h
"""
__version__ = "3.1.0.0"
__description__ = """Config nagios automatic. Any question contact the author Canux CHENG. Email: [email protected]."""
__author__ = "Canux CHENG"
| crazy-canux/xnagios | nagios/__init__.py | Python | apache-2.0 | 451 | 0.002217 |
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import fnmatch
import logging
import numpy as np
from ._transit import CythonSolver
__all__ = ["Central", "Body", "System"]
try:
from itertools import izip, imap
except ImportError:
izip, imap = zip, map
# Newton's constant in $R_\odot^3 M_\odot^{-1} {days}^{-2}$.
_G = 2945.4625385377644
# A constant to convert between solar radii per day and m/s.
_rvconv = 1.242271746944644082e-04
# Solar mass & radius in cgs units
_Msun = 1.9891e33
_Rsun = 6.95508e10
class Central(object):
"""
The "central"---in this context---is the massive central body in a
:class:`System`.
:param mass:
The mass of the body measured in Solar masses. (default: ``1.0``)
:param radius:
The radius of the body measured in Solar radii. (default: ``1.0``)
:param flux:
The un-occulted flux measured in whatever units you feel like using.
(default: ``1.0``)
    **Limb darkening** can be specified using ``(mu1, mu2)`` or ``(q1, q2)``.
    The two parameterizations are related by ``mu1 = 2*sqrt(q1)*q2`` and
    ``mu2 = sqrt(q1)*(1 - 2*q2)``; both ``q1`` and ``q2`` must lie in
    ``[0, 1]``, which keeps the quadratic limb-darkening profile physical.
    """
def __init__(self, mass=1.0, radius=1.0, flux=1.0, dilution=0.0,
q1=None, q2=None, mu1=None, mu2=None):
self.mass = mass
self.radius = radius
self.flux = flux
if not 0.0 <= dilution <= 1.0:
raise ValueError("'dilution' must be between 0 and 1")
self.dilution = dilution
# Allow different limb darkening parameters.
if mu1 is not None and mu2 is not None:
if q1 is not None or q2 is not None:
raise RuntimeError("You can't use *both* limb-darkening "
"parameterizations!")
self.coeffs = (mu1, mu2)
else:
self.q1 = q1 if q1 is not None else 0.5
self.q2 = q2 if q2 is not None else 0.5
@property
def q1(self):
return self._q1
@q1.setter
def q1(self, v):
if not 0 <= v <= 1:
raise ValueError("Invalid limb darkening coefficient")
self._q1 = v
@property
def q2(self):
return self._q2
@q2.setter
def q2(self, v):
if not 0 <= v <= 1:
raise ValueError("Invalid limb darkening coefficient")
self._q2 = v
@property
def coeffs(self):
q1, q2 = self.q1, self.q2
q1 = np.sqrt(np.abs(q1))
return 2*q1*q2, q1*(1-2*q2)
@coeffs.setter
def coeffs(self, value):
u1, u2 = value
u2 = u1+u2
self.q1, self.q2 = u2*u2, 0.5*u1/u2
@property
def density(self):
"""Stellar density in CGS units
"""
r = self.radius * _Rsun
m = self.mass * _Msun
return 0.75 * m / (np.pi * r * r * r)
@density.setter
def density(self, rho):
r = self.radius * _Rsun
m = np.pi * rho * r * r * r / 0.75
self.mass = m / _Msun
class Body(object):
r"""
A "body"---in this context---is a (possibly) massive body orbiting a
:class:`Central` in a :class:`System`. There are several ways to
initialize this and once it has been added to a system using the
:func:`System.add_body` method, they should all be equivalent. The orbital
elements specified either specify a Keplerian orbit. This object includes
all sorts of magic for converting between different specifications when
needed but the base description of the planet and the orbit is
parameterized by the parameters listed by
:func:`System.get_parameter_vector`.
:param r:
The radius measured in Solar radii. (default: ``0.0``)
:param mass:
The mass in Solar masses. (default: ``0.0``)
:param a:
The semi-major axis of the orbit measured in Solar radii. Either this
parameter or ``period`` must be provided but not both.
:param period:
The period of the orbit in days. Either this parameter or ``a`` must
be provided but not both.
:param t0:
The epoch of the orbit in days. (default: ``0.0``)
:param e:
The eccentricity of the orbit. (default: ``0.0``)
:param omega:
The orientation of the orbital ellipse in radians as defined by Winn
(2010). (default: ``0.0``)
:param ix:
The relative inclination of the orbital plane along the line-of-sight
in degrees. This angle is measured differently than you're used to:
zero degrees is edge on and 90 degrees in face on. This angle will be
subtracted from the base inclination of the planetary system to get
the standard measurement of the inclination. Either this parameter
or ``b`` can be specified but not both. (default: ``0.0``)
:param incl:
An alternative to `ix` but defined in the standard way (90-deg is edge
on).
:param b:
The mean impact parameter of the orbit measured in stellar radii (not
Solar radii). Specifically, this impact parameter is defined as
.. math::
b = \frac{a}{R_\star} \cos i \,
\left(\frac{1 - e^2}{1+e\,\sin\omega} \right)
(default: ``0.0``)
"""
def __init__(self,
radius=0.0,
mass=0.0,
a=None,
period=None,
t0=0.0,
e=0.0,
omega=0.0,
ix=None,
incl=None,
b=None,
# Deprecated:
r=None,
pomega=None):
# Deprecation warnings.
if r is not None:
logging.warn("the argument 'r' is deprecated. "
"Use 'radius' instead")
if pomega is not None:
logging.warn("the argument 'pomega' is deprecated. "
"Use 'omega' instead")
# Check the supplied arguments.
assert sum((a is None, period is None)) == 1, \
"you must provide one (and only one) of 'a' and 'period'"
assert sum((b is None, ix is None, incl is None)) >= 2, \
"you can give a value for up to one of 'b', 'ix', or 'incl'"
        if ix is None and b is None and incl is None:
            ix = 0.0
# Base parameters.
self.radius = radius if r is None else r
self._a = a
self._period = period
self.mass = mass
self.t0 = t0
self.e = e
self.omega = omega if pomega is None else pomega
self._b = b
self._ix = ix
self._incl = incl
def _check_ps(self):
if not hasattr(self, "system"):
raise RuntimeError("You must add this body to a system "
"before getting the period.")
@property
def radius(self):
return self._radius
@radius.setter
def radius(self, r):
if r < 0:
raise ValueError("Invalid planet radius (must be non-negative)")
self._radius = r
@property
def r(self):
return self.radius
@r.setter
def r(self, r):
self.radius = r
@property
def period(self):
# If we already have a period, return that.
if self._period is not None:
return self._period
# If not, check to make sure that we're already part of a system
# and then compute the period based on the star's mass.
self._check_ps()
mstar = self.system.central.mass
a = self._a
return 2 * np.pi * np.sqrt(a * a * a / _G / (mstar + self.mass))
@period.setter
def period(self, P):
if P <= 0.0:
raise ValueError("Invalid period (must be positive)")
self._check_ps()
mstar = self.system.central.mass
self._a = (_G*P*P*(self.mass+mstar)/(4*np.pi*np.pi)) ** (1./3)
self._period = None
@property
def a(self):
if self._a is None:
self.period = self._period
return self._a
@a.setter
def a(self, a):
self._period = None
self._a = a
@property
def incl(self):
"""
The standard definition of inclination: 90-deg is edge on.
"""
if self._incl is not None:
return self._incl
self._check_ps()
return self.system.iobs + self.ix
@incl.setter
def incl(self, v):
self._check_ps()
self.ix = v - self.system.iobs
self._incl = None
@property
def b(self):
# If we already have an impact parameter, return that.
if self._b is not None:
return self._b
# If not, check to make sure that we're already part of a system
# and then compute the impact parameter based on the star's radius.
self._check_ps()
rstar = self.system.central.radius
incl = np.radians(self.incl)
# Compute contribution due to eccentricity.
factor = 1.0
e = self.e
if e > 0.0:
factor = (1 - e * e) / (1 + e * np.sin(self.omega))
return self.a * np.cos(incl) / rstar * factor
@b.setter
def b(self, b):
if b < 0.0:
raise ValueError("Invalid impact parameter (must be non-negative)")
self._check_ps()
rstar = self.system.central.radius
# Compute contribution due to eccentricity.
factor = 1.0
e = self.e
if e > 0.0:
factor = (1 + e * np.sin(self.omega)) / (1 - e * e)
arg = b * factor * rstar / self.a
if arg > 1.0:
raise ValueError("Invalid impact parameter")
self.incl = np.degrees(np.arccos(arg))
self._b = None
@property
def ix(self):
if self._ix is None:
if self._b is not None:
self.b = self._b
elif self._incl is not None:
self.incl = self._incl
else:
raise RuntimeError("Something went wrong.")
return self._ix
@ix.setter
def ix(self, ix):
self._b = None
self._ix = ix
@property
def duration(self):
"""
The approximate duration of the transit :math:`T_\mathrm{tot}` from
Equation (14) in Winn (2010).
"""
self._check_ps()
rstar = self.system.central.radius
k = self.r/rstar
dur = self.period / np.pi
arg = rstar/self.a * np.sqrt((1+k)**2 - self.b**2)
arg /= np.sin(np.radians(self.incl))
dur *= np.arcsin(arg)
if self.e > 0.0:
dur *= np.sqrt(1 - self.e**2) / (1 + self.e * np.sin(self.omega))
return dur
@property
def e(self):
return self._e
@e.setter
def e(self, e):
if not 0 <= e < 1.0:
raise ValueError("Only bound orbits are permitted (0 <= e < 1)")
self._e = e
self._b = None
    @property
    def omega(self):
        return self.pomega
    @omega.setter
    def omega(self, v):
        self.pomega = v
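# A minimal end-to-end sketch (hypothetical parameter values):
#   star = Central(mass=1.0, radius=1.0)
#   s = System(star)
#   s.add_body(Body(radius=0.1, period=10.0, t0=2.0, b=0.3))
#   flux = s.light_curve(np.linspace(1.9, 2.1, 50))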
class System(object):
"""
A "planetary system" contains a "central" bright :class:`Central` and some
number (``>= 0``) :class:`Body` objects orbiting. The default orbits
are purely Keplerian but sub-classes can include more sophisticated
solvers.
:param central:
A :class:`Central` that specifies the central bright object.
    :param iobs:
The inclination of the mean orbital plane in degrees. This is
measured in the standard way with zero inclination meaning face on and
90 degrees is edge on. (default: ``90.0``)
"""
def __init__(self, central, iobs=90.0):
self.central = central
self.central.system = self
self.bodies = []
self.iobs = iobs
self.unfrozen = np.zeros(6, dtype=bool)
def add_body(self, body):
"""
Add a :class:`Body` to the system. This function also sets the
``system`` attribute of the body.
:param body:
The :class:`Body` to add.
"""
body.system = self
self.bodies.append(body)
self.unfrozen = np.concatenate((
self.unfrozen[:-2], np.zeros(7, dtype=bool), self.unfrozen[-2:]
))
def _get_solver(self):
return CythonSolver()
def light_curve(self, t, texp=0.0, tol=1e-8, maxdepth=4, use_batman=False):
"""
Get the light curve evaluated at a list of times using the current
model.
:param t:
The times where the light curve should be evaluated (in days).
:param tol:
The stopping criterion for the exposure time integration.
:param maxdepth:
The maximum recursion depth of the exposure time integrator.
"""
t = np.atleast_1d(t)
if len(self.bodies) == 0:
return self.central.flux + np.zeros_like(t)
return CythonSolver().kepler_light_curve(len(self.bodies),
self._get_params(),
t, texp, tol, maxdepth,
use_batman=use_batman)
def light_curve_gradient(self, t, texp=0.0, tol=1e-8, maxdepth=4):
"""
Get the light curve evaluated at a list of times using the current
model.
:param t:
The times where the light curve should be evaluated (in days).
:param tol:
The stopping criterion for the exposure time integration.
:param maxdepth:
The maximum recursion depth of the exposure time integrator.
"""
t = np.atleast_1d(t)
if len(self.bodies) == 0:
grad = np.zeros((len(t), 5), dtype=float)
grad[:, 0] = 1.0
return self.central.flux + np.zeros_like(t), grad[:, self.unfrozen]
f, df = CythonSolver().kepler_gradient(len(self.bodies),
self._get_params(),
t, texp, tol, maxdepth)
return f, df[:, self.unfrozen].T
def __len__(self):
return np.sum(self.unfrozen)
def _parameter_names(self):
names = ["central:ln_flux", "central:ln_radius", "central:ln_mass"]
for i, body in enumerate(self.bodies):
names += map("bodies[{0}]:{{0}}".format(i).format,
("ln_radius", "ln_mass", "t0",
"sqrt_e_cos_omega", "sqrt_e_sin_omega",
"sqrt_a_cos_i", "sqrt_a_sin_i"))
names += ["central:q1", "central:q2", "central:dilution"]
return names
def get_parameter_names(self, full=False):
if full:
return self._parameter_names()
return [n for n, f in zip(self._parameter_names(), self.unfrozen)
if f]
def check_vector(self, vector):
params = self._get_params()
params[self.unfrozen] = vector
for i, body in enumerate(self.bodies):
n = 3 + 7 * i
ecosp, esinp = params[n+3:n+5]
e = ecosp**2 + esinp**2
if not 0 <= e < 1.0:
return False
return True
def get_vector(self):
return self._get_params()[self.unfrozen]
def _get_params(self):
params = np.empty(6+7*len(self.bodies))
params[0] = np.log(self.central.flux)
params[1] = np.log(self.central.radius)
params[2] = np.log(self.central.mass)
params[-3] = np.log(self.central.q1)-np.log(1.0-self.central.q1)
params[-2] = np.log(self.central.q2)-np.log(1.0-self.central.q2)
params[-1] = self.central.dilution
# np.log(self.central.dilution)-np.log(1.0-self.central.dilution)
for i, body in enumerate(self.bodies):
n = 3 + 7 * i
params[n] = np.log(body.r)
params[n+1] = np.log(max(body.mass, 1e-14))
params[n+2] = body.t0
params[n+3] = np.sqrt(body.e) * np.cos(body.omega)
params[n+4] = np.sqrt(body.e) * np.sin(body.omega)
sa = np.sqrt(body.a)
ix = np.radians(self.iobs + body.ix)
params[n+5] = sa * np.cos(ix)
params[n+6] = sa * np.sin(ix)
return params
def set_vector(self, vector):
params = self._get_params()
params[self.unfrozen] = vector
self._set_params(params)
def _set_params(self, params):
self.central.flux = np.exp(params[0])
self.central.radius = np.exp(params[1])
self.central.mass = np.exp(params[2])
self.central.q1 = max(0.0, min(1.0, 1.0 / (1. + np.exp(-params[-3]))))
self.central.q2 = max(0.0, min(1.0, 1.0 / (1. + np.exp(-params[-2]))))
self.central.dilution = params[-1]
# max(0.0, min(1.0, 1.0 / (1. + np.exp(-params[-1]))))
for i, body in enumerate(self.bodies):
n = 3 + 7 * i
body.r = np.exp(params[n])
body.mass = np.exp(params[n+1])
body.t0 = params[n+2]
ecosp, esinp = params[n+3:n+5]
body.e = ecosp**2 + esinp**2
body.omega = np.arctan2(esinp, ecosp)
ax, ay = params[n+5:n+7]
body.a = ax**2 + ay**2
body.ix = np.degrees(np.arctan2(ay, ax)) - self.iobs
def get_value(self, t, **kwargs):
return self.light_curve(t, **kwargs)
def get_gradient(self, t, **kwargs):
return self.light_curve_gradient(t, **kwargs)[1]
def freeze_parameter(self, parameter_name):
any_ = False
for i, k in enumerate(self._parameter_names()):
if not fnmatch.fnmatch(k, parameter_name):
continue
any_ = True
self.unfrozen[i] = False
if not any_:
raise ValueError("unknown parameter '{0}'".format(parameter_name))
def thaw_parameter(self, parameter_name):
any_ = False
for i, k in enumerate(self._parameter_names()):
if not fnmatch.fnmatch(k, parameter_name):
continue
any_ = True
self.unfrozen[i] = True
if not any_:
raise ValueError("unknown parameter '{0}'".format(parameter_name))
def get_parameter(self, parameter_name):
vector = self._get_params()
params = []
for i, k in enumerate(self._parameter_names()):
if not fnmatch.fnmatch(k, parameter_name):
continue
params.append(vector[i])
if len(params) == 0:
raise ValueError("unknown parameter '{0}'".format(parameter_name))
if len(params) == 1:
return params[0]
return np.array(params)
def set_parameter(self, parameter_name, value):
vector = self._get_params()
any_ = False
for i, k in enumerate(self._parameter_names()):
if not fnmatch.fnmatch(k, parameter_name):
continue
any_ = True
vector[i] = value
if not any_:
raise ValueError("unknown parameter '{0}'".format(parameter_name))
self._set_params(vector)
def get_bounds(self):
return [(None, None) for _ in range(len(self))]
def jacobian(self):
star = self.central
j = 0.0
names = self.get_parameter_names()
if "central:q1" in names:
q = star.q1
j += np.log(q) + np.log(1.0 - q)
if "central:q2" in names:
q = star.q2
j += np.log(q) + np.log(1.0 - q)
# if "central:dilution" in names:
# q = star.dilution
# j += np.log(q) + np.log(1.0 - q)
return j
def jacobian_gradient(self):
names = self.get_parameter_names()
j = np.zeros(len(names))
if "central:q1" in names:
q = self.central.q1
j[names.index("central:q1")] = 1. - 2*q
if "central:q2" in names:
q = self.central.q2
j[names.index("central:q2")] = 1. - 2*q
# if "central:dilution" in names:
# q = self.central.dilution
# j[names.index("central:dilution")] = 1. - 2*q
return j
| dfm/transit | transit/transit.py | Python | mit | 20,191 | 0.00005 |
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: nxos_pim
version_added: "2.2"
short_description: Manages configuration of a PIM instance.
description:
- Manages configuration of a Protocol Independent Multicast (PIM) instance.
author: Gabriele Gerbino (@GGabriele)
extends_documentation_fragment: nxos
options:
ssm_range:
description:
- Configure group ranges for Source Specific Multicast (SSM).
Valid values are multicast addresses or the keyword 'none'.
required: true
'''
EXAMPLES = '''
- nxos_pim:
ssm_range: "232.0.0.0/8"
username: "{{ un }}"
password: "{{ pwd }}"
host: "{{ inventory_hostname }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: verbose mode
type: dict
sample: {"ssm_range": "232.0.0.0/8"}
existing:
description: k/v pairs of existing PIM configuration
returned: verbose mode
type: dict
sample: {"ssm_range": none}
end_state:
description: k/v pairs of BGP configuration after module execution
returned: verbose mode
type: dict
sample: {"ssm_range": "232.0.0.0/8"}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["ip pim ssm range 232.0.0.0/8"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
# COMMON CODE FOR MIGRATION
import re
from ansible.module_utils.basic import get_exception
from ansible.module_utils.netcfg import NetworkConfig, ConfigLine
from ansible.module_utils.shell import ShellError
try:
from ansible.module_utils.nxos import get_module
except ImportError:
from ansible.module_utils.nxos import NetworkModule
def to_list(val):
if isinstance(val, (list, tuple)):
return list(val)
elif val is not None:
return [val]
else:
return list()
class CustomNetworkConfig(NetworkConfig):
def expand_section(self, configobj, S=None):
if S is None:
S = list()
S.append(configobj)
for child in configobj.children:
if child in S:
continue
self.expand_section(child, S)
return S
def get_object(self, path):
for item in self.items:
if item.text == path[-1]:
parents = [p.text for p in item.parents]
if parents == path[:-1]:
return item
def to_block(self, section):
return '\n'.join([item.raw for item in section])
def get_section(self, path):
try:
section = self.get_section_objects(path)
return self.to_block(section)
except ValueError:
return list()
def get_section_objects(self, path):
if not isinstance(path, list):
path = [path]
obj = self.get_object(path)
if not obj:
raise ValueError('path does not exist in config')
return self.expand_section(obj)
def add(self, lines, parents=None):
"""Adds one or lines of configuration
"""
ancestors = list()
offset = 0
obj = None
## global config command
if not parents:
for line in to_list(lines):
item = ConfigLine(line)
item.raw = line
if item not in self.items:
self.items.append(item)
else:
for index, p in enumerate(parents):
try:
i = index + 1
obj = self.get_section_objects(parents[:i])[0]
ancestors.append(obj)
except ValueError:
# add parent to config
offset = index * self.indent
obj = ConfigLine(p)
obj.raw = p.rjust(len(p) + offset)
if ancestors:
obj.parents = list(ancestors)
ancestors[-1].children.append(obj)
self.items.append(obj)
ancestors.append(obj)
# add child objects
for line in to_list(lines):
# check if child already exists
for child in ancestors[-1].children:
if child.text == line:
break
else:
offset = len(parents) * self.indent
item = ConfigLine(line)
item.raw = line.rjust(len(line) + offset)
item.parents = ancestors
ancestors[-1].children.append(item)
self.items.append(item)
def get_network_module(**kwargs):
try:
return get_module(**kwargs)
except NameError:
return NetworkModule(**kwargs)
def get_config(module, include_defaults=False):
config = module.params['config']
if not config:
try:
config = module.get_config()
except AttributeError:
defaults = module.params['include_defaults']
config = module.config.get_config(include_defaults=defaults)
return CustomNetworkConfig(indent=2, contents=config)
def load_config(module, candidate):
config = get_config(module)
commands = candidate.difference(config)
commands = [str(c).strip() for c in commands]
save_config = module.params['save']
result = dict(changed=False)
if commands:
if not module.check_mode:
try:
module.configure(commands)
except AttributeError:
module.config(commands)
if save_config:
try:
module.config.save_config()
except AttributeError:
module.execute(['copy running-config startup-config'])
result['changed'] = True
result['updates'] = commands
return result
# END OF COMMON CODE
PARAM_TO_COMMAND_KEYMAP = {
'ssm_range': 'ip pim ssm range'
}
PARAM_TO_DEFAULT_KEYMAP = {}
WARNINGS = []
def invoke(name, *args, **kwargs):
func = globals().get(name)
if func:
return func(*args, **kwargs)
def get_value(arg, config, module):
REGEX = re.compile(r'(?:{0}\s)(?P<value>.*)$'.format(PARAM_TO_COMMAND_KEYMAP[arg]), re.M)
value = ''
if PARAM_TO_COMMAND_KEYMAP[arg] in config:
value = REGEX.search(config).group('value')
return value
def get_existing(module, args):
existing = {}
config = str(get_config(module))
for arg in args:
existing[arg] = get_value(arg, config, module)
return existing
def apply_key_map(key_map, table):
new_dict = {}
for key, value in table.items():
new_key = key_map.get(key)
if new_key:
value = table.get(key)
if value:
new_dict[new_key] = value
else:
new_dict[new_key] = value
return new_dict
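# For example, with PARAM_TO_COMMAND_KEYMAP this turns the proposed
# {'ssm_range': '232.0.0.0/8'} into {'ip pim ssm range': '232.0.0.0/8'}.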
def get_commands(module, existing, proposed, candidate):
commands = list()
proposed_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, proposed)
existing_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, existing)
for key, value in proposed_commands.items():
command = '{0} {1}'.format(key, value)
commands.append(command)
if commands:
candidate.add(commands, parents=[])
def main():
argument_spec = dict(
ssm_range=dict(required=True, type='str'),
m_facts=dict(required=False, default=False, type='bool'),
include_defaults=dict(default=False),
config=dict(),
save=dict(type='bool', default=False)
)
module = get_network_module(argument_spec=argument_spec,
supports_check_mode=True)
splitted_ssm_range = module.params['ssm_range'].split('.')
if len(splitted_ssm_range) != 4 and module.params['ssm_range'] != 'none':
module.fail_json(msg="Valid ssm_range values are multicast addresses "
"or the keyword 'none'.")
args = [
'ssm_range'
]
existing = invoke('get_existing', module, args)
end_state = existing
proposed = dict((k, v) for k, v in module.params.items()
if v is not None and k in args)
result = {}
candidate = CustomNetworkConfig(indent=3)
invoke('get_commands', module, existing, proposed, candidate)
try:
response = load_config(module, candidate)
result.update(response)
except ShellError:
exc = get_exception()
module.fail_json(msg=str(exc))
result['connected'] = module.connected
if module._verbosity > 0:
end_state = invoke('get_existing', module, args)
result['end_state'] = end_state
result['existing'] = existing
result['proposed'] = proposed
if WARNINGS:
result['warnings'] = WARNINGS
module.exit_json(**result)
if __name__ == '__main__':
main()
| ColOfAbRiX/ansible | lib/ansible/modules/network/nxos/nxos_pim.py | Python | gpl-3.0 | 9,817 | 0.001834 |
import numpy as np
from datetime import datetime
import pytz
from zipline.algorithm import TradingAlgorithm
#from zipline.utils.factory import load_from_yahoo
from pulley.zp.data.loader import load_bars_from_yahoo
from zipline.finance import commission
#STOCKS = ['AMD', 'CERN', 'COST', 'DELL', 'GPS', 'INTC', 'MMM']
STOCKS = ['EEM', 'EFA', 'EWJ', 'ICF', 'IEF', 'IEV', 'IWM', 'IVV', 'TIP', 'TLT']
# On-Line Portfolio Moving Average Reversion
# More info can be found in the corresponding paper:
# http://icml.cc/2012/papers/168.pdf
def initialize(algo, eps=1, window_length=5):
algo.stocks = STOCKS
algo.sids = [algo.symbol(symbol) for symbol in algo.stocks]
algo.m = len(algo.stocks)
algo.price = {}
algo.b_t = np.ones(algo.m) / algo.m
algo.last_desired_port = np.ones(algo.m) / algo.m
algo.eps = eps
algo.init = True
algo.days = 0
algo.window_length = window_length
algo.add_transform('mavg', 5)
algo.set_commission(commission.PerShare(cost=0))
def handle_data(algo, data):
algo.days += 1
if algo.days < algo.window_length + 1:
return
if algo.init:
rebalance_portfolio(algo, data, algo.b_t)
algo.init = False
return
m = algo.m
x_tilde = np.zeros(m)
b = np.zeros(m)
# find relative moving average price for each assets
for i, sid in enumerate(algo.sids):
price = data[sid].price
# Relative mean deviation
x_tilde[i] = data[sid].mavg(algo.window_length) / price # <NP> Transform/mavg broken
###########################
# Inside of OLMAR (algo 2)
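    # The update below follows the paper: with predicted price relatives
    # x_tilde, set b_{t+1} = b_t + lambda * (x_tilde - x_bar * 1), where
    # lambda = max(0, (eps - b_t . x_tilde) / ||x_tilde - x_bar * 1||^2),
    # then project b_{t+1} back onto the probability simplex.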
x_bar = x_tilde.mean()
# market relative deviation
mark_rel_dev = x_tilde - x_bar
# Expected return with current portfolio
exp_return = np.dot(algo.b_t, x_tilde)
weight = algo.eps - exp_return
variability = (np.linalg.norm(mark_rel_dev)) ** 2
# test for divide-by-zero case
if variability == 0.0:
step_size = 0
else:
step_size = max(0, weight / variability)
b = algo.b_t + step_size * mark_rel_dev
b_norm = simplex_projection(b)
np.testing.assert_almost_equal(b_norm.sum(), 1)
rebalance_portfolio(algo, data, b_norm)
# update portfolio
algo.b_t = b_norm
def rebalance_portfolio(algo, data, desired_port):
# rebalance portfolio
desired_amount = np.zeros_like(desired_port)
current_amount = np.zeros_like(desired_port)
prices = np.zeros_like(desired_port)
if algo.init:
positions_value = algo.portfolio.starting_cash
else:
positions_value = algo.portfolio.positions_value + \
algo.portfolio.cash
for i, sid in enumerate(algo.sids):
current_amount[i] = algo.portfolio.positions[sid].amount
prices[i] = data[sid].price
desired_amount = np.round(desired_port * positions_value / prices)
algo.last_desired_port = desired_port
diff_amount = desired_amount - current_amount
for i, sid in enumerate(algo.sids):
algo.order(sid, diff_amount[i])
def simplex_projection(v, b=1):
"""Projection vectors to the simplex domain
Implemented according to the paper: Efficient projections onto the
l1-ball for learning in high dimensions, John Duchi, et al. ICML 2008.
Implementation Time: 2011 June 17 by Bin@libin AT pmail.ntu.edu.sg
    Optimization problem: min_{w} \| w - v \|_{2}^{2}
    s.t. \sum_{i=1}^{m} w_{i} = b, w_{i} \geq 0
    Input: A vector v \in R^{m}, and a scalar b > 0 (default=1)
Output: Projection vector w
:Example:
>>> proj = simplex_projection([.4 ,.3, -.4, .5])
>>> print(proj)
array([ 0.33333333, 0.23333333, 0. , 0.43333333])
>>> print(proj.sum())
1.0
Original matlab implementation: John Duchi ([email protected])
Python-port: Copyright 2013 by Thomas Wiecki ([email protected]).
"""
v = np.asarray(v)
p = len(v)
# Sort v into u in descending order
v = (v > 0) * v
u = np.sort(v)[::-1]
sv = np.cumsum(u)
rho = np.where(u > (sv - b) / np.arange(1, p + 1))[0][-1]
theta = np.max([0, (sv[rho] - b) / (rho + 1)])
w = (v - theta)
w[w < 0] = 0
return w
# # Note: this function can be removed if running
# # this algorithm on quantopian.com
# def analyze(context=None, results=None):
# import matplotlib.pyplot as plt
# fig = plt.figure()
# ax = fig.add_subplot(111)
# results.portfolio_value.plot(ax=ax)
# ax.set_ylabel('Portfolio value (USD)')
# plt.show()
# Note: this if-block should be removed if running
# this algorithm on quantopian.com
if __name__ == '__main__':
start = datetime(2007, 1, 1, 0, 0, 0, 0, pytz.utc)
end = datetime(2008, 1, 1, 0, 0, 0, 0, pytz.utc)
if True:
# Load price data from yahoo.
data, bad_syms = load_bars_from_yahoo(stocks=STOCKS, indexes={}, start=start, end=end)
data = data.dropna()
else:
from pulley.utils.data_io import load_pickle
data = load_pickle('/media/ssd/quant-quote/df-minbar-11-etfs-20040102-20140506-close-only-prepro.pkl')
data = data.loc[STOCKS, start:end, 'price']
#data.items = np.arange(0, len(data.items)) # for sid's
# Create and run the algorithm.
olmar = TradingAlgorithm(handle_data=handle_data,
initialize=initialize,
identifiers=STOCKS)
results = olmar.run(data)
# Plot the portfolio data.
# analyze(results=results)
| jimgoo/zipline-fork | zipline/examples/olmar.py | Python | apache-2.0 | 5,460 | 0.002198 |
#coding: utf-8
import os
import sys
PWD = os.path.dirname(os.path.realpath(__file__))
WORKDIR = os.path.join(PWD, '../')
BINARYS = {
'REDIS_SERVER_BINS' : os.path.join(WORKDIR, '_binaries/redis-*'),
'REDIS_CLI' : os.path.join(WORKDIR, '_binaries/redis-cli'),
'MEMCACHED_BINS' : os.path.join(WORKDIR, '_binaries/memcached'),
'NUTCRACKER_BINS' : os.path.join(WORKDIR, '_binaries/nutcrackers'),
}
| vipshop/twemproxies | tests/conf/conf.py | Python | apache-2.0 | 436 | 0.013761 |
# Copyright 2012 Nebula, Inc.
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import forms
from django import http
from django import shortcuts
from django.template import defaultfilters
from mox3.mox import IsA # noqa
from horizon import tables
from horizon.tables import formset as table_formset
from horizon.tables import views as table_views
from horizon.test import helpers as test
class FakeObject(object):
def __init__(self, id, name, value, status, optional=None, excluded=None):
self.id = id
self.name = name
self.value = value
self.status = status
self.optional = optional
self.excluded = excluded
self.extra = "extra"
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self.name)
TEST_DATA = (
FakeObject('1', 'object_1', 'value_1', 'up', 'optional_1', 'excluded_1'),
FakeObject('2', 'object_2', '<strong>evil</strong>', 'down', 'optional_2'),
FakeObject('3', 'object_3', 'value_3', 'up'),
)
TEST_DATA_2 = (
FakeObject('1', 'object_1', 'value_1', 'down', 'optional_1', 'excluded_1'),
)
TEST_DATA_3 = (
FakeObject('1', 'object_1', 'value_1', 'up', 'optional_1', 'excluded_1'),
)
TEST_DATA_4 = (
FakeObject('1', 'object_1', 2, 'up'),
FakeObject('2', 'object_2', 4, 'up'),
)
TEST_DATA_5 = (
FakeObject('1', 'object_1', 'value_1',
'A Status that is longer than 35 characters!', 'optional_1'),
)
TEST_DATA_6 = (
FakeObject('1', 'object_1', 'DELETED', 'down'),
FakeObject('2', 'object_2', 'CREATED', 'up'),
FakeObject('3', 'object_3', 'STANDBY', 'standby'),
)
TEST_DATA_7 = (
FakeObject('1', 'wrapped name', 'wrapped value', 'status',
'not wrapped optional'),
)
class MyLinkAction(tables.LinkAction):
name = "login"
verbose_name = "Log In"
url = "login"
attrs = {
"class": "ajax-modal",
}
def get_link_url(self, datum=None, *args, **kwargs):
return reverse(self.url)
class MyAction(tables.Action):
name = "delete"
verbose_name = "Delete Me"
verbose_name_plural = "Delete Them"
def allowed(self, request, obj=None):
return getattr(obj, 'status', None) != 'down'
def handle(self, data_table, request, object_ids):
return shortcuts.redirect('http://example.com/?ids=%s'
% ",".join(object_ids))
class MyColumn(tables.Column):
pass
class MyRowSelectable(tables.Row):
ajax = True
def can_be_selected(self, datum):
return datum.value != 'DELETED'
class MyRow(tables.Row):
ajax = True
@classmethod
def get_data(cls, request, obj_id):
return TEST_DATA_2[0]
class MyBatchAction(tables.BatchAction):
name = "batch"
action_present = "Batch"
action_past = "Batched"
data_type_singular = "Item"
data_type_plural = "Items"
def action(self, request, object_ids):
pass
class MyBatchActionWithHelpText(MyBatchAction):
name = "batch_help"
help_text = "this is help."
action_present = "BatchHelp"
action_past = "BatchedHelp"
class MyToggleAction(tables.BatchAction):
name = "toggle"
action_present = ("Down", "Up")
action_past = ("Downed", "Upped")
data_type_singular = "Item"
data_type_plural = "Items"
def allowed(self, request, obj=None):
if not obj:
return False
self.down = getattr(obj, 'status', None) == 'down'
if self.down:
self.current_present_action = 1
return self.down or getattr(obj, 'status', None) == 'up'
def action(self, request, object_ids):
if self.down:
# up it
self.current_past_action = 1
class MyDisabledAction(MyToggleAction):
def allowed(self, request, obj=None):
return False
class MyFilterAction(tables.FilterAction):
def filter(self, table, objs, filter_string):
q = filter_string.lower()
def comp(obj):
if q in obj.name.lower():
return True
return False
return filter(comp, objs)
class MyServerFilterAction(tables.FilterAction):
filter_type = 'server'
filter_choices = (('name', 'Name', False),
('status', 'Status', True))
needs_preloading = True
def filter(self, table, items, filter_string):
filter_field = table.get_filter_field()
if filter_field == 'name' and filter_string:
return [item for item in items
if filter_string in item.name]
return items
class MyUpdateAction(tables.UpdateAction):
def allowed(self, *args):
return True
def update_cell(self, *args):
pass
class MyUpdateActionNotAllowed(MyUpdateAction):
def allowed(self, *args):
return False
def get_name(obj):
return "custom %s" % obj.name
def get_link(obj):
return reverse('login')
class MyTable(tables.DataTable):
tooltip_dict = {'up': {'title': 'service is up and running',
'style': 'color:green;cursor:pointer'},
'down': {'title': 'service is not available',
'style': 'color:red;cursor:pointer'}}
id = tables.Column('id', hidden=True, sortable=False)
name = tables.Column(get_name,
verbose_name="Verbose Name",
sortable=True,
form_field=forms.CharField(required=True),
form_field_attributes={'class': 'test'},
update_action=MyUpdateAction)
value = tables.Column('value',
sortable=True,
link='http://example.com/',
attrs={'class': 'green blue'},
summation="average",
link_classes=('link-modal',),
link_attrs={'data-type': 'modal dialog',
'data-tip': 'click for dialog'})
status = tables.Column('status', link=get_link, truncate=35,
cell_attributes_getter=tooltip_dict.get)
optional = tables.Column('optional', empty_value='N/A')
excluded = tables.Column('excluded')
class Meta(object):
name = "my_table"
verbose_name = "My Table"
status_columns = ["status"]
columns = ('id', 'name', 'value', 'optional', 'status')
row_class = MyRow
column_class = MyColumn
table_actions = (MyFilterAction, MyAction, MyBatchAction,
MyBatchActionWithHelpText)
row_actions = (MyAction, MyLinkAction, MyBatchAction, MyToggleAction,
MyBatchActionWithHelpText)
class MyServerFilterTable(MyTable):
class Meta(object):
name = "my_table"
verbose_name = "My Table"
status_columns = ["status"]
columns = ('id', 'name', 'value', 'optional', 'status')
row_class = MyRow
column_class = MyColumn
table_actions = (MyServerFilterAction, MyAction, MyBatchAction)
row_actions = (MyAction, MyLinkAction, MyBatchAction, MyToggleAction,
MyBatchActionWithHelpText)
class MyTableSelectable(MyTable):
class Meta(object):
name = "my_table"
columns = ('id', 'name', 'value', 'status')
row_class = MyRowSelectable
status_columns = ["status"]
multi_select = True
class MyTableNotAllowedInlineEdit(MyTable):
name = tables.Column(get_name,
verbose_name="Verbose Name",
sortable=True,
form_field=forms.CharField(required=True),
form_field_attributes={'class': 'test'},
update_action=MyUpdateActionNotAllowed)
class Meta(object):
name = "my_table"
columns = ('id', 'name', 'value', 'optional', 'status')
row_class = MyRow
class MyTableWrapList(MyTable):
name = tables.Column('name',
form_field=forms.CharField(required=True),
form_field_attributes={'class': 'test'},
update_action=MyUpdateActionNotAllowed,
wrap_list=True)
value = tables.Column('value',
wrap_list=True)
optional = tables.Column('optional',
wrap_list=False)
class NoActionsTable(tables.DataTable):
id = tables.Column('id')
class Meta(object):
name = "no_actions_table"
verbose_name = "No Actions Table"
table_actions = ()
row_actions = ()
class DisabledActionsTable(tables.DataTable):
id = tables.Column('id')
class Meta(object):
name = "disabled_actions_table"
verbose_name = "Disabled Actions Table"
table_actions = (MyDisabledAction,)
row_actions = ()
multi_select = True
class DataTableTests(test.TestCase):
def test_table_instantiation(self):
"""Tests everything that happens when the table is instantiated."""
self.table = MyTable(self.request, TEST_DATA)
# Properties defined on the table
self.assertEqual(TEST_DATA, self.table.data)
self.assertEqual("my_table", self.table.name)
# Verify calculated options that weren't specified explicitly
self.assertTrue(self.table._meta.actions_column)
self.assertTrue(self.table._meta.multi_select)
# Test for verbose_name
self.assertEqual(u"My Table", unicode(self.table))
# Column ordering and exclusion.
# This should include auto-columns for multi_select and actions,
# but should not contain the excluded column.
# Additionally, auto-generated columns should use the custom
# column class specified on the table.
self.assertQuerysetEqual(self.table.columns.values(),
['<MyColumn: multi_select>',
'<Column: id>',
'<Column: name>',
'<Column: value>',
'<Column: optional>',
'<Column: status>',
'<MyColumn: actions>'])
# Actions (these also test ordering)
self.assertQuerysetEqual(self.table.base_actions.values(),
['<MyBatchAction: batch>',
'<MyBatchActionWithHelpText: batch_help>',
'<MyAction: delete>',
'<MyFilterAction: filter>',
'<MyLinkAction: login>',
'<MyToggleAction: toggle>'])
self.assertQuerysetEqual(self.table.get_table_actions(),
['<MyFilterAction: filter>',
'<MyAction: delete>',
'<MyBatchAction: batch>',
'<MyBatchActionWithHelpText: batch_help>'])
self.assertQuerysetEqual(self.table.get_row_actions(TEST_DATA[0]),
['<MyAction: delete>',
'<MyLinkAction: login>',
'<MyBatchAction: batch>',
'<MyToggleAction: toggle>',
'<MyBatchActionWithHelpText: batch_help>'])
# Auto-generated columns
multi_select = self.table.columns['multi_select']
self.assertEqual("multi_select", multi_select.auto)
self.assertEqual("multi_select_column",
multi_select.get_final_attrs().get('class', ""))
actions = self.table.columns['actions']
self.assertEqual("actions", actions.auto)
self.assertEqual("actions_column",
actions.get_final_attrs().get('class', ""))
# In-line edit action on column.
name_column = self.table.columns['name']
self.assertEqual(MyUpdateAction, name_column.update_action)
self.assertEqual(forms.CharField, name_column.form_field.__class__)
self.assertEqual({'class': 'test'}, name_column.form_field_attributes)
def test_table_force_no_multiselect(self):
class TempTable(MyTable):
class Meta(object):
columns = ('id',)
table_actions = (MyFilterAction, MyAction,)
row_actions = (MyAction, MyLinkAction,)
multi_select = False
self.table = TempTable(self.request, TEST_DATA)
self.assertQuerysetEqual(self.table.columns.values(),
['<Column: id>',
'<Column: actions>'])
def test_table_force_no_actions_column(self):
class TempTable(MyTable):
class Meta(object):
columns = ('id',)
table_actions = (MyFilterAction, MyAction,)
row_actions = (MyAction, MyLinkAction,)
actions_column = False
self.table = TempTable(self.request, TEST_DATA)
self.assertQuerysetEqual(self.table.columns.values(),
['<Column: multi_select>',
'<Column: id>'])
def test_table_natural_no_inline_editing(self):
class TempTable(MyTable):
name = tables.Column(get_name,
verbose_name="Verbose Name",
sortable=True)
class Meta(object):
name = "my_table"
columns = ('id', 'name', 'value', 'optional', 'status')
self.table = TempTable(self.request, TEST_DATA_2)
name_column = self.table.columns['name']
self.assertIsNone(name_column.update_action)
self.assertIsNone(name_column.form_field)
self.assertEqual({}, name_column.form_field_attributes)
def test_table_natural_no_actions_column(self):
class TempTable(MyTable):
class Meta(object):
columns = ('id',)
table_actions = (MyFilterAction, MyAction,)
self.table = TempTable(self.request, TEST_DATA)
self.assertQuerysetEqual(self.table.columns.values(),
['<Column: multi_select>',
'<Column: id>'])
def test_table_natural_no_multiselect(self):
class TempTable(MyTable):
class Meta(object):
columns = ('id',)
row_actions = (MyAction, MyLinkAction,)
self.table = TempTable(self.request, TEST_DATA)
self.assertQuerysetEqual(self.table.columns.values(),
['<Column: id>',
'<Column: actions>'])
def test_table_column_inheritance(self):
class TempTable(MyTable):
extra = tables.Column('extra')
class Meta(object):
name = "temp_table"
table_actions = (MyFilterAction, MyAction,)
row_actions = (MyAction, MyLinkAction,)
self.table = TempTable(self.request, TEST_DATA)
self.assertQuerysetEqual(self.table.columns.values(),
['<Column: multi_select>',
'<Column: id>',
'<Column: name>',
'<Column: value>',
'<Column: status>',
'<Column: optional>',
'<Column: excluded>',
'<Column: extra>',
'<Column: actions>'])
def test_table_construction(self):
self.table = MyTable(self.request, TEST_DATA)
# Verify we retrieve the right columns for headers
columns = self.table.get_columns()
self.assertQuerysetEqual(columns, ['<MyColumn: multi_select>',
'<Column: id>',
'<Column: name>',
'<Column: value>',
'<Column: optional>',
'<Column: status>',
'<MyColumn: actions>'])
# Verify we retrieve the right rows from our data
rows = self.table.get_rows()
self.assertQuerysetEqual(rows, ['<MyRow: my_table__row__1>',
'<MyRow: my_table__row__2>',
'<MyRow: my_table__row__3>'])
# Verify each row contains the right cells
self.assertQuerysetEqual(rows[0].get_cells(),
['<Cell: multi_select, my_table__row__1>',
'<Cell: id, my_table__row__1>',
'<Cell: name, my_table__row__1>',
'<Cell: value, my_table__row__1>',
'<Cell: optional, my_table__row__1>',
'<Cell: status, my_table__row__1>',
'<Cell: actions, my_table__row__1>'])
def test_table_column(self):
self.table = MyTable(self.request, TEST_DATA)
row = self.table.get_rows()[0]
row3 = self.table.get_rows()[2]
id_col = self.table.columns['id']
name_col = self.table.columns['name']
value_col = self.table.columns['value']
# transform
self.assertEqual('1', row.cells['id'].data) # Standard attr access
self.assertEqual('custom object_1', row.cells['name'].data) # Callable
# name and verbose_name
self.assertEqual("Id", unicode(id_col))
self.assertEqual("Verbose Name", unicode(name_col))
# sortable
self.assertEqual(False, id_col.sortable)
self.assertNotIn("sortable", id_col.get_final_attrs().get('class', ""))
self.assertEqual(True, name_col.sortable)
self.assertIn("sortable", name_col.get_final_attrs().get('class', ""))
# hidden
self.assertEqual(True, id_col.hidden)
self.assertIn("hide", id_col.get_final_attrs().get('class', ""))
self.assertEqual(False, name_col.hidden)
self.assertNotIn("hide", name_col.get_final_attrs().get('class', ""))
# link, link_classes, link_attrs, and get_link_url
self.assertIn('href="http://example.com/"', row.cells['value'].value)
self.assertIn('class="link-modal"', row.cells['value'].value)
self.assertIn('data-type="modal dialog"', row.cells['value'].value)
self.assertIn('data-tip="click for dialog"', row.cells['value'].value)
self.assertIn('href="/auth/login/"', row.cells['status'].value)
# empty_value
self.assertEqual("N/A", row3.cells['optional'].value)
# classes
self.assertEqual("green blue sortable anchor normal_column",
value_col.get_final_attrs().get('class', ""))
# status
cell_status = row.cells['status'].status
self.assertEqual(True, cell_status)
self.assertEqual('status_up',
row.cells['status'].get_status_class(cell_status))
# status_choices
id_col.status = True
id_col.status_choices = (('1', False), ('2', True), ('3', None))
cell_status = row.cells['id'].status
self.assertEqual(False, cell_status)
self.assertEqual('status_down',
row.cells['id'].get_status_class(cell_status))
cell_status = row3.cells['id'].status
self.assertIsNone(cell_status)
self.assertEqual('status_unknown',
row.cells['id'].get_status_class(cell_status))
# Ensure data is not cached on the column across table instances
self.table = MyTable(self.request, TEST_DATA_2)
row = self.table.get_rows()[0]
self.assertTrue("down" in row.cells['status'].value)
def test_table_row(self):
self.table = MyTable(self.request, TEST_DATA)
row = self.table.get_rows()[0]
self.assertEqual(self.table, row.table)
self.assertEqual(TEST_DATA[0], row.datum)
self.assertEqual('my_table__row__1', row.id)
# Verify row status works even if status isn't set on the column
self.assertEqual(True, row.status)
self.assertEqual('status_up', row.status_class)
# Check the cells as well
cell_status = row.cells['status'].status
self.assertEqual(True, cell_status)
self.assertEqual('status_up',
row.cells['status'].get_status_class(cell_status))
def test_table_column_truncation(self):
self.table = MyTable(self.request, TEST_DATA_5)
row = self.table.get_rows()[0]
self.assertEqual(35, len(row.cells['status'].data))
self.assertEqual(u'A Status that is longer than 35 ...',
row.cells['status'].data)
def test_table_rendering(self):
self.table = MyTable(self.request, TEST_DATA)
# Table actions
table_actions = self.table.render_table_actions()
resp = http.HttpResponse(table_actions)
self.assertContains(resp, "table_search", 1)
self.assertContains(resp, "my_table__filter__q", 1)
self.assertContains(resp, "my_table__delete", 1)
self.assertContains(resp, 'id="my_table__action_delete"', 1)
# Table BatchActions
self.assertContains(resp, 'id="my_table__action_batch_help"', 1)
self.assertContains(resp, 'help_text="this is help."', 1)
self.assertContains(resp, 'BatchHelp Item', 1)
# Row actions
row_actions = self.table.render_row_actions(TEST_DATA[0])
resp = http.HttpResponse(row_actions)
self.assertContains(resp, "<li", 4)
self.assertContains(resp, "my_table__delete__1", 1)
self.assertContains(resp, "my_table__toggle__1", 1)
self.assertContains(resp, "/auth/login/", 1)
self.assertContains(resp, "ajax-modal", 1)
self.assertContains(resp, 'id="my_table__row_1__action_delete"', 1)
# Row BatchActions
row_actions = self.table.render_row_actions(TEST_DATA[0])
resp = http.HttpResponse(row_actions)
self.assertContains(resp, 'id="my_table__row_1__action_batch_help"', 1)
self.assertContains(resp, 'help_text="this is help."', 1)
self.assertContains(resp, 'value="my_table__batch_help__1"', 1)
self.assertContains(resp, 'BatchHelp Item', 1)
# Whole table
resp = http.HttpResponse(self.table.render())
self.assertContains(resp, '<table id="my_table"', 1)
self.assertContains(resp, '<th ', 8)
self.assertContains(resp, 'id="my_table__row__1"', 1)
self.assertContains(resp, 'id="my_table__row__2"', 1)
self.assertContains(resp, 'id="my_table__row__3"', 1)
update_string = "action=row_update&table=my_table&obj_id="
self.assertContains(resp, update_string, 3)
self.assertContains(resp, "data-update-interval", 3)
# Verify no table heading
self.assertNotContains(resp, "<h3 class='table_title'")
# Verify our XSS protection
self.assertContains(resp, '<a href="http://example.com/" '
'data-tip="click for dialog" '
'data-type="modal dialog" '
'class="link-modal">'
'<strong>evil</strong></a>', 1)
# Hidden Title = False shows the table title
self.table._meta.hidden_title = False
resp = http.HttpResponse(self.table.render())
self.assertContains(resp, "<h3 class='table_title'", 1)
# Filter = False hides the search box
self.table._meta.filter = False
table_actions = self.table.render_table_actions()
resp = http.HttpResponse(table_actions)
self.assertContains(resp, "table_search", 0)
def test_wrap_list_rendering(self):
self.table = MyTableWrapList(self.request, TEST_DATA_7)
row = self.table.get_rows()[0]
name_cell = row.cells['name']
value_cell = row.cells['value']
optional_cell = row.cells['optional']
        # Check if the cell is rendered correctly.
name_cell_rendered = name_cell.render()
value_cell_rendered = value_cell.render()
optional_cell_rendered = optional_cell.render()
resp_name = http.HttpResponse(name_cell_rendered)
resp_value = http.HttpResponse(value_cell_rendered)
resp_optional = http.HttpResponse(optional_cell_rendered)
self.assertContains(resp_name, '<ul>wrapped name</ul>', 1)
self.assertContains(resp_value, '<ul>wrapped value</ul>', 1)
self.assertContains(resp_optional, 'not wrapped optional', 1)
self.assertNotContains(resp_optional, '<ul>')
self.assertNotContains(resp_optional, '</ul>')
def test_inline_edit_available_cell_rendering(self):
self.table = MyTable(self.request, TEST_DATA_2)
row = self.table.get_rows()[0]
name_cell = row.cells['name']
        # Check that in-line edit is available in the cell,
        # but the cell is not in inline_edit_mod.
self.assertEqual(True,
name_cell.inline_edit_available)
self.assertEqual(False,
name_cell.inline_edit_mod)
        # Check if the cell is rendered correctly.
name_cell_rendered = name_cell.render()
resp = http.HttpResponse(name_cell_rendered)
self.assertContains(resp, '<td', 1)
self.assertContains(resp, 'inline_edit_available', 1)
self.assertContains(resp,
'data-update-url="?action=cell_update&'
'table=my_table&cell_name=name&obj_id=1"',
1)
self.assertContains(resp, 'table_cell_wrapper', 1)
self.assertContains(resp, 'table_cell_data_wrapper', 1)
self.assertContains(resp, 'table_cell_action', 1)
self.assertContains(resp, 'ajax-inline-edit', 1)
def test_inline_edit_available_not_allowed_cell_rendering(self):
self.table = MyTableNotAllowedInlineEdit(self.request, TEST_DATA_2)
row = self.table.get_rows()[0]
name_cell = row.cells['name']
        # Check that in-line edit is available in the cell,
        # but the cell is not in inline_edit_mod.
self.assertEqual(True,
name_cell.inline_edit_available)
self.assertEqual(False,
name_cell.inline_edit_mod)
        # Check if the cell is rendered correctly.
name_cell_rendered = name_cell.render()
resp = http.HttpResponse(name_cell_rendered)
self.assertContains(resp, '<td', 1)
self.assertContains(resp, 'inline_edit_available', 1)
self.assertContains(resp,
'data-update-url="?action=cell_update&'
'table=my_table&cell_name=name&obj_id=1"',
1)
self.assertContains(resp, 'table_cell_wrapper', 0)
self.assertContains(resp, 'table_cell_data_wrapper', 0)
self.assertContains(resp, 'table_cell_action', 0)
self.assertContains(resp, 'ajax-inline-edit', 0)
def test_inline_edit_mod_cell_rendering(self):
self.table = MyTable(self.request, TEST_DATA_2)
name_col = self.table.columns['name']
name_col.auto = "form_field"
row = self.table.get_rows()[0]
name_cell = row.cells['name']
name_cell.inline_edit_mod = True
        # Check that in-line edit is available in the cell and is in
        # inline_edit_mod; the column's auto attribute must also be set
        # to "form_field".
self.assertEqual(True,
name_cell.inline_edit_available)
self.assertEqual(True,
name_cell.inline_edit_mod)
self.assertEqual('form_field',
name_col.auto)
        # Check if the cell is rendered correctly.
name_cell_rendered = name_cell.render()
resp = http.HttpResponse(name_cell_rendered)
self.assertContains(resp,
'<input class="test" id="name__1" name="name__1"'
' type="text" value="custom object_1" />',
count=1, html=True)
self.assertContains(resp, '<td', 1)
self.assertContains(resp, 'inline_edit_available', 1)
self.assertContains(resp,
'data-update-url="?action=cell_update&'
'table=my_table&cell_name=name&obj_id=1"',
1)
self.assertContains(resp, 'table_cell_wrapper', 1)
self.assertContains(resp, 'inline-edit-error', 1)
self.assertContains(resp, 'inline-edit-form', 1)
self.assertContains(resp, 'inline-edit-actions', 1)
self.assertContains(resp, 'inline-edit-submit', 1)
self.assertContains(resp, 'inline-edit-cancel', 1)
def test_inline_edit_mod_checkbox_with_label(self):
class TempTable(MyTable):
name = tables.Column(get_name,
verbose_name="Verbose Name",
sortable=True,
form_field=forms.BooleanField(
required=True,
label="Verbose Name"),
form_field_attributes={'class': 'test'},
update_action=MyUpdateAction)
class Meta(object):
name = "my_table"
columns = ('id', 'name', 'value', 'optional', 'status')
self.table = TempTable(self.request, TEST_DATA_2)
name_col = self.table.columns['name']
name_col.auto = "form_field"
row = self.table.get_rows()[0]
name_cell = row.cells['name']
name_cell.inline_edit_mod = True
        # Check if the cell is rendered correctly.
name_cell_rendered = name_cell.render()
resp = http.HttpResponse(name_cell_rendered)
self.assertContains(resp,
'<input checked="checked" class="test" '
'id="name__1" name="name__1" type="checkbox" '
'value="custom object_1" />',
count=1, html=True)
self.assertContains(resp,
'<label class="inline-edit-label" for="name__1">'
'Verbose Name</label>',
count=1, html=True)
def test_table_search_action(self):
class TempTable(MyTable):
class Meta(object):
name = "my_table"
table_actions = (tables.NameFilterAction,)
        # With the filter string '2', it should return the 2nd item
action_string = "my_table__filter__q"
req = self.factory.post('/my_url/', {action_string: '2'})
self.table = TempTable(req, TEST_DATA)
self.assertQuerysetEqual(self.table.get_table_actions(),
['<NameFilterAction: filter>'])
handled = self.table.maybe_handle()
self.assertIsNone(handled)
self.assertQuerysetEqual(self.table.filtered_data,
['<FakeObject: object_2>'])
        # With an empty filter string, it should return all the data
req = self.factory.post('/my_url/', {action_string: ''})
self.table = TempTable(req, TEST_DATA)
handled = self.table.maybe_handle()
self.assertIsNone(handled)
self.assertQuerysetEqual(self.table.filtered_data,
['<FakeObject: object_1>',
'<FakeObject: object_2>',
'<FakeObject: object_3>'])
        # With an unknown value, it should return an empty list
req = self.factory.post('/my_url/', {action_string: 'horizon'})
self.table = TempTable(req, TEST_DATA)
handled = self.table.maybe_handle()
self.assertIsNone(handled)
self.assertQuerysetEqual(self.table.filtered_data, [])
def test_inline_edit_mod_textarea(self):
class TempTable(MyTable):
name = tables.Column(get_name,
verbose_name="Verbose Name",
sortable=True,
form_field=forms.CharField(
widget=forms.Textarea(),
required=False),
form_field_attributes={'class': 'test'},
update_action=MyUpdateAction)
class Meta(object):
name = "my_table"
columns = ('id', 'name', 'value', 'optional', 'status')
self.table = TempTable(self.request, TEST_DATA_2)
name_col = self.table.columns['name']
name_col.auto = "form_field"
row = self.table.get_rows()[0]
name_cell = row.cells['name']
name_cell.inline_edit_mod = True
        # Check if the cell is rendered correctly.
name_cell_rendered = name_cell.render()
resp = http.HttpResponse(name_cell_rendered)
self.assertContains(resp,
'<textarea class="test" cols="40" id="name__1" '
'name="name__1" rows="10">\r\ncustom object_1'
'</textarea>',
count=1, html=True)
def test_table_actions(self):
# Single object action
action_string = "my_table__delete__1"
req = self.factory.post('/my_url/', {'action': action_string})
self.table = MyTable(req, TEST_DATA)
self.assertEqual(('my_table', 'delete', '1'),
self.table.parse_action(action_string))
handled = self.table.maybe_handle()
self.assertEqual(302, handled.status_code)
self.assertEqual("http://example.com/?ids=1", handled["location"])
# Batch action (without toggle) conjugation behavior
req = self.factory.get('/my_url/')
self.table = MyTable(req, TEST_DATA_3)
toggle_action = self.table.get_row_actions(TEST_DATA_3[0])[2]
self.assertEqual("Batch Item", unicode(toggle_action.verbose_name))
# Batch action with custom help text
req = self.factory.get('/my_url/')
self.table = MyTable(req, TEST_DATA_3)
toggle_action = self.table.get_row_actions(TEST_DATA_3[0])[4]
self.assertEqual("BatchHelp Item", unicode(toggle_action.verbose_name))
# Single object toggle action
# GET page - 'up' to 'down'
req = self.factory.get('/my_url/')
self.table = MyTable(req, TEST_DATA_3)
self.assertEqual(5, len(self.table.get_row_actions(TEST_DATA_3[0])))
toggle_action = self.table.get_row_actions(TEST_DATA_3[0])[3]
self.assertEqual("Down Item", unicode(toggle_action.verbose_name))
# Toggle from status 'up' to 'down'
# POST page
action_string = "my_table__toggle__1"
req = self.factory.post('/my_url/', {'action': action_string})
self.table = MyTable(req, TEST_DATA)
self.assertEqual(('my_table', 'toggle', '1'),
self.table.parse_action(action_string))
handled = self.table.maybe_handle()
self.assertEqual(302, handled.status_code)
self.assertEqual("/my_url/", handled["location"])
self.assertEqual(u"Downed Item: object_1",
list(req._messages)[0].message)
# Toggle from status 'down' to 'up'
# GET page - 'down' to 'up'
req = self.factory.get('/my_url/')
self.table = MyTable(req, TEST_DATA_2)
self.assertEqual(4, len(self.table.get_row_actions(TEST_DATA_2[0])))
toggle_action = self.table.get_row_actions(TEST_DATA_2[0])[2]
self.assertEqual("Up Item", unicode(toggle_action.verbose_name))
# POST page
action_string = "my_table__toggle__2"
req = self.factory.post('/my_url/', {'action': action_string})
self.table = MyTable(req, TEST_DATA)
self.assertEqual(('my_table', 'toggle', '2'),
self.table.parse_action(action_string))
handled = self.table.maybe_handle()
self.assertEqual(302, handled.status_code)
self.assertEqual("/my_url/", handled["location"])
self.assertEqual(u"Upped Item: object_2",
list(req._messages)[0].message)
        # Underscores may appear in the object id
        # (because Swift supports custom object ids).
action_string = "my_table__toggle__2__33__$$"
req = self.factory.post('/my_url/', {'action': action_string})
self.table = MyTable(req, TEST_DATA)
self.assertEqual(('my_table', 'toggle', '2__33__$$'),
self.table.parse_action(action_string))
# Multiple object action
action_string = "my_table__delete"
req = self.factory.post('/my_url/', {'action': action_string,
'object_ids': [1, 2]})
self.table = MyTable(req, TEST_DATA)
self.assertEqual(('my_table', 'delete', None),
self.table.parse_action(action_string))
handled = self.table.maybe_handle()
self.assertEqual(302, handled.status_code)
self.assertEqual("http://example.com/?ids=1,2", handled["location"])
# Action with nothing selected
req = self.factory.post('/my_url/', {'action': action_string})
self.table = MyTable(req, TEST_DATA)
self.assertEqual(('my_table', 'delete', None),
self.table.parse_action(action_string))
handled = self.table.maybe_handle()
self.assertIsNone(handled)
self.assertEqual("Please select a row before taking that action.",
list(req._messages)[0].message)
# Action with specific id and multiple ids favors single id
action_string = "my_table__delete__3"
req = self.factory.post('/my_url/', {'action': action_string,
'object_ids': [1, 2]})
self.table = MyTable(req, TEST_DATA)
self.assertEqual(('my_table', 'delete', '3'),
self.table.parse_action(action_string))
handled = self.table.maybe_handle()
self.assertEqual(302, handled.status_code)
self.assertEqual("http://example.com/?ids=3",
handled["location"])
# At least one object in table
# BatchAction is available
req = self.factory.get('/my_url/')
self.table = MyTable(req, TEST_DATA_2)
self.assertQuerysetEqual(self.table.get_table_actions(),
['<MyFilterAction: filter>',
'<MyAction: delete>',
'<MyBatchAction: batch>',
'<MyBatchActionWithHelpText: batch_help>'])
# Zero objects in table
# BatchAction not available
req = self.factory.get('/my_url/')
self.table = MyTable(req, None)
self.assertQuerysetEqual(self.table.get_table_actions(),
['<MyFilterAction: filter>',
'<MyAction: delete>'])
# Filtering
action_string = "my_table__filter__q"
req = self.factory.post('/my_url/', {action_string: '2'})
self.table = MyTable(req, TEST_DATA)
handled = self.table.maybe_handle()
self.assertIsNone(handled)
self.assertQuerysetEqual(self.table.filtered_data,
['<FakeObject: object_2>'])
        # Ensure filtering respects the request method, e.g. no filter here
req = self.factory.get('/my_url/', {action_string: '2'})
self.table = MyTable(req, TEST_DATA)
handled = self.table.maybe_handle()
self.assertIsNone(handled)
self.assertQuerysetEqual(self.table.filtered_data,
['<FakeObject: object_1>',
'<FakeObject: object_2>',
'<FakeObject: object_3>'])
# Updating and preemptive actions
params = {"table": "my_table", "action": "row_update", "obj_id": "1"}
req = self.factory.get('/my_url/',
params,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.table = MyTable(req)
resp = self.table.maybe_preempt()
self.assertEqual(200, resp.status_code)
# Make sure the data returned differs from the original
self.assertContains(resp, "my_table__row__1")
self.assertContains(resp, "status_down")
# Verify that we don't get a response for a valid action with the
# wrong method.
params = {"table": "my_table", "action": "delete", "obj_id": "1"}
req = self.factory.get('/my_url/', params)
self.table = MyTable(req)
resp = self.table.maybe_preempt()
self.assertIsNone(resp)
resp = self.table.maybe_handle()
self.assertIsNone(resp)
# Verbose names
table_actions = self.table.get_table_actions()
self.assertEqual("Filter", unicode(table_actions[0].verbose_name))
self.assertEqual("Delete Me", unicode(table_actions[1].verbose_name))
row_actions = self.table.get_row_actions(TEST_DATA[0])
self.assertEqual("Delete Me", unicode(row_actions[0].verbose_name))
self.assertEqual("Log In", unicode(row_actions[1].verbose_name))
def test_server_filtering(self):
filter_value_param = "my_table__filter__q"
filter_field_param = '%s_field' % filter_value_param
# Server Filtering
req = self.factory.post('/my_url/')
req.session[filter_value_param] = '2'
req.session[filter_field_param] = 'name'
self.table = MyServerFilterTable(req, TEST_DATA)
handled = self.table.maybe_handle()
self.assertIsNone(handled)
self.assertQuerysetEqual(self.table.filtered_data,
['<FakeObject: object_2>'])
# Ensure API filtering does not filter on server, e.g. no filter here
req = self.factory.post('/my_url/')
req.session[filter_value_param] = 'up'
req.session[filter_field_param] = 'status'
self.table = MyServerFilterTable(req, TEST_DATA)
handled = self.table.maybe_handle()
self.assertIsNone(handled)
self.assertQuerysetEqual(self.table.filtered_data,
['<FakeObject: object_1>',
'<FakeObject: object_2>',
'<FakeObject: object_3>'])
def test_inline_edit_update_action_get_non_ajax(self):
        # A non-Ajax inline edit request should return None.
url = ('/my_url/?action=cell_update'
'&table=my_table&cell_name=name&obj_id=1')
req = self.factory.get(url, {})
self.table = MyTable(req, TEST_DATA_2)
handled = self.table.maybe_preempt()
# Checking the response header.
self.assertIsNone(handled)
def test_inline_edit_update_action_get(self):
# Get request should return td field with data.
url = ('/my_url/?action=cell_update'
'&table=my_table&cell_name=name&obj_id=1')
req = self.factory.get(url, {},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.table = MyTable(req, TEST_DATA_2)
handled = self.table.maybe_preempt()
# Checking the response header.
self.assertEqual(200, handled.status_code)
# Checking the response content.
resp = handled
self.assertContains(resp, '<td', 1)
self.assertContains(resp, 'inline_edit_available', 1)
self.assertContains(
resp,
'data-update-url="/my_url/?action=cell_update&'
'table=my_table&cell_name=name&obj_id=1"',
1)
self.assertContains(resp, 'table_cell_wrapper', 1)
self.assertContains(resp, 'table_cell_data_wrapper', 1)
self.assertContains(resp, 'table_cell_action', 1)
self.assertContains(resp, 'ajax-inline-edit', 1)
def test_inline_edit_update_action_get_not_allowed(self):
        # Inline edit is not allowed on this table, so the cell update
        # request should be rejected with a 401.
url = ('/my_url/?action=cell_update'
'&table=my_table&cell_name=name&obj_id=1')
req = self.factory.post(url, {})
self.table = MyTableNotAllowedInlineEdit(req, TEST_DATA_2)
handled = self.table.maybe_preempt()
# Checking the response header.
self.assertEqual(401, handled.status_code)
def test_inline_edit_update_action_get_inline_edit_mod(self):
# Get request in inline_edit_mode should return td with form field.
url = ('/my_url/?inline_edit_mod=true&action=cell_update'
'&table=my_table&cell_name=name&obj_id=1')
req = self.factory.get(url, {},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.table = MyTable(req, TEST_DATA_2)
handled = self.table.maybe_preempt()
# Checking the response header.
self.assertEqual(200, handled.status_code)
# Checking the response content.
resp = handled
self.assertContains(resp,
'<input class="test" id="name__1" name="name__1"'
' type="text" value="custom object_1" />',
count=1, html=True)
self.assertContains(resp, '<td', 1)
self.assertContains(resp, 'inline_edit_available', 1)
self.assertContains(
resp,
'data-update-url="/my_url/?action=cell_update&'
'table=my_table&cell_name=name&obj_id=1"',
1)
self.assertContains(resp, 'table_cell_wrapper', 1)
self.assertContains(resp, 'inline-edit-error', 1)
self.assertContains(resp, 'inline-edit-form', 1)
self.assertContains(resp, 'inline-edit-actions', 1)
self.assertContains(resp, '<button', 2)
self.assertContains(resp, 'inline-edit-submit', 1)
self.assertContains(resp, 'inline-edit-cancel', 1)
def test_inline_edit_update_action_post(self):
# Post request should invoke the cell update table action.
url = ('/my_url/?action=cell_update'
'&table=my_table&cell_name=name&obj_id=1')
req = self.factory.post(url, {'name__1': 'test_name'})
self.table = MyTable(req, TEST_DATA_2)
# checking the response header
handled = self.table.maybe_preempt()
self.assertEqual(200, handled.status_code)
def test_inline_edit_update_action_post_not_allowed(self):
        # A post request on a table where inline edit is not allowed
        # should be rejected with a 401.
url = ('/my_url/?action=cell_update'
'&table=my_table&cell_name=name&obj_id=1')
req = self.factory.post(url, {'name__1': 'test_name'})
self.table = MyTableNotAllowedInlineEdit(req, TEST_DATA_2)
# checking the response header
handled = self.table.maybe_preempt()
self.assertEqual(401, handled.status_code)
def test_inline_edit_update_action_post_validation_error(self):
# Name column has required validation, sending blank
# will return error.
url = ('/my_url/?action=cell_update'
'&table=my_table&cell_name=name&obj_id=1')
req = self.factory.post(url, {})
self.table = MyTable(req, TEST_DATA_2)
handled = self.table.maybe_preempt()
# Checking the response header.
self.assertEqual(400, handled.status_code)
self.assertEqual(('Content-Type', 'application/json'),
handled._headers['content-type'])
# Checking the response content.
resp = handled
self.assertContains(resp,
'"message": "This field is required."',
count=1, status_code=400)
def test_column_uniqueness(self):
table1 = MyTable(self.request)
table2 = MyTable(self.request)
# Regression test for launchpad bug 964345.
self.assertNotEqual(id(table1), id(table2))
self.assertNotEqual(id(table1.columns), id(table2.columns))
t1cols = table1.columns.values()
t2cols = table2.columns.values()
self.assertEqual(t1cols[0].name, t2cols[0].name)
self.assertNotEqual(id(t1cols[0]), id(t2cols[0]))
self.assertNotEqual(id(t1cols[0].table),
id(t2cols[0].table))
self.assertNotEqual(id(t1cols[0].table._data_cache),
id(t2cols[0].table._data_cache))
def test_summation_row(self):
# Test with the "average" method.
table = MyTable(self.request, TEST_DATA_4)
res = http.HttpResponse(table.render())
self.assertContains(res, '<tr class="summation"', 1)
self.assertContains(res, '<td>Summary</td>', 1)
self.assertContains(res, '<td>3.0</td>', 1)
# Test again with the "sum" method.
table.columns['value'].summation = "sum"
res = http.HttpResponse(table.render())
self.assertContains(res, '<tr class="summation"', 1)
self.assertContains(res, '<td>Summary</td>', 1)
self.assertContains(res, '<td>6</td>', 1)
# One last test with no summation.
table.columns['value'].summation = None
table.needs_summary_row = False
res = http.HttpResponse(table.render())
self.assertNotContains(res, '<tr class="summation"')
self.assertNotContains(res, '<td>3.0</td>')
self.assertNotContains(res, '<td>6</td>')
        # Even if the "average" summation method is specified, the summation
        # row is still rendered, but no value is shown when the provided
        # data cannot be summed.
table = MyTable(self.request, TEST_DATA)
res = http.HttpResponse(table.render())
self.assertContains(res, '<tr class="summation"')
self.assertNotContains(res, '<td>3.0</td>')
self.assertNotContains(res, '<td>6</td>')
def test_table_action_attributes(self):
table = MyTable(self.request, TEST_DATA)
self.assertTrue(table.has_actions)
self.assertTrue(table.needs_form_wrapper)
res = http.HttpResponse(table.render())
self.assertContains(res, "<form")
table = MyTable(self.request, TEST_DATA, needs_form_wrapper=False)
self.assertTrue(table.has_actions)
self.assertFalse(table.needs_form_wrapper)
res = http.HttpResponse(table.render())
self.assertNotContains(res, "<form")
table = NoActionsTable(self.request, TEST_DATA)
self.assertFalse(table.has_actions)
self.assertFalse(table.needs_form_wrapper)
res = http.HttpResponse(table.render())
self.assertNotContains(res, "<form")
def test_table_actions_not_allowed_hide_multiselect(self):
table = DisabledActionsTable(self.request, TEST_DATA)
self.assertFalse(table.has_actions)
self.assertFalse(table.needs_form_wrapper)
res = http.HttpResponse(table.render())
self.assertContains(res, "multi_select_column hidden")
def test_table_action_object_display_is_id(self):
action_string = "my_table__toggle__1"
req = self.factory.post('/my_url/', {'action': action_string})
self.table = MyTable(req, TEST_DATA)
self.mox.StubOutWithMock(self.table, 'get_object_display')
self.table.get_object_display(IsA(FakeObject)).AndReturn(None)
self.mox.ReplayAll()
self.assertEqual(('my_table', 'toggle', '1'),
self.table.parse_action(action_string))
handled = self.table.maybe_handle()
self.assertEqual(302, handled.status_code)
self.assertEqual("/my_url/", handled["location"])
self.assertEqual(u"Downed Item: 1",
list(req._messages)[0].message)
def test_table_column_can_be_selected(self):
self.table = MyTableSelectable(self.request, TEST_DATA_6)
# non selectable row
row = self.table.get_rows()[0]
# selectable
row1 = self.table.get_rows()[1]
row2 = self.table.get_rows()[2]
id_col = self.table.columns['id']
name_col = self.table.columns['name']
value_col = self.table.columns['value']
# transform
self.assertEqual('1', row.cells['id'].data) # Standard attr access
self.assertEqual('custom object_1', row.cells['name'].data) # Callable
# name and verbose_name
self.assertEqual("Id", unicode(id_col))
self.assertEqual("Verbose Name", unicode(name_col))
self.assertIn("sortable", name_col.get_final_attrs().get('class', ""))
# hidden
self.assertEqual(True, id_col.hidden)
self.assertIn("hide", id_col.get_final_attrs().get('class', ""))
self.assertEqual(False, name_col.hidden)
self.assertNotIn("hide", name_col.get_final_attrs().get('class', ""))
# link, link_classes, link_attrs and get_link_url
self.assertIn('href="http://example.com/"', row.cells['value'].value)
self.assertIn('class="link-modal"', row.cells['value'].value)
self.assertIn('data-type="modal dialog"', row.cells['value'].value)
self.assertIn('data-tip="click for dialog"', row.cells['value'].value)
self.assertIn('href="/auth/login/"', row.cells['status'].value)
# classes
self.assertEqual("green blue sortable anchor normal_column",
value_col.get_final_attrs().get('class', ""))
self.assertQuerysetEqual(row.get_cells(),
['<Cell: multi_select, my_table__row__1>',
'<Cell: id, my_table__row__1>',
'<Cell: name, my_table__row__1>',
'<Cell: value, my_table__row__1>',
'<Cell: status, my_table__row__1>',
])
# can_be_selected = False
self.assertTrue(row.get_cells()[0].data == "")
# can_be_selected = True
self.assertIn('checkbox', row1.get_cells()[0].data)
# status
cell_status = row.cells['status'].status
self.assertEqual('status_down',
row.cells['status'].get_status_class(cell_status))
self.assertEqual(row.cells['status'].data, 'down')
self.assertEqual(row.cells['status'].attrs,
{'title': 'service is not available',
'style': 'color:red;cursor:pointer'})
self.assertEqual(row1.cells['status'].data, 'up')
self.assertEqual(row1.cells['status'].attrs,
{'title': 'service is up and running',
'style': 'color:green;cursor:pointer'})
self.assertEqual(row2.cells['status'].data, 'standby')
self.assertEqual(row2.cells['status'].attrs, {})
status_rendered = row.cells['status'].render()
resp = http.HttpResponse(status_rendered)
self.assertContains(resp, 'style="color:red;cursor:pointer"', 1)
self.assertContains(resp, 'title="service is not available"', 1)
status_rendered = row1.cells['status'].render()
resp = http.HttpResponse(status_rendered)
self.assertContains(resp, 'style="color:green;cursor:pointer"', 1)
self.assertContains(resp, 'title="service is up and running"', 1)
# status_choices
id_col.status = True
id_col.status_choices = (('1', False), ('2', True))
cell_status = row.cells['id'].status
self.assertEqual(False, cell_status)
self.assertEqual('status_down',
row.cells['id'].get_status_class(cell_status))
# Ensure data is not cached on the column across table instances
self.table = MyTable(self.request, TEST_DATA_6)
row = self.table.get_rows()[0]
self.assertTrue("down" in row.cells['status'].value)
def test_broken_filter(self):
class MyTableBrokenFilter(MyTable):
value = tables.Column('value',
filters=(defaultfilters.timesince,))
value = "not_a_date"
data = TEST_DATA[0]
data.value = value
table = MyTableBrokenFilter(self.request, [data])
resp = http.HttpResponse(table.render())
self.assertContains(resp, value)
class SingleTableView(table_views.DataTableView):
table_class = MyTable
name = "Single Table"
slug = "single"
template_name = "horizon/common/_detail_table.html"
def get_data(self):
return TEST_DATA
class APIFilterTableView(SingleTableView):
table_class = MyServerFilterTable
class TableWithPermissions(tables.DataTable):
id = tables.Column('id')
class Meta(object):
name = "table_with_permissions"
permissions = ('horizon.test',)
class SingleTableViewWithPermissions(SingleTableView):
table_class = TableWithPermissions
class MultiTableView(tables.MultiTableView):
table_classes = (TableWithPermissions, MyTable)
def get_table_with_permissions_data(self):
return TEST_DATA
def get_my_table_data(self):
return TEST_DATA
class DataTableViewTests(test.TestCase):
def _prepare_view(self, cls, *args, **kwargs):
req = self.factory.get('/my_url/')
req.user = self.user
view = cls()
view.request = req
view.args = args
view.kwargs = kwargs
return view
def test_data_table_view(self):
view = self._prepare_view(SingleTableView)
context = view.get_context_data()
self.assertEqual(SingleTableView.table_class,
context['table'].__class__)
def test_data_table_view_not_authorized(self):
view = self._prepare_view(SingleTableViewWithPermissions)
context = view.get_context_data()
self.assertNotIn('table', context)
def test_data_table_view_authorized(self):
view = self._prepare_view(SingleTableViewWithPermissions)
self.set_permissions(permissions=['test'])
context = view.get_context_data()
self.assertIn('table', context)
self.assertEqual(SingleTableViewWithPermissions.table_class,
context['table'].__class__)
def test_multi_table_view_not_authorized(self):
view = self._prepare_view(MultiTableView)
context = view.get_context_data()
self.assertEqual(MyTable, context['my_table_table'].__class__)
self.assertNotIn('table_with_permissions_table', context)
def test_multi_table_view_authorized(self):
view = self._prepare_view(MultiTableView)
self.set_permissions(permissions=['test'])
context = view.get_context_data()
self.assertEqual(MyTable, context['my_table_table'].__class__)
self.assertEqual(TableWithPermissions,
context['table_with_permissions_table'].__class__)
fil_value_param = "my_table__filter__q"
fil_field_param = '%s_field' % fil_value_param
def _test_filter_setup_view(self, request):
view = APIFilterTableView()
view.request = request
view.kwargs = {}
view.handle_server_filter(request)
return view
def test_api_filter_table_view(self):
req = self.factory.post('/my_url/', {self.fil_value_param: 'up',
self.fil_field_param: 'status'})
req.user = self.user
view = self._test_filter_setup_view(req)
data = view.get_data()
context = view.get_context_data()
self.assertEqual(context['table'].__class__, MyServerFilterTable)
self.assertQuerysetEqual(data,
['<FakeObject: object_1>',
'<FakeObject: object_2>',
'<FakeObject: object_3>'])
self.assertEqual(req.session.get(self.fil_value_param), 'up')
self.assertEqual(req.session.get(self.fil_field_param), 'status')
def test_filter_changed_deleted(self):
req = self.factory.post('/my_url/', {self.fil_value_param: '',
self.fil_field_param: 'status'})
req.session[self.fil_value_param] = 'up'
req.session[self.fil_field_param] = 'status'
req.user = self.user
view = self._test_filter_setup_view(req)
context = view.get_context_data()
self.assertEqual(context['table'].__class__, MyServerFilterTable)
self.assertEqual(req.session.get(self.fil_value_param), '')
self.assertEqual(req.session.get(self.fil_field_param), 'status')
def test_filter_changed_nothing_sent(self):
req = self.factory.post('/my_url/', {})
req.session[self.fil_value_param] = 'up'
req.session[self.fil_field_param] = 'status'
req.user = self.user
view = self._test_filter_setup_view(req)
context = view.get_context_data()
self.assertEqual(context['table'].__class__, MyServerFilterTable)
self.assertEqual(req.session.get(self.fil_value_param), 'up')
self.assertEqual(req.session.get(self.fil_field_param), 'status')
def test_filter_changed_new_filter_sent(self):
req = self.factory.post('/my_url/', {self.fil_value_param: 'down',
self.fil_field_param: 'status'})
req.session[self.fil_value_param] = 'up'
req.session[self.fil_field_param] = 'status'
req.user = self.user
view = self._test_filter_setup_view(req)
context = view.get_context_data()
self.assertEqual(context['table'].__class__, MyServerFilterTable)
self.assertEqual(req.session.get(self.fil_value_param), 'down')
self.assertEqual(req.session.get(self.fil_field_param), 'status')
class FormsetTableTests(test.TestCase):
def test_populate(self):
"""Create a FormsetDataTable and populate it with data."""
class TableForm(forms.Form):
name = forms.CharField()
value = forms.IntegerField()
TableFormset = forms.formsets.formset_factory(TableForm, extra=0)
class Table(table_formset.FormsetDataTable):
formset_class = TableFormset
name = tables.Column('name')
value = tables.Column('value')
class Meta(object):
name = 'table'
table = Table(self.request)
table.data = TEST_DATA_4
formset = table.get_formset()
self.assertEqual(2, len(formset))
form = formset[0]
form_data = form.initial
self.assertEqual('object_1', form_data['name'])
self.assertEqual(2, form_data['value'])
| watonyweng/horizon | horizon/test/tests/tables.py | Python | apache-2.0 | 64,222 | 0 |
# -*- coding: utf-8 -*-
#
# Copyright 2015 Telefonica Investigación y Desarrollo, S.A.U
#
# This file is part of fiware-cygnus (FI-WARE project).
#
# fiware-cygnus is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any
# later version.
# fiware-cygnus is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along with fiware-cygnus. If not, see
# http://www.gnu.org/licenses/.
#
# For those usages not covered by the GNU Affero General Public License please contact:
# iot_support at tid.es
#
__author__ = 'Iván Arias León (ivan.ariasleon at telefonica dot com)'
from integration.notifications.common_steps.multi_instances import * # steps to multi-instances
from integration.notifications.common_steps.configuration import * # steps to pre-configurations
from integration.notifications.common_steps.notifications import * # steps to notifications
from integration.notifications.common_steps.grouping_rules import * # steps to grouping rules
# ----------------------------------- COMMON STEPS ------------------------------------
# ---------------------------- configuration.py --------------------------------------
# @step (u'copy properties.json file from "([^"]*)" to test "([^"]*)" and sudo local "([^"]*)"')
# @step (u'configuration of cygnus instances with different ports "([^"]*)", agents files quantity "([^"]*)", id "([^"]*)" and in "([^"]*)" mode')
# @step (u'copy flume-env.sh, grouping rules file from "([^"]*)", log4j.properties, krb5.conf and restart cygnus service. This execution is only once "([^"]*)"')
# @step (u'verify if cygnus is installed correctly')
# @step (u'reinitialize log file')
# @step (u'check in log, label "([^"]*)" and text "([^"]*)"')
# @step (u'delete grouping rules file')
# --------------------------- notifications.py ------------------------------------
# @step (u'service "([^"]*)", service path "([^"]*)", entity type "([^"]*)", entity id "([^"]*)", with attribute number "([^"]*)", attribute name "([^"]*)" and attribute type "([^"]*)"')
# @step(u'receives a notification with attributes value "([^"]*)", metadata value "([^"]*)" and content "([^"]*)"')
# @step (u'receives "([^"]*)" notifications with consecutive values beginning with "([^"]*)" and with one step')
# @step (u'receives multiples notifications one by instance and the port defined incremented with attributes value "([^"]*)", metadata value "([^"]*)" and content "([^"]*)"')
# @step(u'receive an "([^"]*)" http code')
# --------------------------- grouping_rules.py -----------------------------------
# @step (u'update real values in resource "([^"]*)" and service path "([^"]*)" to notification request')
# @step (u'changes new destination "([^"]*)" where to verify in dataset "([^"]*)"')
# --------------------------- multi_instances.py ----------------------------------
# @step (u'delete instances files')
#----------------------------------------------------------------------------------
#----------------------------------------------------------------------------------
@step(u'verify if mysql is installed correctly')
def mysql_is_installed_correctly(step):
"""
    Verify that MySQL is installed correctly (the version is checked).
:param step:
"""
world.mysql.connect()
world.mysql.verify_version()
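# Illustrative feature-file usage for the step above (the surrounding
# scenario is hypothetical; only the step sentence itself is defined here):
#   Then verify if mysql is installed correctly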
@step(u'Close mysql connection')
def close_mysql_connection(step):
"""
    Close the MySQL connection.
:param step:
"""
world.cygnus.close_connection()
@step(u'create a new database and a table with attribute data type "([^"]*)" and metadata data type "([^"]*)"')
def create_a_new_table_with_service_attributes_attribute_type_attribute_data_type_and_metadata_data_type(step, attribute_data_type, metadata_data_type):
"""
    Create a new database and a new table (column mode).
:param step:
:param attribute_data_type:
:param metadata_data_type:
"""
world.cygnus.create_database()
    world.cygnus.create_table(attribute_data_type, metadata_data_type)
# ------------------------------------------------------------------------------------------------------------------
@step(u'Verify that the attribute value is stored in mysql')
def verify_that_the_attribute_value_is_stored_in_mysql(step):
"""
    Validate that the attribute value and type are stored in MySQL (column mode).
:param step:
"""
world.cygnus.verify_table_search_values_by_column()
@step(u'Verify the metadatas are stored in mysql')
def verify_the_metadatas_are_stored_in_mysql(step):
"""
    Validate that the attribute metadata is stored in MySQL (column mode).
:param step:
"""
world.cygnus.verify_table_search_metadatas_values_by_column()
@step(u'Verify that is not stored in mysql "([^"]*)"')
def verify_that_is_not_stored_in_mysql(step, error_msg):
"""
    Verify that the value is not stored in MySQL.
:param step:
:param error_msg:
"""
    world.cygnus.verify_table_search_without_data(error_msg)
@step(u'Validate that the attribute value, metadata "([^"]*)" and type are stored in mysql')
def validate_that_the_attribute_value_and_type_are_stored_in_mysql(step, metadata):
"""
    Validate that the attribute values and types are stored in MySQL (row mode).
    :param step:
    :param metadata:
"""
world.cygnus.verify_table_search_values_by_row(metadata)
#----------------------------------------------------------------------------------
| jmcanterafonseca/fiware-cygnus | test/acceptance/integration/notifications/mysql/steps.py | Python | agpl-3.0 | 5,768 | 0.009193 |
from scipy.io.wavfile import read
import matplotlib.pyplot as plt
import PIL
from PIL import Image, ImageOps
import os
# Convert every .wav file in the current directory into a small
# black-and-white waveform thumbnail.
for file in os.listdir("./"):
    if file.endswith(".wav"):
        print(file)
        outputfile = file[:-4] + '.png'
        # read() returns (sample_rate, samples); only the samples are used
        input_data = read(file)
        audio = input_data[1]
        # Plot the first 600 samples without axes and save the figure
        fig = plt.figure()
        ax = fig.add_axes((0, 0, 1, 1))
        ax.set_axis_off()
        ax.plot(audio[0:600] / 1000.0, color="black")
        fig.savefig(outputfile)
        plt.close(fig)  # free the figure; avoids leaking memory in the loop
        # Downscale the image and add a 1px black border
        img = Image.open(outputfile)
        img = img.resize((100, 40), PIL.Image.ANTIALIAS)
        img = ImageOps.expand(img, border=1, fill='black')
        img.save(outputfile)
# plt.axis('off')
# plt.plot(audio[0:600]/1000.0)
# #plt.show()
# plt.savefig('foo.png')
| sinneb/pyo-patcher | webroot/transformer.py | Python | mit | 791 | 0.012642
from roetsjbaan.migrator import *
from roetsjbaan.versioner import *
| mivdnber/roetsjbaan | roetsjbaan/__init__.py | Python | mit | 69 | 0 |
#-*- coding: utf-8 -*-
#!/usr/bin/env python
"""
Flask-Mysqlpool
---------------
Adds support to Flask for connecting to a MySQL server using
the mysqldb extension and a connection pool.
"""
from setuptools import setup
setup(
name='Flask-Mysqlpool',
version='0.1',
url='',
license='BSD',
author='Giorgos Komninos',
author_email='[email protected]',
description='Flask simple mysql client using a connection pool',
long_description=__doc__,
packages=[
'flask_mysqlpool',
],
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'mysql-python',
],
test_suite='test_mysqlpool',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
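# A minimal sketch of the usual setuptools workflow for this package
# (commands are illustrative and assume a source checkout):
#   python setup.py sdist   # build a source distribution
#   python setup.py test    # run the test_mysqlpool suite declared above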
| gosom/flask-mysqlpool | setup.py | Python | bsd-3-clause | 1,227 | 0.002445 |
# -*- coding:utf-8 -*-
"""
Definition of physical dimensions.
Unit systems will be constructed on top of these dimensions.
Most of the examples in the doc use MKS system and are presented from the
computer point of view: from a human point, adding length to time is not legal
in MKS but it is in natural system; for a computer in natural system there is
no time dimension (but a velocity dimension instead) - in the basis - so the
question of adding time to length has no meaning.
"""
from __future__ import division
from copy import copy
import numbers
from sympy.core.compatibility import reduce
from sympy.core.containers import Tuple, Dict
from sympy import sympify, nsimplify, Number, Integer, Matrix, Expr
class Dimension(Expr):
"""
    This class represents the dimension of a physical quantity.
The dimensions may have a name and a symbol. All other
arguments are dimensional powers. They represent a characteristic of a
quantity, giving an interpretation to it: for example (in classical
mechanics) we know that time is different from temperature, and dimensions
make this difference (but they do not provide any measure of these
    quantities).
>>> from sympy.physics.unitsystems.dimensions import Dimension
>>> length = Dimension(length=1)
>>> length
{'length': 1}
>>> time = Dimension(time=1)
Dimensions behave like a dictionary where the key is the name and the value
corresponds to the exponent.
Dimensions can be composed using multiplication, division and
exponentiation (by a number) to give new dimensions. Addition and
    subtraction are defined only when the two objects are the same dimension.
>>> velocity = length.div(time)
>>> velocity #doctest: +SKIP
{'length': 1, 'time': -1}
>>> length.add(length)
{'length': 1}
>>> length.pow(2)
{'length': 2}
Defining addition-like operations will help when doing dimensional analysis.
Note that two dimensions are equal if they have the same powers, even if
their names and/or symbols differ.
>>> Dimension(length=1) == Dimension(length=1, name="length")
True
>>> Dimension(length=1) == Dimension(length=1, symbol="L")
True
>>> Dimension(length=1) == Dimension(length=1, name="length",
... symbol="L")
True
"""
is_commutative = True
is_number = False
# make sqrt(M**2) --> M
is_positive = True
def __new__(cls, *args, **kwargs):
"""
Create a new dimension.
Possibilities are (examples given with list/tuple work also with
tuple/list):
>>> from sympy.physics.unitsystems.dimensions import Dimension
>>> Dimension(length=1)
{'length': 1}
>>> Dimension({"length": 1})
{'length': 1}
>>> Dimension([("length", 1), ("time", -1)]) #doctest: +SKIP
{'length': 1, 'time': -1}
"""
# before setting the dict, check if a name and/or a symbol are defined
# if so, remove them from the dict
name = kwargs.pop('name', None)
symbol = kwargs.pop('symbol', None)
# pairs of (dimension, power)
pairs = []
# add first items from args to the pairs
for arg in args:
# construction with {"length": 1}
if isinstance(arg, dict):
arg = copy(arg)
pairs.extend(arg.items())
elif isinstance(arg, (Tuple, tuple, list)):
#TODO: add construction with ("length", 1); not trivial because
                # e.g. [("length", 1), ("time", -1)] also has length 2
for p in arg:
#TODO: check that p is a tuple
if len(p) != 2:
raise ValueError("Length of iterable has to be 2; "
"'%d' found" % len(p))
# construction with [("length", 1), ...]
pairs.extend(arg)
else:
# error if the arg is not of previous types
raise TypeError("Positional arguments can only be: "
"dict, tuple, list; '%s' found" % type(arg))
pairs.extend(kwargs.items())
# check validity of dimension key and power
for pair in pairs:
            #if not isinstance(pair[0], str):
            #    raise TypeError("key %s is not a string." % pair[0])
if not isinstance(pair[1], (numbers.Real, Number)):
raise TypeError("Power corresponding to '%s' is not a number"
% pair[0])
        # filter dimensions set to zero; this avoids the following odd result:
# Dimension(length=1) == Dimension(length=1, mass=0) => False
# also simplify to avoid powers such as 2.00000
pairs = [(pair[0], nsimplify(pair[1])) for pair in pairs
if pair[1] != 0]
pairs.sort(key=str)
new = Expr.__new__(cls, Dict(*pairs))
new.name = name
new.symbol = symbol
new._dict = dict(pairs)
return new
def __getitem__(self, key):
"""x.__getitem__(y) <==> x[y]"""
return self._dict[key]
def __setitem__(self, key, value):
        raise NotImplementedError("Dimension objects are immutable")
def items(self):
"""D.items() -> list of D's (key, value) pairs, as 2-tuples"""
return self._dict.items()
def keys(self):
"""D.keys() -> list of D's keys"""
return self._dict.keys()
def values(self):
"""D.values() -> list of D's values"""
return self._dict.values()
def __iter__(self):
"""x.__iter__() <==> iter(x)"""
return iter(self._dict)
def __len__(self):
"""x.__len__() <==> len(x)"""
return self._dict.__len__()
def get(self, key, default=None):
"""D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None."""
return self._dict.get(key, default)
def __contains__(self, key):
"""D.__contains__(k) -> True if D has a key k, else False"""
return key in self._dict
def __lt__(self, other):
return self.args < other.args
def __str__(self):
"""
Display the string representation of the dimension.
Usually one will always use a symbol to denote the dimension. If no
symbol is defined then it uses the name or, if there is no name, the
default dict representation.
We do *not* want to use the dimension system to find the string
representation of a dimension because it would imply some magic in
order to guess the "best" form. It is better to do as if we do not
have a system, and then to design a specific function to take it into
account.
"""
if self.symbol is not None:
return self.symbol
elif self.name is not None:
return self.name
else:
return repr(self)
def __repr__(self):
return repr(self._dict)
def __neg__(self):
return self
def add(self, other):
"""
Define the addition for Dimension.
        Addition of dimension makes sense only if the second object is the same
dimension (we don't add length to time).
"""
if not isinstance(other, Dimension):
raise TypeError("Only dimension can be added; '%s' is not valid"
% type(other))
elif isinstance(other, Dimension) and self != other:
raise ValueError("Only dimension which are equal can be added; "
"'%s' and '%s' are different" % (self, other))
return self
def sub(self, other):
        # there is no notion of ordering (or magnitude) among dimensions,
# subtraction is equivalent to addition when the operation is legal
return self.add(other)
def pow(self, other):
#TODO: be sure that it works with rational numbers (e.g. when dealing
# with dimension under a fraction)
#TODO: allow exponentiation by an abstract symbol x
# (if x.is_number is True)
        # this would be a step toward using solve and abstract powers
other = sympify(other)
if isinstance(other, (numbers.Real, Number)):
return Dimension([(x, y*other) for x, y in self.items()])
else:
raise TypeError("Dimensions can be exponentiated only with "
"numbers; '%s' is not valid" % type(other))
def mul(self, other):
if not isinstance(other, Dimension):
#TODO: improve to not raise error: 2*L could be a legal operation
            # (the same comment applies for __div__)
raise TypeError("Only dimension can be multiplied; '%s' is not "
"valid" % type(other))
d = dict(self)
for key in other:
try:
d[key] += other[key]
except KeyError:
d[key] = other[key]
d = Dimension(d)
return d
def div(self, other):
if not isinstance(other, Dimension):
raise TypeError("Only dimension can be divided; '%s' is not valid"
% type(other))
d = dict(self)
for key in other:
try:
d[key] -= other[key]
except KeyError:
d[key] = -other[key]
d = Dimension(d)
return d
def rdiv(self, other):
return other * pow(self, -1)
@property
def is_dimensionless(self):
"""
Check if the dimension object really has a dimension.
A dimension should have at least one component with non-zero power.
"""
for key in self:
if self[key] != 0:
return False
else:
return True
@property
def has_integer_powers(self):
"""
Check if the dimension object has only integer powers.
All the dimension powers should be integers, but rational powers may
appear in intermediate steps. This method may be used to check that the
final result is well-defined.
"""
for key in self:
if not isinstance(self[key], Integer):
return False
else:
return True
class DimensionSystem(object):
"""
DimensionSystem represents a coherent set of dimensions.
In a system dimensions are of three types:
- base dimensions;
- derived dimensions: these are defined in terms of the base dimensions
(for example velocity is defined from the division of length by time);
- canonical dimensions: these are used to define systems because one has
      to start somewhere: we cannot build a system ex nihilo (see the
discussion in the documentation for more details).
All intermediate computations will use the canonical basis, but at the end
one can choose to print result in some other basis.
In a system dimensions can be represented as a vector, where the components
represent the powers associated to each base dimension.
"""
def __init__(self, base, dims=(), name="", descr=""):
"""
Initialize the dimension system.
It is important that base units have a name or a symbol such that
one can sort them in a unique way to define the vector basis.
"""
self.name = name
self.descr = descr
if (None, None) in [(d.name, d.symbol) for d in base]:
raise ValueError("Base dimensions must have a symbol or a name")
self._base_dims = self.sort_dims(base)
        # base is first so that named dimensions are kept
self._dims = tuple(set(base) | set(dims))
self._can_transf_matrix = None
self._list_can_dims = None
if self.is_consistent is False:
raise ValueError("The system with basis '%s' is not consistent"
% str(self._base_dims))
def __str__(self):
"""
Return the name of the system.
If it does not exist, then it makes a list of symbols (or names) of
the base dimensions.
"""
if self.name != "":
return self.name
else:
return "(%s)" % ", ".join(str(d) for d in self._base_dims)
def __repr__(self):
return "<DimensionSystem: %s>" % repr(self._base_dims)
def __getitem__(self, key):
"""
Shortcut to the get_dim method, using key access.
"""
d = self.get_dim(key)
#TODO: really want to raise an error?
if d is None:
raise KeyError(key)
return d
def __call__(self, unit):
"""
Wrapper to the method print_dim_base
"""
return self.print_dim_base(unit)
def get_dim(self, dim):
"""
Find a specific dimension which is part of the system.
dim can be a string or a dimension object. If no dimension is found,
then return None.
"""
#TODO: if the argument is a list, return a list of all matching dims
found_dim = None
#TODO: use copy instead of direct assignment for found_dim?
if isinstance(dim, str):
for d in self._dims:
if dim in (d.name, d.symbol):
found_dim = d
break
elif isinstance(dim, Dimension):
try:
i = self._dims.index(dim)
found_dim = self._dims[i]
except ValueError:
pass
return found_dim
def extend(self, base, dims=(), name='', description=''):
"""
Extend the current system into a new one.
        Take the base and normal units of the current system and merge
        them with the base and normal units given in argument.
        If not provided, name and description are overridden by empty strings.
"""
base = self._base_dims + tuple(base)
dims = self._dims + tuple(dims)
return DimensionSystem(base, dims, name, description)
@staticmethod
def sort_dims(dims):
"""
Sort dimensions given in argument using their str function.
        This function will ensure that we always get the same tuple for a given
set of dimensions.
"""
return tuple(sorted(dims, key=str))
@property
def list_can_dims(self):
"""
List all canonical dimension names.
"""
if self._list_can_dims is None:
gen = reduce(lambda x, y: x.mul(y), self._base_dims)
self._list_can_dims = tuple(sorted(map(str, gen.keys())))
return self._list_can_dims
@property
def inv_can_transf_matrix(self):
"""
Compute the inverse transformation matrix from the base to the
canonical dimension basis.
It corresponds to the matrix where columns are the vector of base
dimensions in canonical basis.
This matrix will almost never be used because dimensions are always
        defined with respect to the canonical basis, so no work has to be done
to get them in this basis. Nonetheless if this matrix is not square
(or not invertible) it means that we have chosen a bad basis.
"""
matrix = reduce(lambda x, y: x.row_join(y),
[self.dim_can_vector(d) for d in self._base_dims])
return matrix
@property
def can_transf_matrix(self):
"""
Compute the canonical transformation matrix from the canonical to the
base dimension basis.
It is the inverse of the matrix computed with inv_can_transf_matrix().
"""
#TODO: the inversion will fail if the system is inconsistent, for
# example if the matrix is not a square
if self._can_transf_matrix is None:
self._can_transf_matrix = reduce(lambda x, y: x.row_join(y),
[self.dim_can_vector(d)
for d in self._base_dims]).inv()
return self._can_transf_matrix
def dim_can_vector(self, dim):
"""
Vector representation in terms of the canonical base dimensions.
"""
vec = []
for d in self.list_can_dims:
vec.append(dim.get(d, 0))
return Matrix(vec)
def dim_vector(self, dim):
"""
Vector representation in terms of the base dimensions.
"""
return self.can_transf_matrix * self.dim_can_vector(dim)
def print_dim_base(self, dim):
"""
        Give the string expression of a dimension in terms of the basis.
Dimensions are displayed by decreasing power.
"""
res = ""
for (d, p) in sorted(zip(self._base_dims, self.dim_vector(dim)),
key=lambda x: x[1], reverse=True):
if p == 0:
continue
elif p == 1:
res += "%s " % str(d)
else:
res += "%s^%d " % (str(d), p)
return res.strip()
@property
def dim(self):
"""
Give the dimension of the system.
        That is, return the number of dimensions forming the basis.
"""
return len(self._base_dims)
@property
def is_consistent(self):
"""
Check if the system is well defined.
"""
# not enough or too many base dimensions compared to independent
# dimensions
# in vector language: the set of vectors do not form a basis
if self.inv_can_transf_matrix.is_square is False:
return False
return True
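# A minimal usage sketch, kept as comments to avoid executing on import;
# the dimension names below are illustrative assumptions, not part of this
# module.
#
# >>> length = Dimension(length=1, symbol="L")
# >>> time = Dimension(time=1, symbol="T")
# >>> velocity = length.div(time)  # {'length': 1, 'time': -1}
# >>> velocity.pow(2)  # {'length': 2, 'time': -2}
# >>> system = DimensionSystem((length, time), (velocity,))
# >>> system.print_dim_base(velocity)  # 'L T^-1'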
| wxgeo/geophar | wxgeometrie/sympy/physics/unitsystems/dimensions.py | Python | gpl-2.0 | 17,933 | 0.000725 |
from __future__ import unicode_literals
import cgi
import codecs
import logging
import re
import sys
from io import BytesIO
from django import http
from django.conf import settings
from django.core import signals
from django.core.handlers import base
from django.urls import set_script_prefix
from django.utils import six
from django.utils.encoding import force_str, force_text
from django.utils.functional import cached_property
logger = logging.getLogger('django.request')
# encode() and decode() expect the charset to be a native string.
ISO_8859_1, UTF_8 = str('iso-8859-1'), str('utf-8')
_slashes_re = re.compile(br'/+')
class LimitedStream(object):
'''
    LimitedStream wraps another stream to disallow reading from it past a
    specified number of bytes.
'''
def __init__(self, stream, limit, buf_size=64 * 1024 * 1024):
self.stream = stream
self.remaining = limit
self.buffer = b''
self.buf_size = buf_size
def _read_limited(self, size=None):
if size is None or size > self.remaining:
size = self.remaining
if size == 0:
return b''
result = self.stream.read(size)
self.remaining -= len(result)
return result
def read(self, size=None):
if size is None:
result = self.buffer + self._read_limited()
self.buffer = b''
elif size < len(self.buffer):
result = self.buffer[:size]
self.buffer = self.buffer[size:]
else: # size >= len(self.buffer)
result = self.buffer + self._read_limited(size - len(self.buffer))
self.buffer = b''
return result
def readline(self, size=None):
while b'\n' not in self.buffer and \
(size is None or len(self.buffer) < size):
if size:
# since size is not None here, len(self.buffer) < size
chunk = self._read_limited(size - len(self.buffer))
else:
chunk = self._read_limited()
if not chunk:
break
self.buffer += chunk
sio = BytesIO(self.buffer)
if size:
line = sio.readline(size)
else:
line = sio.readline()
self.buffer = sio.read()
return line
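# Behavior sketch for LimitedStream (comments only, not executed):
#
# >>> stream = LimitedStream(BytesIO(b'abcdef'), 4)
# >>> stream.read()  # b'abcd' -- stops at the declared limit
# >>> stream.read()  # b'' -- the remaining bytes are never read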
class WSGIRequest(http.HttpRequest):
def __init__(self, environ):
script_name = get_script_name(environ)
path_info = get_path_info(environ)
if not path_info:
# Sometimes PATH_INFO exists, but is empty (e.g. accessing
# the SCRIPT_NAME URL without a trailing slash). We really need to
# operate as if they'd requested '/'. Not amazingly nice to force
# the path like this, but should be harmless.
path_info = '/'
self.environ = environ
self.path_info = path_info
# be careful to only replace the first slash in the path because of
# http://test/something and http://test//something being different as
# stated in http://www.ietf.org/rfc/rfc2396.txt
self.path = '%s/%s' % (script_name.rstrip('/'),
path_info.replace('/', '', 1))
self.META = environ
self.META['PATH_INFO'] = path_info
self.META['SCRIPT_NAME'] = script_name
self.method = environ['REQUEST_METHOD'].upper()
self.content_type, self.content_params = cgi.parse_header(environ.get('CONTENT_TYPE', ''))
if 'charset' in self.content_params:
try:
codecs.lookup(self.content_params['charset'])
except LookupError:
pass
else:
self.encoding = self.content_params['charset']
self._post_parse_error = False
try:
content_length = int(environ.get('CONTENT_LENGTH'))
except (ValueError, TypeError):
content_length = 0
self._stream = LimitedStream(self.environ['wsgi.input'], content_length)
self._read_started = False
self.resolver_match = None
def _get_scheme(self):
return self.environ.get('wsgi.url_scheme')
@cached_property
def GET(self):
# The WSGI spec says 'QUERY_STRING' may be absent.
raw_query_string = get_bytes_from_wsgi(self.environ, 'QUERY_STRING', '')
return http.QueryDict(raw_query_string, encoding=self._encoding)
def _get_post(self):
if not hasattr(self, '_post'):
self._load_post_and_files()
return self._post
def _set_post(self, post):
self._post = post
@cached_property
def COOKIES(self):
raw_cookie = get_str_from_wsgi(self.environ, 'HTTP_COOKIE', '')
return http.parse_cookie(raw_cookie)
def _get_files(self):
if not hasattr(self, '_files'):
self._load_post_and_files()
return self._files
POST = property(_get_post, _set_post)
FILES = property(_get_files)
class WSGIHandler(base.BaseHandler):
request_class = WSGIRequest
def __init__(self, *args, **kwargs):
super(WSGIHandler, self).__init__(*args, **kwargs)
self.load_middleware()
def __call__(self, environ, start_response):
set_script_prefix(get_script_name(environ))
signals.request_started.send(sender=self.__class__, environ=environ)
try:
request = self.request_class(environ)
except UnicodeDecodeError:
logger.warning(
'Bad Request (UnicodeDecodeError)',
exc_info=sys.exc_info(),
extra={
'status_code': 400,
}
)
response = http.HttpResponseBadRequest()
else:
response = self.get_response(request)
response._handler_class = self.__class__
status = '%s %s' % (response.status_code, response.reason_phrase)
response_headers = [(str(k), str(v)) for k, v in response.items()]
for c in response.cookies.values():
response_headers.append((str('Set-Cookie'), str(c.output(header=''))))
start_response(force_str(status), response_headers)
if getattr(response, 'file_to_stream', None) is not None and environ.get('wsgi.file_wrapper'):
response = environ['wsgi.file_wrapper'](response.file_to_stream)
return response
def get_path_info(environ):
"""
Returns the HTTP request's PATH_INFO as a unicode string.
"""
path_info = get_bytes_from_wsgi(environ, 'PATH_INFO', '/')
return path_info.decode(UTF_8)
def get_script_name(environ):
"""
Returns the equivalent of the HTTP request's SCRIPT_NAME environment
variable. If Apache mod_rewrite has been used, returns what would have been
the script name prior to any rewriting (so it's the script name as seen
from the client's perspective), unless the FORCE_SCRIPT_NAME setting is
set (to anything).
"""
if settings.FORCE_SCRIPT_NAME is not None:
return force_text(settings.FORCE_SCRIPT_NAME)
# If Apache's mod_rewrite had a whack at the URL, Apache set either
# SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any
# rewrites. Unfortunately not every Web server (lighttpd!) passes this
# information through all the time, so FORCE_SCRIPT_NAME, above, is still
# needed.
script_url = get_bytes_from_wsgi(environ, 'SCRIPT_URL', '')
if not script_url:
script_url = get_bytes_from_wsgi(environ, 'REDIRECT_URL', '')
if script_url:
if b'//' in script_url:
# mod_wsgi squashes multiple successive slashes in PATH_INFO,
# do the same with script_url before manipulating paths (#17133).
script_url = _slashes_re.sub(b'/', script_url)
path_info = get_bytes_from_wsgi(environ, 'PATH_INFO', '')
script_name = script_url[:-len(path_info)] if path_info else script_url
else:
script_name = get_bytes_from_wsgi(environ, 'SCRIPT_NAME', '')
return script_name.decode(UTF_8)
def get_bytes_from_wsgi(environ, key, default):
"""
Get a value from the WSGI environ dictionary as bytes.
key and default should be str objects. Under Python 2 they may also be
unicode objects provided they only contain ASCII characters.
"""
value = environ.get(str(key), str(default))
# Under Python 3, non-ASCII values in the WSGI environ are arbitrarily
# decoded with ISO-8859-1. This is wrong for Django websites where UTF-8
# is the default. Re-encode to recover the original bytestring.
return value.encode(ISO_8859_1) if six.PY3 else value
def get_str_from_wsgi(environ, key, default):
"""
Get a value from the WSGI environ dictionary as str.
key and default should be str objects. Under Python 2 they may also be
unicode objects provided they only contain ASCII characters.
"""
value = get_bytes_from_wsgi(environ, key, default)
return value.decode(UTF_8, errors='replace') if six.PY3 else value
| filias/django | django/core/handlers/wsgi.py | Python | bsd-3-clause | 9,048 | 0.000553 |
SEQUENCE = [
'localsite_public',
'localsite_extra_data',
]
| reviewboard/reviewboard | reviewboard/site/evolutions/__init__.py | Python | mit | 67 | 0 |
# import sys
# sys.path.append('/home/openflow/frenetic/updates/examples')
from nxtopo import NetworkXTopo
from mininet.topo import Node
import networkx as nx
class MyTopo( NetworkXTopo ):
    def __init__( self, enable_all = True ):
        # Build a complete graph on 32 switches, renumbering the nodes to
        # start at 1 instead of 0.
        comp_graph = nx.complete_graph(32)
        graph = nx.Graph()
        for node in comp_graph:
            graph.add_node(node+1)
        for edge in comp_graph.edges():
            (src,dst) = edge
            graph.add_edge(src+1,dst+1)
        # Attach one host to port 4 of each switch; host ids continue after
        # the switch ids.
        host_location = {}
        for host in range(1,graph.order()+1):
            host_location[host+graph.order()] = (host, 4)
        super( MyTopo, self ).__init__(graph, host_location)
topos = { 'mytopo': ( lambda: MyTopo() ) }
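# Usage note (hedged): with Mininet installed, a topology registered in
# ``topos`` is normally selected from the command line, for example
#
#   sudo mn --custom hypercube.py --topo mytopo
#
# where the file name is an assumption about how this module is saved.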
| XianliangJ/collections | CNUpdates/updates/examples/hypercube.py | Python | gpl-3.0 | 753 | 0.027888 |
#!/usr/bin/env python
from pkg_resources import require
from . import defer
__version__ = '1.1.3'
| SentimensRG/txCelery | txcelery/__init__.py | Python | mit | 100 | 0 |
"""General client side utilities.
This module contains utility functions, used primarily by advanced COM
programmers, or other COM modules.
"""
import pythoncom
from win32com.client import Dispatch, _get_good_object_
PyIDispatchType = pythoncom.TypeIIDs[pythoncom.IID_IDispatch]
def WrapEnum(ob, resultCLSID = None):
"""Wrap an object in a VARIANT enumerator.
All VT_DISPATCHs returned by the enumerator are converted to wrapper objects
(which may be either a class instance, or a dynamic.Dispatch type object).
"""
if type(ob) != pythoncom.TypeIIDs[pythoncom.IID_IEnumVARIANT]:
ob = ob.QueryInterface(pythoncom.IID_IEnumVARIANT)
return EnumVARIANT(ob, resultCLSID)
class Enumerator:
"""A class that provides indexed access into an Enumerator
By wrapping a PyIEnum* object in this class, you can perform
natural looping and indexing into the Enumerator.
Looping is very efficient, but it should be noted that although random
access is supported, the underlying object is still an enumerator, so
this will force many reset-and-seek operations to find the requested index.
"""
def __init__(self, enum):
self._oleobj_ = enum # a PyIEnumVARIANT
self.index = -1
def __getitem__(self, index):
return self.__GetIndex(index)
def __call__(self, index):
return self.__GetIndex(index)
def __GetIndex(self, index):
if type(index)!=type(0): raise TypeError("Only integer indexes are supported for enumerators")
# NOTE
        # In this context, self.index is used purely as a flag to say
# "am I still in sequence". The user may call Next() or Reset() if they
# so choose, in which case self.index will not be correct (although we
# still want to stay in sequence)
if index != self.index + 1:
# Index requested out of sequence.
self._oleobj_.Reset()
if index: self._oleobj_.Skip(index) # if asked for item 1, must skip 1, Python always zero based.
self.index = index
result = self._oleobj_.Next(1)
if len(result):
return self._make_retval_(result[0])
raise IndexError("list index out of range")
def Next(self, count=1):
ret = self._oleobj_.Next(count)
realRets = []
for r in ret:
realRets.append(self._make_retval_(r))
return tuple(realRets) # Convert back to tuple.
def Reset(self):
return self._oleobj_.Reset()
def Clone(self):
return self.__class__( self._oleobj_.Clone(), self.resultCLSID)
def _make_retval_(self, result):
return result
class EnumVARIANT(Enumerator):
def __init__(self, enum, resultCLSID = None):
self.resultCLSID = resultCLSID
Enumerator.__init__(self, enum)
def _make_retval_(self, result):
return _get_good_object_(result, resultCLSID = self.resultCLSID)
class Iterator:
def __init__(self, enum, resultCLSID = None):
self.resultCLSID = resultCLSID
self._iter_ = iter(enum.QueryInterface(pythoncom.IID_IEnumVARIANT))
def __iter__(self):
return self
def next(self):
return _get_good_object_(self._iter_.next(), resultCLSID = self.resultCLSID)
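# Illustrative sketch (comments only; ``enum_ob`` is assumed to be an object
# that can be QueryInterface'd to IID_IEnumVARIANT, e.g. obtained from a COM
# collection's _NewEnum):
#
# enum = WrapEnum(enum_ob)
# first = enum[0]        # random access forces a Reset()/Skip() seek
# batch = enum.Next(10)  # sequential access is efficient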
| JulienMcJay/eclock | windows/Python27/Lib/site-packages/pywin32-218-py2.7-win32.egg/win32com/client/util.py | Python | gpl-2.0 | 2,965 | 0.03339 |
#!/usr/bin/env python
import argparse
import re
from sys import argv
#Globals
NT= ('A','C','G','T','U','R','Y','K','M','S','W','B','D','H','V','N', '-', '?')
AA =('A','B','C','D','E','F','G','H','I','K','L','M','N','P','Q','R','S','T','U','V','W','Y','Z','X', '-', '*', '?')
#dictionary of ambiguity:
Ambigs = {
'A': ['A'],
'G': ['G'],
'C': ['C'],
'T': ['T'],
'M': [ 'A', 'C'],
'R': [ 'A', 'G'],
'W': [ 'A', 'T'],
'S': [ 'C', 'G'],
'Y': [ 'C', 'T'],
'K': [ 'G', 'T'],
'V': [ 'A', 'C', 'G'],
'H': [ 'A', 'C', 'T'],
'D': [ 'A', 'G', 'T'],
'B': [ 'C', 'G', 'T'],
'N': [ 'G', 'A', 'T', 'C']
}
###############
def string_type(string):
if all (i in NT for i in list(string)):
return 'NT'
elif all (i in AA for i in list(string)):
return 'AA'
else:
return 'ERROR: NOT AA or NT'
def Is_NT_or_AA(Fasta_Dict):
    ''' Returns NT if the sequence is composed of nucleotide symbols or AA if the symbols are amino acids'''
if all(string_type(Fasta_Dict[key]) == 'NT' for key in Fasta_Dict.iterkeys()):
return 'NT'
elif all(string_type(Fasta_Dict[key]) == 'AA' for key in Fasta_Dict.iterkeys()):
return 'AA'
else:
for k in Fasta_Dict.iterkeys():
for i in Fasta_Dict[k]:
if i not in AA:
print i
def return_amb(list_of_nuc):
"""Returns a one letter ambiguity code form a list of nucleotides. """
nts=[Ambigs[x] for x in list_of_nuc]
nts=[u for x in nts for u in x]
for code in Ambigs.iterkeys():
if set(Ambigs[code]) == set(nts):
return code
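# Quick sanity examples for return_amb, following the Ambigs table above:
# return_amb(['A', 'G']) -> 'R'
# return_amb(['A', 'C', 'T']) -> 'H'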
def is_ID(Line):
"""Evaluates if a string correspond to fasta identifier. Herein broadly defined by starting with th e '>' symbol"""
if Line.startswith('>'):
return True
else:
return False
def Fasta_to_Dict(File):
    '''Creates a dictionary of FASTA sequences in a File, with seqIds as keys to the sequences.'''
with open(File, 'r') as F:
Records = {}
Seqid='null'
Records['null']=''
for Line in F:
if Line.startswith('>'):
Seqid = Line.strip('>').strip('\n')
Seq= ''
Records[Seqid] = Seq
else:
Seq = Records[Seqid] + Line.strip('\n')
Records[Seqid] = Seq.upper()
del Records['null']
return Records
def make_Consensus(Dict, T):
    '''This function returns the sites where all the alignment positions match the same nucleotide. This is a T% consensus; for AA seqs, the most common amino acid at or above the threshold will be used, and ambiguities replaced by "X" '''
Type = Is_NT_or_AA(Dict)
ignore=['-', '?']
Consensus=''
for i in range(0, len(Dict[Dict.keys()[0]])):
compo = [seq[i] for seq in Dict.itervalues()]
compo = [x for x in compo if x not in ignore]
if len(compo) < 1:
Consensus+='-'
else:
MFB = max(set(compo), key=compo.count)
G = compo.count(MFB)
if float(G)/len(compo) >= T:
Consensus+=MFB
elif Type == 'NT':
AmbC = return_amb(compo)
Consensus+=str(AmbC)
else:
Consensus += 'X'
return Consensus
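# Worked example (hypothetical two-sequence alignment, T=1.0):
# seq1 = ACGT, seq2 = ACGA -> columns 1-3 agree; column 4 holds {'T', 'A'},
# which is below the threshold, so for NT data return_amb gives 'W' and the
# consensus is 'ACGW'.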
def Good_Blocks(Consensus, M):
    '''This function takes as input a consensus sequence and returns blocks of M contiguous base pairs in that consensus (conserved sites of a given length)'''
GoodBlocks =''
block = ''
for site in Consensus:
if site not in ['-','N', '?']:
block+=site
elif site in ['-','N', '?' ] and len(block)>0:
if len(block) >= M:
GoodBlocks += block.upper() + site
block = ''
else:
GoodBlocks += block.lower() + site
block = ''
else:
GoodBlocks += site
block = ''
GoodBlocks+=block.lower()
return GoodBlocks
###MAIN###
if __name__ =='__main__':
parser = argparse.ArgumentParser(description='This is a program to write consensus sequences')
parser.add_argument('-i', dest = 'alignments', type = str, nargs= '+', help = 'Input alignment(s) in FASTA format.')
parser.add_argument('-t', action= 'store', dest = 'percentage', default = 1.0, type = float, help='Specify percentage threshold to make consensus, default 1.0' )
parser.add_argument('-B', action = 'store', dest = 'blocks', default = 0, type = int, help='look for conserved regions in the alignement (blocks) of the minimum size provided')
parser.add_argument('-d', dest = 'delimiter', type = str, default = '|', help = 'Specify custom field delimiter character separating species name from other sequence identifiers. Species name should be the first element for proper parsing. Default is: "|".')
arguments= parser.parse_args()
#print arguments
T = arguments.percentage
M = arguments.blocks
D = arguments.delimiter
for File in arguments.alignments:
F = Fasta_to_Dict(File)
Con = make_Consensus(F, T)
with open ("%s_consensus.fasta" % File.split('.')[0], 'w') as out:
out.write('>%s consensus sequence\n%s\n' % (File, Con))
if M > 0:
Out = open ('Good_Blocks.fasta', 'w')
        Res = Good_Blocks(Con, M)
        FileName = File.split('.')
        if re.search(r'[ACGT]+', Res):
            print 'Consensus from orthogroup %s has conserved regions' % FileName[0]
            Out.write('>' + FileName[0] + '\n')
            Out.write(Res + '\n')
        else:
            print 'Consensus from orthogroup %s does not look promising' % FileName[0]
Out.close()
| ballesterus/UPhO | Consensus.py | Python | gpl-3.0 | 5,860 | 0.025256 |
# Copyright (c) 2014 Alexander Bredo
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the
# following conditions are met:
#
# 1. Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import time
from multiprocessing import Lock
class IndexedTimeCache():
'''
    @param ttl: Maximum time to live for an inserted item (the first one will be applied)
'''
lock = Lock()
def __init__(self, ttl=30):
self.cache = dict()
self.ttl = ttl
def insert(self, index, data, ignore_fields=[]):
IndexedTimeCache.lock.acquire()
if index in self.cache: # UPDATE + AGGREGATE
self.cache[index]['data'] = self.__aggregate(self.cache[index]['data'], data, ignore_fields)
else: # NEW
self.cache[index] = {
'timestamp': int(time.time()), # Insert Time
'data': data
}
IndexedTimeCache.lock.release()
def size(self):
return len(self.cache)
def getItemsOutOfTTL(self):
IndexedTimeCache.lock.acquire()
cache_outofdate = dict()
cache_new = dict()
for k,v in self.cache.items():
if v['timestamp'] < (time.time() - self.ttl):
cache_outofdate[k] = v
else:
cache_new[k] = v
self.cache = cache_new # Update Cache
IndexedTimeCache.lock.release()
#print(len(cache_outofdate), len(cache_new))
#print(cache_outofdate)
#print(cache_new)
return [item['data'] for item in cache_outofdate.values()]
# cache_outofdate: dict_values([{'data': {'b': 1, 'a': 2, 'c': 4}, 'timestamp': 1403523219}, {...} ])
# Return: [{'c': 2, 'b': 23, 'a': 25}, {'c': 2, 'b': 32, 'a': 29}, ...
def __aggregate(self, old, new, ignore_fields):
aggregated = old
for key, value in new.items():
if isinstance(value, dict):
for sub_key, sub_value in value.items():
if key in aggregated and (key not in ignore_fields or sub_key not in ignore_fields):
if sub_key in aggregated[key]:
aggregated[key][sub_key] += sub_value
else:
print("ERROR: Stats-Aggregation. Fields not found")
#aggregated[key][sub_key] = dict()
#aggregated[key][sub_key] = sub_value
else:
aggregated[key] = dict() #copy?
print("ERROR: Stats-Aggregation. Fields not found")
elif key not in ignore_fields:
aggregated[key] += new[key]
return aggregated
'''
import random
c = IndexedTimeCache(0)
for i in range(0,50):
c.insert((int(time.time() - random.randint(1, 5))), { 'a': random.randint(1, 5), 'b': random.randint(1, 5), 'c': random.randint(1, 5) }, ['c'])
print(c.size())
print("====", c.getItemsOutOfTTL())
print(c.size())
'''
'''
c = IndexedTimeCache(0)
c.insert('123456789Hamburg', {
"@timestamp": 123456789,
"networkLocation": "Hamburg",
"flow_request": {
"packetDeltaCountPerSec": 30,
"octetDeltaCountPerSec": 30,
"flowDurationMilliseconds": 300
}
})
c.insert('123456789Hamburg', {
"@timestamp": 123456789,
"networkLocation": "Hamburg",
"flow_request": {
"packetDeltaCountPerSec": 60,
"octetDeltaCountPerSec": 60,
"flowDurationMilliseconds": 600
}
})
c.insert('123456789Hamburg', {
"@timestamp": 123456789,
"networkLocation": "Hamburg",
"flow_request": {
"packetDeltaCountPerSec": 20,
"octetDeltaCountPerSec": 200,
"flowDurationMilliseconds": 2000
}
})
print(c.getItemsOutOfTTL())
''' | alexbredo/ipfix-receiver | base/cache.py | Python | bsd-2-clause | 4,389 | 0.030987 |
# -*- coding: utf-8 -*-
from subprocess import check_call
def test_shellstreaming_help():
check_call(["shellstreaming", "--help"])
| laysakura/shellstreaming | test/master/test_master_functional.py | Python | apache-2.0 | 139 | 0 |
#!/usr/bin/python3
"""
Analyze the word frequencies on the main articles of a website
"""
import argparse
import requests
from bs4 import BeautifulSoup
import re
import itertools
import string
from collections import defaultdict
import time
import json
import os
import operator
def load_ignored_words(words_file):
'''Load a list of words to ignore from a text file
'''
ignored_words = set()
# Read ignored words from file
if words_file is not None:
with open(words_file, 'r') as ignore_file:
lines = ignore_file.readlines()
lines = [line.strip() for line in lines]
ignored_words = [w for line in lines for w in line.split(' ')]
# Keep unique words
ignored_words = set(ignored_words)
print('[*] Ignoring the following words')
print(ignored_words)
return ignored_words
def retrieve_page(url, base):
    '''Retrieve the text contents from a URL
'''
if url is None:
return ''
if not url.startswith('http'):
url = base + url
try:
print('[+] Retrieving {0}'.format(url))
content = requests.get(url).text
except Exception as e:
print('[-] Error retrieving page content')
print('[-] {0}'.format(e))
return ''
time.sleep(0.2)
return content
def get_element_texts(content, element_type):
'''Get the contents of the requested elements
'''
soup = BeautifulSoup(content, 'html.parser')
elements = soup.find_all(element_type)
text = [element.get_text().strip() for element in elements]
return text
def get_links(content):
'''Get all the links of a page
'''
soup = BeautifulSoup(content, 'html.parser')
elements = soup.find_all('a')
links = [element.get('href') for element in elements]
return links
def create_word_list(elements, ignored_words=set()):
'''Create a list of words given a list of html elements
    This function splits the sentences into words and merges them into one
single list. Moreover, it removes punctuation and turns all words to
lowercase in order to make frequency analysis easier.
If provided with a list of ignored words, it removes those words from
the final words list.
Args:
elements: List of HTML elements that the function gets the text from
        ignored_words: Set of words to remove from the final list
Returns:
A list of all the words contained in the given elements
'''
word_list = []
for element in elements:
element_words = element.split(' ')
if element_words is not None:
word_list += element_words
# Remove punctuation
removed_punctuation = [''.join(c for c in word if c not in string.punctuation)
for word in word_list]
# Make lowercase
lower_list = [w.lower() for w in removed_punctuation]
# Remove ignored words and words of length 1
final_list = [w for w in lower_list if len(w) > 1 and w not in ignored_words]
return final_list
def get_domain(url):
    '''Get the domain name of a url (without prefix and suffix)
'''
m = re.match(r'https?://(www\.)?(.+)\..+', url)
return m.group(2)
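# Example: get_domain('http://www.nytimes.com') returns 'nytimes'; the
# optional 'www.' prefix and the top-level domain are dropped by the regex.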
def follow_links(url):
'''Follow the links on a webpage and return the content
'''
cache_fname = '{domain}.json'.format(domain=get_domain(url))
if os.path.isfile(cache_fname):
print('[*] Loading from cache file {0}'.format(cache_fname))
with open(cache_fname, 'r') as cache_file:
pages = json.load(cache_file)
return pages
content = retrieve_page(url, url)
links = get_links(content)
pages = [retrieve_page(link, url) for link in links]
print('[*] Saving cache file {0}'.format(cache_fname))
with open(cache_fname, 'w') as cache_file:
json.dump(pages, cache_file)
return pages
def mine_url(url, ignored_words):
'''Given a url, follow all the links and return lists of words on each page
'''
pages = follow_links(url)
paragraph_list = [get_element_texts(page, 'p') for page in pages]
word_lists = [create_word_list(paragraphs, ignored_words) for paragraphs in paragraph_list]
return word_lists
def calculate_tf(word_list):
'''Calculate relative term frequencies for a list of words
'''
tf = defaultdict(int)
max_freq = 0
for word in word_list:
tf[word] += 1
if tf[word] > max_freq:
max_freq = tf[word]
for word, freq in tf.items():
tf[word] = round(tf[word] / max_freq, 3)
return tf
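# Small worked example: for the list ['a', 'b', 'a', 'a'] the maximum count
# is 3 ('a'), so calculate_tf yields {'a': 1.0, 'b': 0.333} -- counts are
# scaled by the most frequent word, not by the total number of words.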
def main():
# Parse command line arguments
parser = argparse.ArgumentParser(description='Retrieve specified HTML'
' Elements from a URL')
parser.add_argument('url', help='The html page you want to retrieve all'
' the elements from')
parser.add_argument('-i', '--ignore', help='Path to ignored words list')
args = parser.parse_args()
# Add http if not already present in the url
if not re.match('^https?://*', args.url):
args.url = 'http://' + args.url
# Load ignored words
ignored_words = load_ignored_words(args.ignore)
# Parse content
word_lists = mine_url(args.url, ignored_words)
all_words = itertools.chain(*word_lists)
frequencies = calculate_tf(all_words)
print('[*] Most Frequent Words')
for i, w in enumerate(sorted(frequencies, key=frequencies.get, reverse=True)):
if i > 50:
break
print(' {0:_<20}: {1: 5}'.format(w, frequencies[w]))
if __name__ == '__main__':
main()
| dmpalyvos/web-scripts | spider.py | Python | gpl-2.0 | 5,655 | 0.001945 |
############################ Copyrights and license ############################
# #
# Copyright 2013 Vincent Jacques <[email protected]> #
# Copyright 2014 Vincent Jacques <[email protected]> #
# Copyright 2016 Peter Buckley <[email protected]> #
# Copyright 2018 Wan Liuyang <[email protected]> #
# Copyright 2018 sfdye <[email protected]> #
# #
# This file is part of PyGithub. #
# http://pygithub.readthedocs.io/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
################################################################################
import github.GithubObject
import github.NamedUser
class StatsContributor(github.GithubObject.NonCompletableGithubObject):
"""
This class represents StatsContributors. The reference can be found here https://docs.github.com/en/rest/reference/repos#get-all-contributor-commit-activity
"""
class Week(github.GithubObject.NonCompletableGithubObject):
"""
This class represents weekly statistics of a contributor.
"""
@property
def w(self):
"""
:type: datetime.datetime
"""
return self._w.value
@property
def a(self):
"""
:type: int
"""
return self._a.value
@property
def d(self):
"""
:type: int
"""
return self._d.value
@property
def c(self):
"""
:type: int
"""
return self._c.value
def _initAttributes(self):
self._w = github.GithubObject.NotSet
self._a = github.GithubObject.NotSet
self._d = github.GithubObject.NotSet
self._c = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "w" in attributes: # pragma no branch
self._w = self._makeTimestampAttribute(attributes["w"])
if "a" in attributes: # pragma no branch
self._a = self._makeIntAttribute(attributes["a"])
if "d" in attributes: # pragma no branch
self._d = self._makeIntAttribute(attributes["d"])
if "c" in attributes: # pragma no branch
self._c = self._makeIntAttribute(attributes["c"])
@property
def author(self):
"""
:type: :class:`github.NamedUser.NamedUser`
"""
return self._author.value
@property
def total(self):
"""
:type: int
"""
return self._total.value
@property
def weeks(self):
"""
:type: list of :class:`.Week`
"""
return self._weeks.value
def _initAttributes(self):
self._author = github.GithubObject.NotSet
self._total = github.GithubObject.NotSet
self._weeks = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "author" in attributes: # pragma no branch
self._author = self._makeClassAttribute(
github.NamedUser.NamedUser, attributes["author"]
)
if "total" in attributes: # pragma no branch
self._total = self._makeIntAttribute(attributes["total"])
if "weeks" in attributes: # pragma no branch
self._weeks = self._makeListOfClassesAttribute(
self.Week, attributes["weeks"]
)
| PyGithub/PyGithub | github/StatsContributor.py | Python | lgpl-3.0 | 4,872 | 0.005542 |
from openpyxl.styles.colors import Color
import pytest
@pytest.mark.parametrize("value", ['00FFFFFF', 'efefef'])
def test_argb(value):
from ..colors import aRGB_REGEX
assert aRGB_REGEX.match(value) is not None
class TestColor:
def test_ctor(self):
c = Color()
assert c.value == "00000000"
assert c.type == "rgb"
assert dict(c) == {'rgb': '00000000'}
def test_rgb(self):
c = Color(rgb="FFFFFFFF")
assert c.value == "FFFFFFFF"
assert c.type == "rgb"
assert dict(c) == {'rgb': 'FFFFFFFF'}
def test_indexed(self):
c = Color(indexed=4)
assert c.value == 4
assert c.type == "indexed"
assert dict(c) == {'indexed': "4"}
def test_auto(self):
c = Color(auto=1)
assert c.type is "auto"
assert c.value is True
assert dict(c) == {'auto': "1"}
def test_theme(self):
c = Color(theme="1")
assert c.value == 1
assert c.type == "theme"
assert dict(c) == {'theme': "1"}
def test_tint(self):
c = Color(tint=0.5)
assert c.tint == 0.5
assert dict(c) == {'rgb': '00000000', 'tint': "0.5"}
def test_highlander(self):
c = Color(rgb="FFFFFFF", indexed=4, theme=2, auto=False)
assert c.value == 4
assert c.type == "indexed"
def test_validation(self):
c = Color()
with pytest.raises(TypeError):
c.value = 4
def test_color_descriptor():
from ..colors import ColorDescriptor
class DummyStyle(object):
value = ColorDescriptor('value')
style = DummyStyle()
style.value = "efefef"
assert dict(style.value) == {'rgb': '00efefef'}
| Darthkpo/xtt | openpyxl/styles/tests/test_colors.py | Python | mit | 1,719 | 0.000582 |
# Parser
from util import sTu, getSFChar, sTr, sTup, checkAware
import world as w
import settings as s
from commands import movement
from commands import inform
from commands import admin
from commands import objects
command_list = {
'look':"1",
'score':"1",
'move':"1",
'sit':"1",
'stand':"1",
'sleep':"1",
'wake':"1",
'loadobj':"1",
'get':"1",
'drop':"1",
'inventory':"1",
'keys':"1",
}
alias_list = {
'l':'look',
'sc':'score',
'n':'move n',
's':'move s',
'e':'move e',
'w':'move w',
'sl':'sleep',
'wa':'wake',
'lo':'loadobj',
'i':'inventory',
'inv':'inventory',
'key':'keys',
}
function_list = { 'look': inform.c_look, 'score': inform.c_score, 'move': movement.c_move,
'sit': movement.c_sit, 'stand': movement.c_stand, 'sleep': movement.c_sleep,
'wake': movement.c_wake, 'loadobj': admin.c_loadObj, 'get': objects.c_get,
'drop': objects.c_drop, 'inventory': objects.c_inventory, 'keys': inform.c_keys
}
def cparse(ch, cmdStr): # character object, full command string
if cmdStr == "":
sTup(ch.sId)
return
# split up cmdStr into useful stuff.
if len(cmdStr.split(None, 1)) > 1:
firstword, rest = cmdStr.split(None, 1)
command = firstword.lower()
rawArgs = rest.strip()
else:
rawArgs = ""
command = cmdStr.lower()
commandRaw = cmdStr
if command in alias_list:
tmpcmd = alias_list[command]
sizearray = tmpcmd.split(" ")
if len(sizearray) == 2:
rawArgs = str(sizearray[1]) + " " + str(rawArgs)
command = sizearray[0]
else:
command = alias_list[command]
if command in command_list:
rawArgs = rawArgs.strip()
func = function_list[command]
func(ch,rawArgs)
else:
sTu(ch.sId,"Command not found",1)
| tellian/muddpy | muddpy/Commands.py | Python | gpl-3.0 | 1,696 | 0.055425 |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model classes for datastore objects and properties for models."""
import datetime
import inspect
import json
import pickle
import zlib
from google.cloud.datastore import helpers
from google.cloud.ndb import _datastore_types
from google.cloud.ndb import exceptions
from google.cloud.ndb import key as key_module
__all__ = [
"Key",
"BlobKey",
"GeoPt",
"Rollback",
"KindError",
"InvalidPropertyError",
"BadProjectionError",
"UnprojectedPropertyError",
"ReadonlyPropertyError",
"ComputedPropertyError",
"IndexProperty",
"Index",
"IndexState",
"ModelAdapter",
"make_connection",
"ModelAttribute",
"Property",
"ModelKey",
"BooleanProperty",
"IntegerProperty",
"FloatProperty",
"BlobProperty",
"TextProperty",
"StringProperty",
"GeoPtProperty",
"PickleProperty",
"JsonProperty",
"UserProperty",
"KeyProperty",
"BlobKeyProperty",
"DateTimeProperty",
"DateProperty",
"TimeProperty",
"StructuredProperty",
"LocalStructuredProperty",
"GenericProperty",
"ComputedProperty",
"MetaModel",
"Model",
"Expando",
"transaction",
"transaction_async",
"in_transaction",
"transactional",
"transactional_async",
"transactional_tasklet",
"non_transactional",
"get_multi_async",
"get_multi",
"put_multi_async",
"put_multi",
"delete_multi_async",
"delete_multi",
"get_indexes_async",
"get_indexes",
]
_MAX_STRING_LENGTH = 1500
Key = key_module.Key
BlobKey = _datastore_types.BlobKey
GeoPt = helpers.GeoPoint
Rollback = exceptions.Rollback
class KindError(exceptions.BadValueError):
"""Raised when an implementation for a kind can't be found.
May also be raised when the kind is not a byte string.
"""
class InvalidPropertyError(exceptions.Error):
"""Raised when a property is not applicable to a given use.
For example, a property must exist and be indexed to be used in a query's
projection or group by clause.
"""
BadProjectionError = InvalidPropertyError
"""This alias for :class:`InvalidPropertyError` is for legacy support."""
class UnprojectedPropertyError(exceptions.Error):
"""Raised when getting a property value that's not in the projection."""
class ReadonlyPropertyError(exceptions.Error):
"""Raised when attempting to set a property value that is read-only."""
class ComputedPropertyError(ReadonlyPropertyError):
"""Raised when attempting to set or delete a computed property."""
class IndexProperty:
"""Immutable object representing a single property in an index."""
__slots__ = ("_name", "_direction")
def __new__(cls, *, name, direction):
instance = super(IndexProperty, cls).__new__(cls)
instance._name = name
instance._direction = direction
return instance
@property
def name(self):
"""str: The property name being indexed."""
return self._name
@property
def direction(self):
"""str: The direction in the index, ``asc`` or ``desc``."""
return self._direction
def __repr__(self):
"""Return a string representation."""
return "{}(name={!r}, direction={!r})".format(
type(self).__name__, self.name, self.direction
)
def __eq__(self, other):
"""Compare two index properties for equality."""
if not isinstance(other, IndexProperty):
return NotImplemented
return self.name == other.name and self.direction == other.direction
def __ne__(self, other):
"""Inequality comparison operation."""
return not self == other
def __hash__(self):
return hash((self.name, self.direction))
class Index:
"""Immutable object representing an index."""
__slots__ = ("_kind", "_properties", "_ancestor")
def __new__(cls, *, kind, properties, ancestor):
instance = super(Index, cls).__new__(cls)
instance._kind = kind
instance._properties = properties
instance._ancestor = ancestor
return instance
@property
def kind(self):
"""str: The kind being indexed."""
return self._kind
@property
def properties(self):
"""List[IndexProperty]: The properties being indexed."""
return self._properties
@property
def ancestor(self):
"""bool: Indicates if this is an ancestor index."""
return self._ancestor
def __repr__(self):
"""Return a string representation."""
return "{}(kind={!r}, properties={!r}, ancestor={})".format(
type(self).__name__, self.kind, self.properties, self.ancestor
)
def __eq__(self, other):
"""Compare two indexes."""
if not isinstance(other, Index):
return NotImplemented
return (
self.kind == other.kind
and self.properties == other.properties
and self.ancestor == other.ancestor
)
def __ne__(self, other):
"""Inequality comparison operation."""
return not self == other
def __hash__(self):
return hash((self.kind, self.properties, self.ancestor))
class IndexState:
"""Immutable object representing an index and its state."""
__slots__ = ("_definition", "_state", "_id")
def __new__(cls, *, definition, state, id):
instance = super(IndexState, cls).__new__(cls)
instance._definition = definition
instance._state = state
instance._id = id
return instance
@property
def definition(self):
"""Index: The index corresponding to the tracked state."""
return self._definition
@property
def state(self):
"""str: The index state.
Possible values are ``error``, ``deleting``, ``serving`` or
``building``.
"""
return self._state
@property
def id(self):
"""int: The index ID."""
return self._id
def __repr__(self):
"""Return a string representation."""
return "{}(definition={!r}, state={!r}, id={:d})".format(
type(self).__name__, self.definition, self.state, self.id
)
def __eq__(self, other):
"""Compare two index states."""
if not isinstance(other, IndexState):
return NotImplemented
return (
self.definition == other.definition
and self.state == other.state
and self.id == other.id
)
def __ne__(self, other):
"""Inequality comparison operation."""
return not self == other
def __hash__(self):
return hash((self.definition, self.state, self.id))
class ModelAdapter:
__slots__ = ()
def __init__(self, *args, **kwargs):
raise NotImplementedError
def make_connection(*args, **kwargs):
raise NotImplementedError
class ModelAttribute:
"""Base for classes that implement a ``_fix_up()`` method."""
__slots__ = ()
def _fix_up(self, cls, code_name):
"""Fix-up property name. To be implemented by subclasses.
Args:
cls (type): The model class that owns the property.
code_name (str): The name of the :class:`Property` being fixed up.
"""
class _BaseValue:
"""A marker object wrapping a "base type" value.
This is used to be able to tell whether ``entity._values[name]`` is a
user value (i.e. of a type that the Python code understands) or a
base value (i.e of a type that serialization understands).
User values are unwrapped; base values are wrapped in a
:class:`_BaseValue` instance.
Args:
b_val (Any): The base value to be wrapped.
Raises:
TypeError: If ``b_val`` is :data:`None`.
TypeError: If ``b_val`` is a list.
"""
__slots__ = ("b_val",)
def __init__(self, b_val):
if b_val is None:
raise TypeError("Cannot wrap None")
if isinstance(b_val, list):
raise TypeError("Lists cannot be wrapped. Received", b_val)
self.b_val = b_val
def __repr__(self):
return "_BaseValue({!r})".format(self.b_val)
def __eq__(self, other):
"""Compare two :class:`_BaseValue` instances."""
if not isinstance(other, _BaseValue):
return NotImplemented
return self.b_val == other.b_val
def __ne__(self, other):
"""Inequality comparison operation."""
return not self == other
def __hash__(self):
raise TypeError("_BaseValue is not immutable")
class Property(ModelAttribute):
"""A class describing a typed, persisted attribute of an entity.
.. warning::
This is not to be confused with Python's ``@property`` built-in.
.. note::
This is just a base class; there are specific subclasses that
describe properties of various types (and :class:`GenericProperty`
which describes a dynamically typed property).
The :class:`Property` does not reserve any "public" names (i.e. names
that don't start with an underscore). This is intentional; the subclass
:class:`StructuredProperty` uses the public attribute namespace to refer to
nested property names (this is essential for specifying queries on
subproperties).
The :meth:`IN` attribute is provided as an alias for ``_IN``, but ``IN``
can be overridden if a subproperty has the same name.
The :class:`Property` class and its predefined subclasses allow easy
subclassing using composable (or stackable) validation and
conversion APIs. These require some terminology definitions:
* A **user value** is a value such as would be set and accessed by the
application code using standard attributes on the entity.
* A **base value** is a value such as would be serialized to
and deserialized from Cloud Datastore.
A property will be a member of a :class:`Model` and will be used to help
store values in an ``entity`` (i.e. instance of a model subclass). The
underlying stored values can be either user values or base values.
To interact with the composable conversion and validation API, a
:class:`Property` subclass can define
* ``_to_base_type()``
* ``_from_base_type()``
* ``_validate()``
These should **not** call their ``super()`` method, since the methods
are meant to be composed. For example with composable validation:
.. code-block:: python
class Positive(ndb.IntegerProperty):
def _validate(self, value):
if value < 1:
raise ndb.exceptions.BadValueError("Non-positive", value)
class SingleDigit(Positive):
def _validate(self, value):
if value > 9:
raise ndb.exceptions.BadValueError("Multi-digit", value)
neither ``_validate()`` method calls ``super()``. Instead, when a
``SingleDigit`` property validates a value, it composes all validation
calls in order:
* ``SingleDigit._validate``
* ``Positive._validate``
* ``IntegerProperty._validate``
The API supports "stacking" classes with ever more sophisticated
user / base conversions:
* the user to base conversion goes from more sophisticated to less
sophisticated
* the base to user conversion goes from less sophisticated to more
sophisticated
For example, see the relationship between :class:`BlobProperty`,
:class:`TextProperty` and :class:`StringProperty`.
The validation API distinguishes between "lax" and "strict" user values.
The set of lax values is a superset of the set of strict values. The
``_validate()`` method takes a lax value and if necessary converts it to
a strict value. For example, an integer (lax) can be converted to a
floating point (strict) value. This means that when setting the property
value, lax values are accepted, while when getting the property value, only
strict values will be returned. If no conversion is needed, ``_validate()``
may return :data:`None`. If the argument is outside the set of accepted lax
values, ``_validate()`` should raise an exception, preferably
:exc:`TypeError` or :exc:`.BadValueError`.
A class utilizing all three may resemble:
.. code-block:: python
class WidgetProperty(ndb.Property):
def _validate(self, value):
# Lax user value to strict user value.
if not isinstance(value, Widget):
                    raise ndb.exceptions.BadValueError(value)
def _to_base_type(self, value):
# (Strict) user value to base value.
if isinstance(value, Widget):
return value.to_internal()
def _from_base_type(self, value):
                # Base value to (strict) user value.
if not isinstance(value, _WidgetInternal):
return Widget(value)
There are some things that ``_validate()``, ``_to_base_type()`` and
``_from_base_type()`` do **not** need to handle:
* :data:`None`: They will not be called with :data:`None` (and if they
return :data:`None`, this means that the value does not need conversion).
* Repeated values: The infrastructure takes care of calling
``_from_base_type()`` or ``_to_base_type()`` for each list item in a
repeated value.
* Wrapping "base" values: The wrapping and unwrapping is taken care of by
the infrastructure that calls the composable APIs.
* Comparisons: The comparison operations call ``_to_base_type()`` on
their operand.
* Distinguishing between user and base values: the infrastructure
guarantees that ``_from_base_type()`` will be called with an
(unwrapped) base value, and that ``_to_base_type()`` will be called
with a user value.
* Returning the original value: if any of these return :data:`None`, the
original value is kept. (Returning a different value not equal to
:data:`None` will substitute the different value.)
Additionally, :meth:`_prepare_for_put` can be used to integrate with
datastore save hooks used by :class:`Model` instances.
.. automethod:: _prepare_for_put
Args:
name (str): The name of the property.
indexed (bool): Indicates if the value should be indexed.
repeated (bool): Indicates if this property is repeated, i.e. contains
multiple values.
required (bool): Indicates if this property is required on the given
model type.
default (Any): The default value for this property.
choices (Iterable[Any]): A container of allowed values for this
property.
validator (Callable[[~google.cloud.ndb.model.Property, Any], bool]): A
validator to be used to check values.
verbose_name (str): A longer, user-friendly name for this property.
write_empty_list (bool): Indicates if an empty list should be written
to the datastore.
"""
# Instance default fallbacks provided by class.
_code_name = None
_name = None
_indexed = True
_repeated = False
_required = False
_default = None
_choices = None
_validator = None
_verbose_name = None
_write_empty_list = False
# Non-public class attributes.
_FIND_METHODS_CACHE = {}
def __init__(
self,
name=None,
*,
indexed=None,
repeated=None,
required=None,
default=None,
choices=None,
validator=None,
verbose_name=None,
write_empty_list=None
):
# NOTE: These explicitly avoid setting the values so that the
# instances will fall back to the class on lookup.
if name is not None:
self._name = self._verify_name(name)
if indexed is not None:
self._indexed = indexed
if repeated is not None:
self._repeated = repeated
if required is not None:
self._required = required
if default is not None:
self._default = default
self._verify_repeated()
if choices is not None:
self._choices = self._verify_choices(choices)
if validator is not None:
self._validator = self._verify_validator(validator)
if verbose_name is not None:
self._verbose_name = verbose_name
if write_empty_list is not None:
self._write_empty_list = write_empty_list
@staticmethod
def _verify_name(name):
"""Verify the name of the property.
Args:
name (str): The name of the property.
Returns:
str: The ``name`` passed in.
Raises:
TypeError: If the ``name`` is not a string.
ValueError: If the name contains a ``.``.
"""
if not isinstance(name, str):
raise TypeError("Name {!r} is not a string".format(name))
if "." in name:
raise ValueError(
"Name {!r} cannot contain period characters".format(name)
)
return name
def _verify_repeated(self):
"""Checks if the repeated / required / default values are compatible.
Raises:
ValueError: If ``repeated`` is :data:`True` but one of
``required`` or ``default`` is set.
"""
if self._repeated and (self._required or self._default is not None):
raise ValueError(
"repeated is incompatible with required or default"
)
@staticmethod
def _verify_choices(choices):
"""Verify the choices for a property with a limited set of values.
Args:
choices (Union[list, tuple, set, frozenset]): An iterable of
allowed values for the property.
Returns:
frozenset: The ``choices`` cast to a frozen set.
Raises:
TypeError: If ``choices`` is not one of the expected container
types.
"""
if not isinstance(choices, (list, tuple, set, frozenset)):
raise TypeError(
"choices must be a list, tuple or set; received {!r}".format(
choices
)
)
return frozenset(choices)
@staticmethod
def _verify_validator(validator):
"""Verify the validator for a property.
The validator will be called as follows:
.. code-block:: python
value = validator(prop, value)
The ``validator`` should be idempotent, i.e. calling it a second time
should not further modify the value. So a validator that returns e.g.
``value.lower()`` or ``value.strip()`` is fine, but one that returns
``value + "$"`` is not.
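        For example, a sketch of an idempotent validator (the names are
        illustrative):
        .. code-block:: python
            def normalize(prop, value):
                # Lower-casing twice yields the same result, so this
                # validator is idempotent.
                return value.lower()
            class Account(ndb.Model):
                email = ndb.StringProperty(validator=normalize)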
Args:
validator (Callable[[Property, Any], bool]): A callable that can
validate a property value.
Returns:
Callable[[Property, Any], bool]: The ``validator``.
Raises:
            TypeError: If ``validator`` is not callable. This is determined
                by checking if the attribute ``__call__`` is defined.
"""
        # NOTE: Checking for ``__call__`` is done to match the original
        # implementation. It's not clear why ``callable()`` was not used.
if getattr(validator, "__call__", None) is None:
raise TypeError(
"validator must be callable or None; received {!r}".format(
validator
)
)
return validator
def _constructor_info(self):
"""Helper for :meth:`__repr__`.
Yields:
Tuple[str, bool]: Pairs of argument name and a boolean indicating
if that argument is a keyword.
"""
signature = inspect.signature(self.__init__)
for name, parameter in signature.parameters.items():
is_keyword = parameter.kind == inspect.Parameter.KEYWORD_ONLY
yield name, is_keyword
def __repr__(self):
"""Return a compact unambiguous string representation of a property.
This cycles through all stored attributes and displays the ones that
differ from the default values.
"""
args = []
cls = type(self)
for name, is_keyword in self._constructor_info():
attr = "_{}".format(name)
instance_val = getattr(self, attr)
default_val = getattr(cls, attr)
if instance_val is not default_val:
if isinstance(instance_val, type):
as_str = instance_val.__qualname__
else:
as_str = repr(instance_val)
if is_keyword:
as_str = "{}={}".format(name, as_str)
args.append(as_str)
return "{}({})".format(cls.__name__, ", ".join(args))
def _datastore_type(self, value):
"""Internal hook used by property filters.
Sometimes the low-level query interface needs a specific data type
in order for the right filter to be constructed. See
:meth:`_comparison`.
Args:
value (Any): The value to be converted to a low-level type.
Returns:
Any: The passed-in ``value``, always. Subclasses may alter this
behavior.
"""
return value
def _comparison(self, op, value):
"""Internal helper for comparison operators.
Args:
op (str): The comparison operator. One of ``=``, ``!=``, ``<``,
``<=``, ``>``, ``>=`` or ``in``.
value (Any): The value to compare against.
Returns:
FilterNode: A FilterNode instance representing the requested
comparison.
Raises:
BadFilterError: If the current property is not indexed.
"""
# Import late to avoid circular imports.
from google.cloud.ndb import query
if not self._indexed:
raise exceptions.BadFilterError(
"Cannot query for unindexed property {}".format(self._name)
)
if value is not None:
value = self._do_validate(value)
value = self._call_to_base_type(value)
value = self._datastore_type(value)
return query.FilterNode(self._name, op, value)
# Comparison operators on Property instances don't compare the
    # properties; instead they return ``FilterNode`` instances that can be
# used in queries.
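    # For example (a sketch), ``Employee.rank >= 4`` does not evaluate to a
    # bool; it produces the equivalent of ``FilterNode("rank", ">=", 4)``,
    # which can be passed to ``Employee.query()``.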
def __eq__(self, value):
"""FilterNode: Represents the ``=`` comparison."""
return self._comparison("=", value)
def __ne__(self, value):
"""FilterNode: Represents the ``!=`` comparison."""
return self._comparison("!=", value)
def __lt__(self, value):
"""FilterNode: Represents the ``<`` comparison."""
return self._comparison("<", value)
def __le__(self, value):
"""FilterNode: Represents the ``<=`` comparison."""
return self._comparison("<=", value)
def __gt__(self, value):
"""FilterNode: Represents the ``>`` comparison."""
return self._comparison(">", value)
def __ge__(self, value):
"""FilterNode: Represents the ``>=`` comparison."""
return self._comparison(">=", value)
def _IN(self, value):
"""For the ``in`` comparison operator.
The ``in`` operator cannot be overloaded in the way we want
to, so we define a method. For example:
.. code-block:: python
Employee.query(Employee.rank.IN([4, 5, 6]))
Note that the method is called ``_IN()`` but may normally be invoked
as ``IN()``; ``_IN()`` is provided for the case that a
:class:`.StructuredProperty` refers to a model that has a property
named ``IN``.
Args:
value (Iterable[Any]): The set of values that the property value
must be contained in.
Returns:
Union[~google.cloud.ndb.query.DisjunctionNode, \
~google.cloud.ndb.query.FilterNode, \
~google.cloud.ndb.query.FalseNode]: A node corresponding
to the desired in filter.
* If ``value`` is empty, this will return a :class:`.FalseNode`
* If ``len(value) == 1``, this will return a :class:`.FilterNode`
* Otherwise, this will return a :class:`.DisjunctionNode`
Raises:
~google.cloud.ndb.exceptions.BadFilterError: If the current
property is not indexed.
~google.cloud.ndb.exceptions.BadArgumentError: If ``value`` is not
a basic container (:class:`list`, :class:`tuple`, :class:`set`
or :class:`frozenset`).
"""
# Import late to avoid circular imports.
from google.cloud.ndb import query
if not self._indexed:
raise exceptions.BadFilterError(
"Cannot query for unindexed property {}".format(self._name)
)
if not isinstance(value, (list, tuple, set, frozenset)):
raise exceptions.BadArgumentError(
"Expected list, tuple or set, got {!r}".format(value)
)
values = []
for sub_value in value:
if sub_value is not None:
sub_value = self._do_validate(sub_value)
sub_value = self._call_to_base_type(sub_value)
sub_value = self._datastore_type(sub_value)
values.append(sub_value)
return query.FilterNode(self._name, "in", values)
IN = _IN
"""Used to check if a property value is contained in a set of values.
For example:
.. code-block:: python
Employee.query(Employee.rank.IN([4, 5, 6]))
"""
def __neg__(self):
"""Return a descending sort order on this property.
For example:
.. code-block:: python
Employee.query().order(-Employee.rank)
Raises:
NotImplementedError: Always, the original implementation relied on
a low-level datastore query module.
"""
raise NotImplementedError("Missing datastore_query.PropertyOrder")
def __pos__(self):
"""Return an ascending sort order on this property.
Note that this is redundant but provided for consistency with
:meth:`__neg__`. For example, the following two are equivalent:
.. code-block:: python
Employee.query().order(+Employee.rank)
Employee.query().order(Employee.rank)
Raises:
NotImplementedError: Always, the original implementation relied on
a low-level datastore query module.
"""
raise NotImplementedError("Missing datastore_query.PropertyOrder")
def _do_validate(self, value):
"""Call all validations on the value.
This transforms the ``value`` via:
* Calling the derived ``_validate()`` method(s) (on subclasses that
don't define ``_to_base_type()``),
* Calling the custom validator function
After transforming, it checks if the transformed value is in
``choices`` (if defined).
It's possible that one of the ``_validate()`` methods will raise
an exception.
If ``value`` is a base-value, this will do nothing and return it.
.. note::
This does not call all composable ``_validate()`` methods.
It only calls ``_validate()`` methods up to the
first class in the hierarchy that defines a ``_to_base_type()``
method, when the MRO is traversed looking for ``_validate()`` and
``_to_base_type()`` methods.
.. note::
For a repeated property this method should be called
for each value in the list, not for the list as a whole.
Args:
value (Any): The value to be converted / validated.
Returns:
Any: The transformed ``value``, possibly modified in an idempotent
way.
"""
if isinstance(value, _BaseValue):
return value
value = self._call_shallow_validation(value)
if self._validator is not None:
new_value = self._validator(self, value)
if new_value is not None:
value = new_value
if self._choices is not None:
if value not in self._choices:
raise exceptions.BadValueError(
"Value {!r} for property {} is not an allowed "
"choice".format(value, self._name)
)
return value
def _fix_up(self, cls, code_name):
"""Internal helper called to tell the property its name.
This is called by :meth:`_fix_up_properties`, which is called by
:class:`MetaModel` when finishing the construction of a :class:`Model`
subclass. The name passed in is the name of the class attribute to
which the current property is assigned (a.k.a. the code name). Note
that this means that each property instance must be assigned to (at
most) one class attribute. E.g. to declare three strings, you must
        create three :class:`StringProperty` instances:
.. code-block:: python
class MyModel(ndb.Model):
foo = ndb.StringProperty()
bar = ndb.StringProperty()
baz = ndb.StringProperty()
you cannot write:
.. code-block:: python
class MyModel(ndb.Model):
foo = bar = baz = ndb.StringProperty()
Args:
cls (type): The class that the property is stored on. This argument
is unused by this method, but may be used by subclasses.
code_name (str): The name (on the class) that refers to this
property.
"""
self._code_name = code_name
if self._name is None:
self._name = code_name
def _store_value(self, entity, value):
"""Store a value in an entity for this property.
This assumes validation has already taken place. For a repeated
property the value should be a list.
Args:
entity (Model): An entity to set a value on.
value (Any): The value to be stored for this property.
"""
entity._values[self._name] = value
def _set_value(self, entity, value):
"""Set a value in an entity for a property.
This performs validation first. For a repeated property the value
should be a list (or similar container).
Args:
entity (Model): An entity to set a value on.
value (Any): The value to be stored for this property.
Raises:
ReadonlyPropertyError: If the ``entity`` is the result of a
projection query.
.BadValueError: If the current property is repeated but the
``value`` is not a basic container (:class:`list`,
:class:`tuple`, :class:`set` or :class:`frozenset`).
"""
if entity._projection:
raise ReadonlyPropertyError(
"You cannot set property values of a projection entity"
)
if self._repeated:
if not isinstance(value, (list, tuple, set, frozenset)):
raise exceptions.BadValueError(
"Expected list or tuple, got {!r}".format(value)
)
value = [self._do_validate(v) for v in value]
else:
if value is not None:
value = self._do_validate(value)
self._store_value(entity, value)
def _has_value(self, entity, unused_rest=None):
"""Determine if the entity has a value for this property.
Args:
entity (Model): An entity to check if the current property has
a value set.
unused_rest (None): An always unused keyword.
"""
return self._name in entity._values
def _retrieve_value(self, entity, default=None):
"""Retrieve the value for this property from an entity.
This returns :data:`None` if no value is set, or the ``default``
argument if given. For a repeated property this returns a list if a
value is set, otherwise :data:`None`. No additional transformations
are applied.
Args:
entity (Model): An entity to get a value from.
default (Optional[Any]): The default value to use as fallback.
"""
return entity._values.get(self._name, default)
def _get_user_value(self, entity):
"""Return the user value for this property of the given entity.
This implies removing the :class:`_BaseValue` wrapper if present, and
if it is, calling all ``_from_base_type()`` methods, in the reverse
method resolution order of the property's class. It also handles
default values and repeated properties.
Args:
entity (Model): An entity to get a value from.
Returns:
Any: The original value (if not :class:`_BaseValue`) or the wrapped
value converted from the base type.
"""
return self._apply_to_values(entity, self._opt_call_from_base_type)
def _get_base_value(self, entity):
"""Return the base value for this property of the given entity.
This implies calling all ``_to_base_type()`` methods, in the method
resolution order of the property's class, and adding a
:class:`_BaseValue` wrapper, if one is not already present. (If one
is present, no work is done.) It also handles default values and
repeated properties.
Args:
entity (Model): An entity to get a value from.
Returns:
Union[_BaseValue, List[_BaseValue]]: The original value
(if :class:`_BaseValue`) or the value converted to the base type
and wrapped.
"""
return self._apply_to_values(entity, self._opt_call_to_base_type)
def _get_base_value_unwrapped_as_list(self, entity):
"""Like _get_base_value(), but always returns a list.
Args:
entity (Model): An entity to get a value from.
Returns:
List[Any]: The unwrapped base values. For an unrepeated
property, if the value is missing or :data:`None`, returns
``[None]``; for a repeated property, if the original value is
missing or :data:`None` or empty, returns ``[]``.
"""
wrapped = self._get_base_value(entity)
if self._repeated:
return [w.b_val for w in wrapped]
else:
if wrapped is None:
return [None]
return [wrapped.b_val]
def _opt_call_from_base_type(self, value):
"""Call ``_from_base_type()`` if necessary.
If ``value`` is a :class:`_BaseValue`, unwrap it and call all
        ``_from_base_type()`` methods. Otherwise, return the value
unchanged.
Args:
value (Any): The value to invoke :meth:`_call_from_base_type`
for.
Returns:
Any: The original value (if not :class:`_BaseValue`) or the value
converted from the base type.
"""
if isinstance(value, _BaseValue):
value = self._call_from_base_type(value.b_val)
return value
def _value_to_repr(self, value):
"""Turn a value (base or not) into its repr().
This exists so that property classes can override it separately.
This manually applies ``_from_base_type()`` so as not to have a side
effect on what's contained in the entity. Printing a value should not
change it.
Args:
value (Any): The value to convert to a pretty-print ``repr``.
Returns:
str: The ``repr`` of the "true" value.
"""
val = self._opt_call_from_base_type(value)
return repr(val)
def _opt_call_to_base_type(self, value):
"""Call ``_to_base_type()`` if necessary.
If ``value`` is a :class:`_BaseValue`, return it unchanged.
Otherwise, call all ``_validate()`` and ``_to_base_type()`` methods
and wrap it in a :class:`_BaseValue`.
Args:
value (Any): The value to invoke :meth:`_call_to_base_type`
for.
Returns:
_BaseValue: The original value (if :class:`_BaseValue`) or the
value converted to the base type and wrapped.
"""
if not isinstance(value, _BaseValue):
value = _BaseValue(self._call_to_base_type(value))
return value
def _call_from_base_type(self, value):
"""Call all ``_from_base_type()`` methods on the value.
This calls the methods in the reverse method resolution order of
the property's class.
Args:
value (Any): The value to be converted.
Returns:
Any: The transformed ``value``.
"""
methods = self._find_methods("_from_base_type", reverse=True)
call = self._apply_list(methods)
return call(value)
def _call_to_base_type(self, value):
"""Call all ``_validate()`` and ``_to_base_type()`` methods on value.
This calls the methods in the method resolution order of the
property's class. For example, given the hierarchy
.. code-block:: python
class A(Property):
def _validate(self, value):
...
def _to_base_type(self, value):
...
class B(A):
def _validate(self, value):
...
def _to_base_type(self, value):
...
class C(B):
def _validate(self, value):
...
the full list of methods (in order) is:
* ``C._validate()``
* ``B._validate()``
* ``B._to_base_type()``
* ``A._validate()``
* ``A._to_base_type()``
Args:
value (Any): The value to be converted / validated.
Returns:
Any: The transformed ``value``.
"""
methods = self._find_methods("_validate", "_to_base_type")
call = self._apply_list(methods)
return call(value)
def _call_shallow_validation(self, value):
"""Call the "initial" set of ``_validate()`` methods.
This is similar to :meth:`_call_to_base_type` except it only calls
those ``_validate()`` methods that can be called without needing to
call ``_to_base_type()``.
An example: suppose the class hierarchy is
.. code-block:: python
class A(Property):
def _validate(self, value):
...
def _to_base_type(self, value):
...
class B(A):
def _validate(self, value):
...
def _to_base_type(self, value):
...
class C(B):
def _validate(self, value):
...
The full list of methods (in order) called by
:meth:`_call_to_base_type` is:
* ``C._validate()``
* ``B._validate()``
* ``B._to_base_type()``
* ``A._validate()``
* ``A._to_base_type()``
whereas the full list of methods (in order) called here stops once
a ``_to_base_type()`` method is encountered:
* ``C._validate()``
* ``B._validate()``
Args:
value (Any): The value to be converted / validated.
Returns:
Any: The transformed ``value``.
"""
methods = []
for method in self._find_methods("_validate", "_to_base_type"):
# Stop if ``_to_base_type()`` is encountered.
if method.__name__ != "_validate":
break
methods.append(method)
call = self._apply_list(methods)
return call(value)
@classmethod
def _find_methods(cls, *names, reverse=False):
"""Compute a list of composable methods.
        Because this is a common operation and the class hierarchy is
        static, the outcome is cached. The cache stores the methods in
        method resolution order; the ``reverse`` flag is applied when
        returning a cached result.
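        For example, with the ``A`` / ``B`` / ``C`` hierarchy shown in
        :meth:`_call_to_base_type`, a sketch of the lookup:
        .. code-block:: python
            C._find_methods("_validate", "_to_base_type")
            # -> [C._validate, B._validate, B._to_base_type,
            #     A._validate, A._to_base_type]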
Args:
names (Tuple[str, ...]): One or more method names to look up on
the current class or base classes.
reverse (bool): Optional flag, default False; if True, the list is
reversed.
Returns:
List[Callable]: Class method objects.
"""
# Get cache on current class / set cache if it doesn't exist.
key = "{}.{}".format(cls.__module__, cls.__qualname__)
cache = cls._FIND_METHODS_CACHE.setdefault(key, {})
hit = cache.get(names)
if hit is not None:
if reverse:
return list(reversed(hit))
else:
return hit
methods = []
for klass in cls.__mro__:
for name in names:
method = klass.__dict__.get(name)
if method is not None:
methods.append(method)
cache[names] = methods
if reverse:
return list(reversed(methods))
else:
return methods
def _apply_list(self, methods):
"""Chain together a list of callables for transforming a value.
.. note::
Each callable in ``methods`` is an unbound instance method, e.g.
accessed via ``Property.foo`` rather than ``instance.foo``.
Therefore, calling these methods will require ``self`` as the
first argument.
        If one of the methods returns :data:`None`, the previous value is
        kept; otherwise the returned value replaces it.
Exceptions thrown by a method in ``methods`` are not caught, so it
is up to the caller to catch them.
Args:
methods (Iterable[Callable[[Any], Any]]): An iterable of methods
to apply to a value.
Returns:
Callable[[Any], Any]: A callable that takes a single value and
applies each method in ``methods`` to it.
"""
def call(value):
for method in methods:
new_value = method(self, value)
if new_value is not None:
value = new_value
return value
return call
def _apply_to_values(self, entity, function):
"""Apply a function to the property value / values of a given entity.
This retrieves the property value, applies the function, and then
stores the value back. For a repeated property, the function is
applied separately to each of the values in the list. The
resulting value or list of values is both stored back in the
entity and returned from this method.
Args:
entity (Model): An entity to get a value from.
function (Callable[[Any], Any]): A transformation to apply to
the value.
Returns:
            Any: The transformed value stored on the entity for this property.
"""
value = self._retrieve_value(entity, self._default)
if self._repeated:
if value is None:
value = []
self._store_value(entity, value)
else:
# NOTE: This assumes, but does not check, that ``value`` is
# iterable. This relies on ``_set_value`` having checked
# and converted to a ``list`` for a repeated property.
value[:] = map(function, value)
else:
if value is not None:
new_value = function(value)
if new_value is not None and new_value is not value:
self._store_value(entity, new_value)
value = new_value
return value
def _get_value(self, entity):
"""Get the value for this property from an entity.
For a repeated property this initializes the value to an empty
list if it is not set.
Args:
entity (Model): An entity to get a value from.
Returns:
Any: The user value stored for the current property.
Raises:
UnprojectedPropertyError: If the ``entity`` is the result of a
projection query and the current property is not one of the
projected properties.
"""
if entity._projection:
if self._name not in entity._projection:
raise UnprojectedPropertyError(
"Property {} is not in the projection".format(self._name)
)
return self._get_user_value(entity)
def _delete_value(self, entity):
"""Delete the value for this property from an entity.
.. note::
If no value exists this is a no-op; deleted values will not be
serialized but requesting their value will return :data:`None` (or
an empty list in the case of a repeated property).
Args:
entity (Model): An entity to get a value from.
"""
if self._name in entity._values:
del entity._values[self._name]
def _is_initialized(self, entity):
"""Ask if the entity has a value for this property.
This returns :data:`False` if a value is stored but the stored value
is :data:`None`.
Args:
entity (Model): An entity to get a value from.
"""
return not self._required or (
(self._has_value(entity) or self._default is not None)
and self._get_value(entity) is not None
)
def __get__(self, entity, unused_cls=None):
"""Descriptor protocol: get the value from the entity.
Args:
entity (Model): An entity to get a value from.
unused_cls (type): The class that owns this instance.
"""
if entity is None:
# Handle the case where ``__get__`` is called on the class
# rather than an instance.
return self
return self._get_value(entity)
def __set__(self, entity, value):
"""Descriptor protocol: set the value on the entity.
Args:
entity (Model): An entity to set a value on.
value (Any): The value to set.
"""
self._set_value(entity, value)
def __delete__(self, entity):
"""Descriptor protocol: delete the value from the entity.
Args:
entity (Model): An entity to delete a value from.
"""
self._delete_value(entity)
def _serialize(
self, entity, pb, prefix="", parent_repeated=False, projection=None
):
"""Serialize this property to a protocol buffer.
Some subclasses may override this method.
Args:
entity (Model): The entity that owns this property.
pb (google.cloud.datastore_v1.proto.entity_pb2.Entity): An existing
entity protobuf instance that we'll add a value to.
prefix (Optional[str]): Name prefix used for
:class:`StructuredProperty` (if present, must end in ``.``).
parent_repeated (Optional[bool]): Indicates if the parent (or an
earlier ancestor) is a repeated property.
projection (Optional[Union[list, tuple]]): An iterable of strings
representing the projection for the model instance, or
:data:`None` if the instance is not a projection.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
def _deserialize(self, entity, p, unused_depth=1):
"""Deserialize this property to a protocol buffer.
Some subclasses may override this method.
Args:
entity (Model): The entity that owns this property.
p (google.cloud.datastore_v1.proto.entity_pb2.Value): A property
value protobuf to be deserialized.
            unused_depth (int): Optional nesting depth, default 1 (unused
                here, but used by some subclasses that override this method).
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
def _prepare_for_put(self, entity):
"""Allow this property to define a pre-put hook.
This base class implementation does nothing, but subclasses may
provide hooks.
Args:
entity (Model): An entity with values.
"""
pass
def _check_property(self, rest=None, require_indexed=True):
"""Check this property for specific requirements.
Called by ``Model._check_properties()``.
Args:
            rest (Optional[str]): Optional subproperty to check, of the form
                ``name1.name2...nameN``.
            require_indexed (bool): Indicates if the current property must
be indexed.
Raises:
InvalidPropertyError: If ``require_indexed`` is :data:`True`
but the current property is not indexed.
InvalidPropertyError: If a subproperty is specified via ``rest``
(:class:`StructuredProperty` overrides this method to handle
subproperties).
"""
if require_indexed and not self._indexed:
raise InvalidPropertyError(
"Property is unindexed {}".format(self._name)
)
if rest:
raise InvalidPropertyError(
"Referencing subproperty {}.{} but {} is not a structured "
"property".format(self._name, rest, self._name)
)
def _get_for_dict(self, entity):
"""Retrieve the value like ``_get_value()``.
This is intended to be processed for ``_to_dict()``.
Property subclasses can override this if they want the dictionary
returned by ``entity._to_dict()`` to contain a different value. The
        main use case is allowing :class:`StructuredProperty` and
        :class:`LocalStructuredProperty` to override the default
        ``_get_value()`` behavior.
* If you override ``_get_for_dict()`` to return a different type, you
must override ``_validate()`` to accept values of that type and
convert them back to the original type.
* If you override ``_get_for_dict()``, you must handle repeated values
and :data:`None` correctly. However, ``_validate()`` does not need to
handle these.
Args:
entity (Model): An entity to get a value from.
Returns:
Any: The user value stored for the current property.
"""
return self._get_value(entity)
def _validate_key(value, entity=None):
"""Validate a key.
Args:
value (.Key): The key to be validated.
entity (Optional[Model]): The entity that the key is being validated
for.
Returns:
.Key: The passed in ``value``.
Raises:
.BadValueError: If ``value`` is not a :class:`.Key`.
KindError: If ``entity`` is specified, but the kind of the entity
doesn't match the kind of ``value``.
"""
if not isinstance(value, Key):
raise exceptions.BadValueError("Expected Key, got {!r}".format(value))
if entity and type(entity) not in (Model, Expando):
if value.kind() != entity._get_kind():
raise KindError(
"Expected Key kind to be {}; received "
"{}".format(entity._get_kind(), value.kind())
)
return value
class ModelKey(Property):
"""Special property to store a special "key" for a :class:`Model`.
    This is intended to be used as a pseudo-:class:`Property` on each
:class:`Model` subclass. It is **not** intended for other usage in
application code.
It allows key-only queries to be done for a given kind.
.. automethod:: _validate
"""
__slots__ = ()
def __init__(self):
super(ModelKey, self).__init__()
self._name = "__key__"
def _comparison(self, op, value):
"""Internal helper for comparison operators.
This uses the base implementation in :class:`Property`, but doesn't
allow comparison to :data:`None`.
Args:
op (str): The comparison operator. One of ``=``, ``!=``, ``<``,
``<=``, ``>``, ``>=`` or ``in``.
value (Any): The value to compare against.
Returns:
FilterNode: A FilterNode instance representing the requested
comparison.
Raises:
.BadValueError: If ``value`` is :data:`None`.
"""
if value is not None:
return super(ModelKey, self)._comparison(op, value)
raise exceptions.BadValueError(
"__key__ filter query can't be compared to None"
)
def _validate(self, value):
"""Validate a ``value`` before setting it.
Args:
value (.Key): The value to check.
Returns:
.Key: The passed-in ``value``.
"""
return _validate_key(value)
@staticmethod
def _set_value(entity, value):
"""Set the entity key on an entity.
Args:
entity (Model): An entity to set the entity key on.
value (.Key): The key to be set on the entity.
"""
if value is not None:
value = _validate_key(value, entity=entity)
value = entity._validate_key(value)
entity._entity_key = value
@staticmethod
def _get_value(entity):
"""Get the entity key from an entity.
Args:
entity (Model): An entity to get the entity key from.
Returns:
.Key: The entity key stored on ``entity``.
"""
return entity._entity_key
@staticmethod
def _delete_value(entity):
"""Remove / disassociate the entity key from an entity.
Args:
entity (Model): An entity to remove the entity key from.
"""
entity._entity_key = None
class BooleanProperty(Property):
"""A property that contains values of type bool.
.. automethod:: _validate
"""
__slots__ = ()
def _validate(self, value):
"""Validate a ``value`` before setting it.
Args:
value (bool): The value to check.
Returns:
bool: The passed-in ``value``.
Raises:
.BadValueError: If ``value`` is not a :class:`bool`.
"""
if not isinstance(value, bool):
raise exceptions.BadValueError(
"Expected bool, got {!r}".format(value)
)
return value
def _db_set_value(self, v, unused_p, value):
"""Helper for :meth:`_serialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
def _db_get_value(self, v, unused_p):
"""Helper for :meth:`_deserialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
class IntegerProperty(Property):
"""A property that contains values of type integer.
.. note::
If a value is a :class:`bool`, it will be coerced to ``0`` (for
:data:`False`) or ``1`` (for :data:`True`).
.. automethod:: _validate
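    For example, a sketch of the coercion described above (the model name
    is illustrative):
    .. code-block:: python
        class Counter(ndb.Model):
            count = ndb.IntegerProperty()
        entity = Counter(count=True)  # stored as the integer 1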
"""
__slots__ = ()
def _validate(self, value):
"""Validate a ``value`` before setting it.
Args:
value (Union[int, bool]): The value to check.
Returns:
int: The passed-in ``value``.
Raises:
.BadValueError: If ``value`` is not an :class:`int` or convertible
to one.
"""
if not isinstance(value, int):
raise exceptions.BadValueError(
"Expected integer, got {!r}".format(value)
)
return int(value)
def _db_set_value(self, v, unused_p, value):
"""Helper for :meth:`_serialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
def _db_get_value(self, v, unused_p):
"""Helper for :meth:`_deserialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
class FloatProperty(Property):
"""A property that contains values of type float.
.. note::
If a value is a :class:`bool` or :class:`int`, it will be
coerced to a floating point value.
.. automethod:: _validate
"""
__slots__ = ()
def _validate(self, value):
"""Validate a ``value`` before setting it.
Args:
value (Union[float, int, bool]): The value to check.
Returns:
float: The passed-in ``value``, possibly converted to a
:class:`float`.
Raises:
.BadValueError: If ``value`` is not a :class:`float` or convertible
to one.
"""
if not isinstance(value, (float, int)):
raise exceptions.BadValueError(
"Expected float, got {!r}".format(value)
)
return float(value)
def _db_set_value(self, v, unused_p, value):
"""Helper for :meth:`_serialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
def _db_get_value(self, v, unused_p):
"""Helper for :meth:`_deserialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
class _CompressedValue:
"""A marker object wrapping compressed values.
Args:
z_val (bytes): A return value of ``zlib.compress``.
"""
__slots__ = ("z_val",)
def __init__(self, z_val):
self.z_val = z_val
def __repr__(self):
return "_CompressedValue({!r})".format(self.z_val)
def __eq__(self, other):
"""Compare two compressed values."""
if not isinstance(other, _CompressedValue):
return NotImplemented
return self.z_val == other.z_val
def __ne__(self, other):
"""Inequality comparison operation."""
return not self == other
def __hash__(self):
raise TypeError("_CompressedValue is not immutable")
class BlobProperty(Property):
"""A property that contains values that are byte strings.
.. note::
Unlike most property types, a :class:`BlobProperty` is **not**
indexed by default.
.. automethod:: _to_base_type
.. automethod:: _from_base_type
.. automethod:: _validate
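    For example, a sketch of a compressed (and therefore unindexed) blob
    (the model and property names are illustrative):
    .. code-block:: python
        class Attachment(ndb.Model):
            data = ndb.BlobProperty(compressed=True)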
Args:
name (str): The name of the property.
compressed (bool): Indicates if the value should be compressed (via
``zlib``).
indexed (bool): Indicates if the value should be indexed.
repeated (bool): Indicates if this property is repeated, i.e. contains
multiple values.
required (bool): Indicates if this property is required on the given
model type.
default (bytes): The default value for this property.
choices (Iterable[bytes]): A container of allowed values for this
property.
validator (Callable[[~google.cloud.ndb.model.Property, Any], bool]): A
validator to be used to check values.
verbose_name (str): A longer, user-friendly name for this property.
write_empty_list (bool): Indicates if an empty list should be written
to the datastore.
Raises:
NotImplementedError: If the property is both compressed and indexed.
"""
_indexed = False
_compressed = False
def __init__(
self,
name=None,
*,
compressed=None,
indexed=None,
repeated=None,
required=None,
default=None,
choices=None,
validator=None,
verbose_name=None,
write_empty_list=None
):
super(BlobProperty, self).__init__(
name=name,
indexed=indexed,
repeated=repeated,
required=required,
default=default,
choices=choices,
validator=validator,
verbose_name=verbose_name,
write_empty_list=write_empty_list,
)
if compressed is not None:
self._compressed = compressed
if self._compressed and self._indexed:
raise NotImplementedError(
"BlobProperty {} cannot be compressed and "
"indexed at the same time.".format(self._name)
)
def _value_to_repr(self, value):
"""Turn the value into a user friendly representation.
.. note::
This will truncate the value based on the "visual" length, e.g.
if it contains many ``\\xXX`` or ``\\uUUUU`` sequences, those
will count against the length as more than one character.
Args:
value (Any): The value to convert to a pretty-print ``repr``.
Returns:
str: The ``repr`` of the "true" value.
"""
long_repr = super(BlobProperty, self)._value_to_repr(value)
if len(long_repr) > _MAX_STRING_LENGTH + 4:
# Truncate, assuming the final character is the closing quote.
long_repr = long_repr[:_MAX_STRING_LENGTH] + "..." + long_repr[-1]
return long_repr
def _validate(self, value):
"""Validate a ``value`` before setting it.
Args:
value (bytes): The value to check.
Raises:
.BadValueError: If ``value`` is not a :class:`bytes`.
.BadValueError: If the current property is indexed but the value
exceeds the maximum length (1500 bytes).
"""
if not isinstance(value, bytes):
raise exceptions.BadValueError(
"Expected bytes, got {!r}".format(value)
)
if self._indexed and len(value) > _MAX_STRING_LENGTH:
raise exceptions.BadValueError(
"Indexed value {} must be at most {:d} "
"bytes".format(self._name, _MAX_STRING_LENGTH)
)
def _to_base_type(self, value):
"""Convert a value to the "base" value type for this property.
Args:
value (bytes): The value to be converted.
Returns:
Optional[bytes]: The converted value. If the current property is
compressed, this will return a wrapped version of the compressed
value. Otherwise, it will return :data:`None` to indicate that
the value didn't need to be converted.
"""
if self._compressed:
return _CompressedValue(zlib.compress(value))
def _from_base_type(self, value):
"""Convert a value from the "base" value type for this property.
Args:
value (bytes): The value to be converted.
Returns:
Optional[bytes]: The converted value. If the current property is
a (wrapped) compressed value, this will unwrap the value and return
the decompressed form. Otherwise, it will return :data:`None` to
indicate that the value didn't need to be unwrapped and
decompressed.
"""
if isinstance(value, _CompressedValue):
return zlib.decompress(value.z_val)
def _db_set_value(self, v, unused_p, value):
"""Helper for :meth:`_serialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
def _db_set_compressed_meaning(self, p):
"""Helper for :meth:`_db_set_value`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
def _db_set_uncompressed_meaning(self, p):
"""Helper for :meth:`_db_set_value`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
def _db_get_value(self, v, unused_p):
"""Helper for :meth:`_deserialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
class TextProperty(BlobProperty):
"""An unindexed property that contains UTF-8 encoded text values.
A :class:`TextProperty` is intended for values of unlimited length, hence
is **not** indexed. Previously, a :class:`TextProperty` could be indexed
via:
.. code-block:: python
class Item(ndb.Model):
description = ndb.TextProperty(indexed=True)
...
but this usage is no longer supported. If indexed text is desired, a
:class:`StringProperty` should be used instead.
.. automethod:: _to_base_type
.. automethod:: _from_base_type
.. automethod:: _validate
Raises:
NotImplementedError: If ``indexed=True`` is provided.
"""
__slots__ = ()
def __init__(self, *args, **kwargs):
indexed = kwargs.pop("indexed", False)
if indexed:
raise NotImplementedError(
"A TextProperty cannot be indexed. Previously this was "
"allowed, but this usage is no longer supported."
)
super(TextProperty, self).__init__(*args, **kwargs)
@property
def _indexed(self):
"""bool: Indicates that the property is not indexed."""
return False
def _validate(self, value):
"""Validate a ``value`` before setting it.
Args:
value (Union[bytes, str]): The value to check.
Raises:
.BadValueError: If ``value`` is :class:`bytes`, but is not a valid
UTF-8 encoded string.
.BadValueError: If ``value`` is neither :class:`bytes` nor
:class:`str`.
.BadValueError: If the current property is indexed but the UTF-8
encoded value exceeds the maximum length (1500 bytes).
"""
if isinstance(value, bytes):
try:
encoded_length = len(value)
value = value.decode("utf-8")
except UnicodeError:
raise exceptions.BadValueError(
"Expected valid UTF-8, got {!r}".format(value)
)
elif isinstance(value, str):
encoded_length = len(value.encode("utf-8"))
else:
raise exceptions.BadValueError(
"Expected string, got {!r}".format(value)
)
if self._indexed and encoded_length > _MAX_STRING_LENGTH:
raise exceptions.BadValueError(
"Indexed value {} must be at most {:d} "
"bytes".format(self._name, _MAX_STRING_LENGTH)
)
def _to_base_type(self, value):
"""Convert a value to the "base" value type for this property.
Args:
value (Union[bytes, str]): The value to be converted.
Returns:
Optional[bytes]: The converted value. If ``value`` is a
:class:`str`, this will return the UTF-8 encoded bytes for it.
Otherwise, it will return :data:`None`.
"""
if isinstance(value, str):
return value.encode("utf-8")
def _from_base_type(self, value):
"""Convert a value from the "base" value type for this property.
.. note::
Older versions of ``ndb`` could write non-UTF-8 ``TEXT``
properties. This means that if ``value`` is :class:`bytes`, but is
not a valid UTF-8 encoded string, it can't (necessarily) be
rejected. But, :meth:`_validate` now rejects such values, so it's
not possible to write new non-UTF-8 ``TEXT`` properties.
Args:
value (Union[bytes, str]): The value to be converted.
Returns:
            Optional[str]: The converted value. If ``value`` is a valid UTF-8
encoded :class:`bytes` string, this will return the decoded
:class:`str` corresponding to it. Otherwise, it will return
:data:`None`.
"""
if isinstance(value, bytes):
try:
return value.decode("utf-8")
except UnicodeError:
pass
def _db_set_uncompressed_meaning(self, p):
"""Helper for :meth:`_db_set_value`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
class StringProperty(TextProperty):
"""An indexed property that contains UTF-8 encoded text values.
This is nearly identical to :class:`TextProperty`, but is indexed. Values
must be at most 1500 bytes (when UTF-8 encoded from :class:`str` to bytes).
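    For example, a sketch contrasting the two (the model name is
    illustrative):
    .. code-block:: python
        class Item(ndb.Model):
            name = ndb.StringProperty()        # indexed, at most 1500 bytes
            description = ndb.TextProperty()   # unindexed, unlimited length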
Raises:
NotImplementedError: If ``indexed=False`` is provided.
"""
__slots__ = ()
def __init__(self, *args, **kwargs):
indexed = kwargs.pop("indexed", True)
if not indexed:
raise NotImplementedError(
"A StringProperty must be indexed. Previously setting "
"``indexed=False`` was allowed, but this usage is no longer "
"supported."
)
super(StringProperty, self).__init__(*args, **kwargs)
@property
def _indexed(self):
"""bool: Indicates that the property is indexed."""
return True
class GeoPtProperty(Property):
"""A property that contains :attr:`.GeoPt` values.
.. automethod:: _validate
"""
__slots__ = ()
def _validate(self, value):
"""Validate a ``value`` before setting it.
Args:
value (~google.cloud.datastore.helpers.GeoPoint): The value to
check.
Raises:
.BadValueError: If ``value`` is not a :attr:`.GeoPt`.
"""
if not isinstance(value, GeoPt):
raise exceptions.BadValueError(
"Expected GeoPt, got {!r}".format(value)
)
def _db_set_value(self, v, p, value):
"""Helper for :meth:`_serialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
def _db_get_value(self, v, unused_p):
"""Helper for :meth:`_deserialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
class PickleProperty(BlobProperty):
"""A property that contains values that are pickle-able.
.. note::
Unlike most property types, a :class:`PickleProperty` is **not**
indexed by default.
This will use :func:`pickle.dumps` with the highest available pickle
protocol to convert to bytes and :func:`pickle.loads` to convert **from**
bytes. The base value stored in the datastore will be the pickled bytes.
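    For example, a sketch storing an arbitrary pickle-able object (the
    names are illustrative):
    .. code-block:: python
        class Config(ndb.Model):
            options = ndb.PickleProperty()
        entity = Config(options={"retries": 3, "hosts": ["a", "b"]})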
.. automethod:: _to_base_type
.. automethod:: _from_base_type
"""
__slots__ = ()
def _to_base_type(self, value):
"""Convert a value to the "base" value type for this property.
Args:
value (Any): The value to be converted.
Returns:
bytes: The pickled ``value``.
"""
return pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
def _from_base_type(self, value):
"""Convert a value from the "base" value type for this property.
Args:
value (bytes): The value to be converted.
Returns:
Any: The unpickled ``value``.
"""
return pickle.loads(value)
class JsonProperty(BlobProperty):
"""A property that contains JSON-encodable values.
.. note::
Unlike most property types, a :class:`JsonProperty` is **not**
indexed by default.
.. automethod:: _to_base_type
.. automethod:: _from_base_type
.. automethod:: _validate
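    For example, a sketch restricting values to dictionaries via
    ``json_type`` (the names are illustrative):
    .. code-block:: python
        class Config(ndb.Model):
            payload = ndb.JsonProperty(json_type=dict)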
Args:
name (str): The name of the property.
compressed (bool): Indicates if the value should be compressed (via
``zlib``).
json_type (type): The expected type of values that this property can
hold. If :data:`None`, any type is allowed.
indexed (bool): Indicates if the value should be indexed.
repeated (bool): Indicates if this property is repeated, i.e. contains
multiple values.
required (bool): Indicates if this property is required on the given
model type.
default (Any): The default value for this property.
choices (Iterable[Any]): A container of allowed values for this
property.
validator (Callable[[~google.cloud.ndb.model.Property, Any], bool]): A
validator to be used to check values.
verbose_name (str): A longer, user-friendly name for this property.
write_empty_list (bool): Indicates if an empty list should be written
to the datastore.
"""
_json_type = None
def __init__(
self,
name=None,
*,
compressed=None,
json_type=None,
indexed=None,
repeated=None,
required=None,
default=None,
choices=None,
validator=None,
verbose_name=None,
write_empty_list=None
):
super(JsonProperty, self).__init__(
name=name,
compressed=compressed,
indexed=indexed,
repeated=repeated,
required=required,
default=default,
choices=choices,
validator=validator,
verbose_name=verbose_name,
write_empty_list=write_empty_list,
)
if json_type is not None:
self._json_type = json_type
def _validate(self, value):
"""Validate a ``value`` before setting it.
Args:
value (Any): The value to check.
Raises:
TypeError: If the current property has a JSON type set and
``value`` is not an instance of that type.
"""
if self._json_type is None:
return
if not isinstance(value, self._json_type):
raise TypeError(
"JSON property must be a {}".format(self._json_type)
)
def _to_base_type(self, value):
"""Convert a value to the "base" value type for this property.
Args:
value (Any): The value to be converted.
Returns:
bytes: The ``value``, JSON encoded as an ASCII byte string.
"""
as_str = json.dumps(value, separators=(",", ":"), ensure_ascii=True)
return as_str.encode("ascii")
def _from_base_type(self, value):
"""Convert a value from the "base" value type for this property.
Args:
value (bytes): The value to be converted.
Returns:
Any: The ``value`` (ASCII bytes or string) loaded as JSON.
"""
return json.loads(value.decode("ascii"))
class UserProperty(Property):
    """A property for user values (not implemented).
    The constructor always raises :exc:`NotImplementedError`.
    """
    __slots__ = ()
def __init__(self, *args, **kwargs):
raise NotImplementedError
class KeyProperty(Property):
"""A property that contains :class:`.Key` values.
The constructor for :class:`KeyProperty` allows at most two positional
arguments. Any usage of :data:`None` as a positional argument will
be ignored. Any of the following signatures are allowed:
.. testsetup:: key-property-constructor
from google.cloud import ndb
class SimpleModel(ndb.Model):
pass
.. doctest:: key-property-constructor
>>> name = "my_value"
>>> ndb.KeyProperty(name)
KeyProperty('my_value')
>>> ndb.KeyProperty(SimpleModel)
KeyProperty(kind='SimpleModel')
>>> ndb.KeyProperty(name, SimpleModel)
KeyProperty('my_value', kind='SimpleModel')
>>> ndb.KeyProperty(SimpleModel, name)
KeyProperty('my_value', kind='SimpleModel')
The type of the positional arguments will be used to determine their
purpose: a string argument is assumed to be the ``name`` and a
:class:`type` argument is assumed to be the ``kind`` (and checked that
the type is a subclass of :class:`Model`).
.. automethod:: _validate
Args:
name (str): The name of the property.
kind (Union[type, str]): The (optional) kind to be stored. If provided
as a positional argument, this must be a subclass of :class:`Model`
otherwise the kind name is sufficient.
indexed (bool): Indicates if the value should be indexed.
repeated (bool): Indicates if this property is repeated, i.e. contains
multiple values.
required (bool): Indicates if this property is required on the given
model type.
default (.Key): The default value for this property.
choices (Iterable[.Key]): A container of allowed values for this
property.
validator (Callable[[~google.cloud.ndb.model.Property, .Key], bool]): A
validator to be used to check values.
verbose_name (str): A longer, user-friendly name for this property.
write_empty_list (bool): Indicates if an empty list should be written
to the datastore.
"""
_kind = None
def __init__(
self,
*args,
name=None,
kind=None,
indexed=None,
repeated=None,
required=None,
default=None,
choices=None,
validator=None,
verbose_name=None,
write_empty_list=None
):
name, kind = self._handle_positional(args, name, kind)
super(KeyProperty, self).__init__(
name=name,
indexed=indexed,
repeated=repeated,
required=required,
default=default,
choices=choices,
validator=validator,
verbose_name=verbose_name,
write_empty_list=write_empty_list,
)
if kind is not None:
self._kind = kind
@staticmethod
def _handle_positional(args, name, kind):
"""Handle positional arguments.
In particular, assign them to the "correct" values and make sure
they don't collide with the relevant keyword arguments.
Args:
args (tuple): The positional arguments provided to the
constructor.
name (Optional[str]): The name that was provided as a keyword
argument to the constructor.
kind (Optional[Union[type, str]]): The kind that was provided as a
keyword argument to the constructor.
Returns:
Tuple[Optional[str], Optional[str]]: The ``name`` and ``kind``
inferred from the arguments. Either may be :data:`None`.
Raises:
TypeError: If ``args`` has more than 2 elements.
TypeError: If a valid ``name`` type (i.e. a string) is specified
twice in ``args``.
TypeError: If a valid ``kind`` type (i.e. a subclass of
:class:`Model`) is specified twice in ``args``.
TypeError: If an element in ``args`` is not a :class:`str` or a
subclass of :class:`Model`.
TypeError: If a ``name`` is specified both in ``args`` and via
the ``name`` keyword.
TypeError: If a ``kind`` is specified both in ``args`` and via
the ``kind`` keyword.
            TypeError: If a ``kind`` was provided via the ``kind`` keyword and is
not a :class:`str` or a subclass of :class:`Model`.
"""
# Limit positional arguments.
if len(args) > 2:
raise TypeError(
"The KeyProperty constructor accepts at most two "
"positional arguments."
)
# Filter out None
args = [value for value in args if value is not None]
# Determine the name / kind inferred from the positional arguments.
name_via_positional = None
kind_via_positional = None
for value in args:
if isinstance(value, str):
if name_via_positional is None:
name_via_positional = value
else:
raise TypeError("You can only specify one name")
elif isinstance(value, type) and issubclass(value, Model):
if kind_via_positional is None:
kind_via_positional = value
else:
raise TypeError("You can only specify one kind")
else:
raise TypeError(
"Unexpected positional argument: {!r}".format(value)
)
        # Reconcile the two possible ``name`` values.
if name_via_positional is not None:
if name is None:
name = name_via_positional
else:
raise TypeError("You can only specify name once")
        # Reconcile the two possible ``kind`` values.
if kind_via_positional is None:
if isinstance(kind, type) and issubclass(kind, Model):
kind = kind._get_kind()
else:
if kind is None:
kind = kind_via_positional._get_kind()
else:
raise TypeError("You can only specify kind once")
# Make sure the ``kind`` is a ``str``.
if kind is not None and not isinstance(kind, str):
raise TypeError("kind must be a Model class or a string")
return name, kind
def _constructor_info(self):
"""Helper for :meth:`__repr__`.
Yields:
Tuple[str, bool]: Pairs of argument name and a boolean indicating
if that argument is a keyword.
"""
yield "name", False
yield "kind", True
from_inspect = super(KeyProperty, self)._constructor_info()
for name, is_keyword in from_inspect:
if name in ("args", "name", "kind"):
continue
yield name, is_keyword
def _validate(self, value):
"""Validate a ``value`` before setting it.
Args:
value (.Key): The value to check.
Raises:
.BadValueError: If ``value`` is not a :class:`.Key`.
.BadValueError: If ``value`` is a partial :class:`.Key` (i.e. it
has no name or ID set).
.BadValueError: If the current property has an associated ``kind``
and ``value`` does not match that kind.
"""
if not isinstance(value, Key):
raise exceptions.BadValueError(
"Expected Key, got {!r}".format(value)
)
# Reject incomplete keys.
if not value.id():
raise exceptions.BadValueError(
"Expected complete Key, got {!r}".format(value)
)
# Verify kind if provided.
if self._kind is not None:
if value.kind() != self._kind:
raise exceptions.BadValueError(
"Expected Key with kind={!r}, got "
"{!r}".format(self._kind, value)
)
def _db_set_value(self, v, unused_p, value):
"""Helper for :meth:`_serialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
def _db_get_value(self, v, unused_p):
"""Helper for :meth:`_deserialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
class BlobKeyProperty(Property):
"""A property containing :class:`~google.cloud.ndb.model.BlobKey` values.
.. automethod:: _validate
"""
__slots__ = ()
def _validate(self, value):
"""Validate a ``value`` before setting it.
Args:
value (~google.cloud.ndb.model.BlobKey): The value to check.
Raises:
.BadValueError: If ``value`` is not a
:class:`~google.cloud.ndb.model.BlobKey`.
"""
if not isinstance(value, BlobKey):
raise exceptions.BadValueError(
"Expected BlobKey, got {!r}".format(value)
)
def _db_set_value(self, v, p, value):
"""Helper for :meth:`_serialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
def _db_get_value(self, v, unused_p):
"""Helper for :meth:`_deserialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
class DateTimeProperty(Property):
"""A property that contains :class:`~datetime.datetime` values.
This property expects "naive" datetime stamps, i.e. no timezone can
be set. Furthermore, the assumption is that naive datetime stamps
represent UTC.
.. note::
Unlike Django, ``auto_now_add`` can be overridden by setting the
value before writing the entity. And unlike the legacy
``google.appengine.ext.db``, ``auto_now`` does not supply a default
value. Also unlike legacy ``db``, when the entity is written, the
property values are updated to match what was written. Finally, beware
that this also updates the value in the in-process cache, **and** that
``auto_now_add`` may interact weirdly with transaction retries (a retry
of a property with ``auto_now_add`` set will reuse the value that was
set on the first try).
.. automethod:: _validate
.. automethod:: _prepare_for_put
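    For example, a sketch of the common timestamp pattern (the model name
    is illustrative):
    .. code-block:: python
        class Article(ndb.Model):
            created = ndb.DateTimeProperty(auto_now_add=True)
            updated = ndb.DateTimeProperty(auto_now=True)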
Args:
name (str): The name of the property.
auto_now (bool): Indicates that the property should be set to the
current datetime when an entity is created and whenever it is
updated.
auto_now_add (bool): Indicates that the property should be set to the
current datetime when an entity is created.
indexed (bool): Indicates if the value should be indexed.
repeated (bool): Indicates if this property is repeated, i.e. contains
multiple values.
required (bool): Indicates if this property is required on the given
model type.
default (~datetime.datetime): The default value for this property.
choices (Iterable[~datetime.datetime]): A container of allowed values
for this property.
validator (Callable[[~google.cloud.ndb.model.Property, Any], bool]): A
validator to be used to check values.
verbose_name (str): A longer, user-friendly name for this property.
write_empty_list (bool): Indicates if an empty list should be written
to the datastore.
Raises:
ValueError: If ``repeated=True`` and ``auto_now=True``.
ValueError: If ``repeated=True`` and ``auto_now_add=True``.
"""
_auto_now = False
_auto_now_add = False
def __init__(
self,
name=None,
*,
auto_now=None,
auto_now_add=None,
indexed=None,
repeated=None,
required=None,
default=None,
choices=None,
validator=None,
verbose_name=None,
write_empty_list=None
):
super(DateTimeProperty, self).__init__(
name=name,
indexed=indexed,
repeated=repeated,
required=required,
default=default,
choices=choices,
validator=validator,
verbose_name=verbose_name,
write_empty_list=write_empty_list,
)
if self._repeated:
if auto_now:
raise ValueError(
"DateTimeProperty {} could use auto_now and be "
"repeated, but there would be no point.".format(self._name)
)
elif auto_now_add:
raise ValueError(
"DateTimeProperty {} could use auto_now_add and be "
"repeated, but there would be no point.".format(self._name)
)
if auto_now is not None:
self._auto_now = auto_now
if auto_now_add is not None:
self._auto_now_add = auto_now_add
def _validate(self, value):
"""Validate a ``value`` before setting it.
Args:
value (~datetime.datetime): The value to check.
Raises:
.BadValueError: If ``value`` is not a :class:`~datetime.datetime`.
"""
if not isinstance(value, datetime.datetime):
raise exceptions.BadValueError(
"Expected datetime, got {!r}".format(value)
)
@staticmethod
def _now():
"""datetime.datetime: Return current datetime.
Subclasses will override this to return different forms of "now".
"""
return datetime.datetime.utcnow()
def _prepare_for_put(self, entity):
"""Sets the current timestamp when "auto" is set.
If one of the following scenarios occur
* ``auto_now=True``
* ``auto_now_add=True`` and the ``entity`` doesn't have a value set
then this hook will run before the ``entity`` is ``put()`` into
the datastore.
Args:
entity (Model): An entity with values.
"""
if self._auto_now or (
self._auto_now_add and not self._has_value(entity)
):
value = self._now()
self._store_value(entity, value)
def _db_set_value(self, v, p, value):
"""Helper for :meth:`_serialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
def _db_get_value(self, v, unused_p):
"""Helper for :meth:`_deserialize`.
Raises:
NotImplementedError: Always. This method is virtual.
"""
raise NotImplementedError
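# --- Illustrative sketch (not part of the library API) ---
# The _prepare_for_put hook above reduces to one decision rule: stamp
# "now" when ``auto_now`` is set, or when ``auto_now_add`` is set and no
# value is stored yet. A minimal standalone rendering of that rule, with
# a plain dict standing in for the entity's value store:
def _demo_auto_now_rule(props, values):
    """Apply the DateTimeProperty auto-now rule to a dict of values.
    ``props`` maps a property name to an ``(auto_now, auto_now_add)``
    pair; ``values`` maps property names to currently stored values.
    """
    for name, (auto_now, auto_now_add) in props.items():
        if auto_now or (auto_now_add and name not in values):
            values[name] = datetime.datetime.utcnow()
    return values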
class DateProperty(DateTimeProperty):
"""A property that contains :class:`~datetime.date` values.
.. automethod:: _to_base_type
.. automethod:: _from_base_type
.. automethod:: _validate
"""
__slots__ = ()
def _validate(self, value):
"""Validate a ``value`` before setting it.
Args:
value (~datetime.date): The value to check.
Raises:
.BadValueError: If ``value`` is not a :class:`~datetime.date`.
"""
if not isinstance(value, datetime.date):
raise exceptions.BadValueError(
"Expected date, got {!r}".format(value)
)
def _to_base_type(self, value):
"""Convert a value to the "base" value type for this property.
Args:
value (~datetime.date): The value to be converted.
Returns:
~datetime.datetime: The converted value: a datetime object with the
time set to ``00:00``.
Raises:
TypeError: If ``value`` is not a :class:`~datetime.date`.
"""
if not isinstance(value, datetime.date):
raise TypeError(
"Cannot convert to datetime expected date value; "
"received {}".format(value)
)
return datetime.datetime(value.year, value.month, value.day)
def _from_base_type(self, value):
"""Convert a value from the "base" value type for this property.
Args:
value (~datetime.datetime): The value to be converted.
Returns:
~datetime.date: The converted value: the date that ``value``
occurs on.
"""
return value.date()
@staticmethod
def _now():
"""datetime.datetime: Return current date."""
return datetime.datetime.utcnow().date()
class TimeProperty(DateTimeProperty):
"""A property that contains :class:`~datetime.time` values.
.. automethod:: _to_base_type
.. automethod:: _from_base_type
.. automethod:: _validate
"""
__slots__ = ()
def _validate(self, value):
"""Validate a ``value`` before setting it.
Args:
value (~datetime.time): The value to check.
Raises:
.BadValueError: If ``value`` is not a :class:`~datetime.time`.
"""
if not isinstance(value, datetime.time):
raise exceptions.BadValueError(
"Expected time, got {!r}".format(value)
)
def _to_base_type(self, value):
"""Convert a value to the "base" value type for this property.
Args:
value (~datetime.time): The value to be converted.
Returns:
~datetime.datetime: The converted value: a datetime object with the
date set to ``1970-01-01``.
Raises:
TypeError: If ``value`` is not a :class:`~datetime.time`.
"""
if not isinstance(value, datetime.time):
raise TypeError(
"Cannot convert to datetime expected time value; "
"received {}".format(value)
)
return datetime.datetime(
1970,
1,
1,
value.hour,
value.minute,
value.second,
value.microsecond,
)
def _from_base_type(self, value):
"""Convert a value from the "base" value type for this property.
Args:
value (~datetime.datetime): The value to be converted.
Returns:
~datetime.time: The converted value: the time that ``value``
occurs at.
"""
return value.time()
@staticmethod
def _now():
"""datetime.datetime: Return current time."""
return datetime.datetime.utcnow().time()
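# --- Illustrative sketch (not part of the library API) ---
# DateProperty and TimeProperty both ride on DateTimeProperty by widening
# their values to datetimes: a date becomes midnight on that date, a time
# becomes that time on 1970-01-01. A quick standalone round trip of both
# conversions:
def _demo_base_type_round_trip():
    d = datetime.date(2019, 7, 4)
    t = datetime.time(13, 30, 15)
    # date -> datetime at 00:00, and back
    assert datetime.datetime(d.year, d.month, d.day).date() == d
    # time -> datetime on the epoch date, and back
    as_dt = datetime.datetime(
        1970, 1, 1, t.hour, t.minute, t.second, t.microsecond
    )
    assert as_dt.time() == t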
class StructuredProperty(Property):
__slots__ = ()
def __init__(self, *args, **kwargs):
raise NotImplementedError
class LocalStructuredProperty(BlobProperty):
__slots__ = ()
def __init__(self, *args, **kwargs):
raise NotImplementedError
class GenericProperty(Property):
__slots__ = ()
def __init__(self, *args, **kwargs):
raise NotImplementedError
class ComputedProperty(GenericProperty):
__slots__ = ()
def __init__(self, *args, **kwargs):
raise NotImplementedError
class MetaModel(type):
__slots__ = ()
    def __new__(cls, *args, **kwargs):
raise NotImplementedError
class Model:
__slots__ = ("_entity_key",)
def __init__(self, *args, **kwargs):
raise NotImplementedError
@classmethod
def _get_kind(cls):
"""Return the kind name for this class.
This defaults to ``cls.__name__``; users may override this to give a
class a different name when stored in Google Cloud Datastore than the
name of the class.
"""
return cls.__name__
@staticmethod
def _validate_key(key):
"""Validation for ``_key`` attribute (designed to be overridden).
Args:
key (.Key): Proposed key to use for this entity.
Returns:
.Key: The validated ``key``.
"""
return key
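# --- Illustrative sketch (not part of the library API) ---
# Overriding _get_kind stores entities under a different Datastore kind
# without renaming the Python class; the subclass and kind name below
# are hypothetical examples only.
class _DemoLegacyContact(Model):
    __slots__ = ()
    @classmethod
    def _get_kind(cls):
        # keep persisting under the old kind name for compatibility
        return "Contact"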
class Expando(Model):
__slots__ = ()
def __init__(self, *args, **kwargs):
raise NotImplementedError
def transaction(*args, **kwargs):
raise NotImplementedError
def transaction_async(*args, **kwargs):
raise NotImplementedError
def in_transaction(*args, **kwargs):
raise NotImplementedError
def transactional(*args, **kwargs):
raise NotImplementedError
def transactional_async(*args, **kwargs):
raise NotImplementedError
def transactional_tasklet(*args, **kwargs):
raise NotImplementedError
def non_transactional(*args, **kwargs):
raise NotImplementedError
def get_multi_async(*args, **kwargs):
raise NotImplementedError
def get_multi(*args, **kwargs):
raise NotImplementedError
def put_multi_async(*args, **kwargs):
raise NotImplementedError
def put_multi(*args, **kwargs):
raise NotImplementedError
def delete_multi_async(*args, **kwargs):
raise NotImplementedError
def delete_multi(*args, **kwargs):
raise NotImplementedError
def get_indexes_async(*args, **kwargs):
raise NotImplementedError
def get_indexes(*args, **kwargs):
raise NotImplementedError
| jonparrott/google-cloud-python | ndb/src/google/cloud/ndb/model.py | Python | apache-2.0 | 97,630 | 0 |
#!/usr/bin/env python
'''coffeehandlers.py - Waqas Bhatti ([email protected]) - Jul 2014
This contains the URL handlers for the astroph-coffee web-server.
'''
import os.path
import logging
import base64
import re
LOGGER = logging.getLogger(__name__)
from datetime import datetime, timedelta
from pytz import utc, timezone
import tornado.web
from tornado.escape import xhtml_escape, xhtml_unescape, url_unescape, squeeze
import arxivdb
import webdb
import fulltextsearch as fts
import ipaddress
######################
## USEFUL CONSTANTS ##
######################
ARCHIVEDATE_REGEX = re.compile(r'^(\d{4})(\d{2})(\d{2})$')
MONTH_NAMES = {x:datetime(year=2014,month=x,day=12)
for x in range(1,13)}
######################
## USEFUL FUNCTIONS ##
######################
def msgencode(message, signer):
'''This escapes a message, then base64 encodes it.
Uses an itsdangerous.Signer instance provided as the signer arg to sign the
message to protect against tampering.
'''
try:
msg = base64.b64encode(signer.sign(xhtml_escape(message)))
msg = msg.replace('=','*')
return msg
except Exception as e:
return ''
def msgdecode(message, signer=None):
    '''This base64 decodes a message, then unescapes it.
    If an itsdangerous.Signer instance is provided as the signer arg, it is
    used to verify the message and protect against tampering; some handlers
    have no signer configured and fall back to a plain decode.
    '''
    try:
        msg = message.replace('*','=')
        decoded_message = base64.b64decode(msg)
        if signer is not None:
            decoded_message = signer.unsign(decoded_message)
        return xhtml_unescape(decoded_message)
    except Exception as e:
        return ''
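# --- Illustrative sketch (not part of the handlers) ---
# msgencode/msgdecode round trip, assuming the itsdangerous package is
# installed; the key below is a placeholder, not the app's real secret.
def _demo_msg_round_trip():
    from itsdangerous import Signer
    signer = Signer('not-a-real-secret')
    token = msgencode('Vote recorded!', signer)  # '=' padding -> '*'
    assert msgdecode(token, signer) == 'Vote recorded!'
    # a truncated or tampered token fails verification and yields ''
    assert msgdecode(token[:-2], signer) == ''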
def group_arxiv_dates(dates, npapers, nlocal, nvoted):
'''
This takes a list of datetime.dates and the number of papers corresponding
to each date and builds a nice dict out of it, allowing the following
listing (in rev-chron order) to be made:
YEAR X
Month X:
Date X --- <strong>YY<strong> papers
.
.
.
YEAR 1
Month 1:
Date 1 --- <strong>YY<strong> papers
'''
years, months = [], []
for x in dates:
years.append(x.year)
months.append(x.month)
unique_years = set(years)
unique_months = set(months)
yeardict = {}
for year in unique_years:
yeardict[year] = {}
for month in unique_months:
yeardict[year][MONTH_NAMES[month]] = [
(x,y,z,w) for (x,y,z,w) in zip(dates, npapers, nlocal, nvoted)
if (x.year == year and x.month == month)
]
for month in yeardict[year].copy():
if not yeardict[year][month]:
del yeardict[year][month]
return yeardict
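# --- Illustrative sketch (not part of the handlers) ---
# group_arxiv_dates in action: listing dates in the same month collapse
# under one year/month key. The month keys are the MONTH_NAMES datetime
# stand-ins, so templates can strftime('%B') on them.
def _demo_group_arxiv_dates():
    dates = [datetime(2014, 7, 14).date(), datetime(2014, 7, 15).date()]
    grouped = group_arxiv_dates(dates, [20, 25], [2, 3], [5, 6])
    assert list(grouped.keys()) == [2014]
    assert grouped[2014][MONTH_NAMES[7]][0] == (dates[0], 20, 2, 5)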
##################
## URL HANDLERS ##
##################
class CoffeeHandler(tornado.web.RequestHandler):
'''
This handles all requests for /astroph-coffee and redirects based on
time of day.
'''
def initialize(self,
database,
voting_start,
voting_end,
coffee_time,
server_tz,
signer,
room,
building,
department,
institution):
'''
Sets up the database.
'''
self.database = database
self.voting_start = voting_start
self.voting_end = voting_end
self.coffee_time = coffee_time
self.local_tz = timezone(server_tz)
self.signer = signer
self.room = room
self.building = building
self.department = department
self.institution = institution
def get(self):
'''
This handles GET requests.
'''
# handle a redirect with an attached flash message
flash_message = self.get_argument('f', None)
if flash_message:
flashtext = msgdecode(flash_message, self.signer)
LOGGER.warning('flash message: %s' % flashtext)
flashbox = (
'<div data-alert class="alert-box radius">%s'
                '<a href="#" class="close">×</a></div>' %
flashtext
)
flash_message = flashbox
else:
flash_message = ''
# first, get the session token
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
ip_address = self.request.remote_ip
if 'User-Agent' in self.request.headers:
client_header = self.request.headers['User-Agent'] or 'none'
else:
client_header = 'none'
local_today = datetime.now(tz=utc).strftime('%Y-%m-%d %H:%M %Z')
user_name = 'anonuser@%s' % ip_address
new_user = True
# check if we're in voting time-limits
timenow = datetime.now(tz=utc).timetz()
# check if this session_token corresponds to an existing user
if session_token:
sessioninfo = webdb.session_check(session_token,
database=self.database)
if sessioninfo[0]:
user_name = sessioninfo[2]
LOGGER.info('found session for %s, continuing with it' %
user_name)
new_user = False
elif sessioninfo[-1] != 'database_error':
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
else:
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
# there's no existing user session
else:
if ('crawler' not in client_header.lower() and
'bot' not in client_header.lower()):
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
# construct the current dt and use it to figure out the local-to-server
# voting times
dtnow = datetime.now(tz=utc)
dtstart = dtnow.replace(hour=self.voting_start.hour,
minute=self.voting_start.minute,
second=0)
local_start = dtstart.astimezone(self.local_tz)
local_start = local_start.strftime('%H:%M %Z')
dtend = dtnow.replace(hour=self.voting_end.hour,
minute=self.voting_end.minute,
second=0)
local_end = dtend.astimezone(self.local_tz)
local_end = local_end.strftime('%H:%M %Z')
dtcoffee = dtnow.replace(hour=self.coffee_time.hour,
minute=self.coffee_time.minute,
second=0)
local_coffee = dtcoffee.astimezone(self.local_tz)
local_coffee = local_coffee.strftime('%H:%M %Z')
utc_start = self.voting_start.strftime('%H:%M %Z')
utc_end = self.voting_end.strftime('%H:%M %Z')
utc_coffee = self.coffee_time.strftime('%H:%M %Z')
self.render("index.html",
user_name=user_name,
local_today=local_today,
voting_localstart=local_start,
voting_localend=local_end,
voting_start=utc_start,
voting_end=utc_end,
coffeetime_local=local_coffee,
coffeetime_utc=utc_coffee,
flash_message=flash_message,
new_user=new_user,
coffee_room=self.room,
coffee_building=self.building,
coffee_department=self.department,
coffee_institution=self.institution)
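# --- Illustrative sketch (not part of the handlers) ---
# The local_start/local_end computation in CoffeeHandler.get anchors the
# configured UTC times onto today's date and converts them to the server
# timezone purely for display. The same pattern in isolation (the
# timezone name is an arbitrary example):
def _demo_localize_voting_time(voting_start_utc, tzname='US/Eastern'):
    dtnow = datetime.now(tz=utc)
    dtstart = dtnow.replace(hour=voting_start_utc.hour,
                            minute=voting_start_utc.minute,
                            second=0)
    return dtstart.astimezone(timezone(tzname)).strftime('%H:%M %Z')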
class ArticleListHandler(tornado.web.RequestHandler):
'''This handles all requests for the listing of selected articles and voting
pages. Note: if nobody voted on anything, the default is to return all
articles with local authors at the top.
'''
def initialize(self, database,
voting_start,
voting_end,
server_tz,
reserve_interval,
signer):
'''
Sets up the database.
'''
self.database = database
self.voting_start = voting_start
self.voting_end = voting_end
self.server_tz = server_tz
self.signer = signer
self.reserve_interval = reserve_interval
def get(self):
'''
This handles GET requests.
'''
# handle a redirect with an attached flash message
flash_message = self.get_argument('f', None)
if flash_message:
flashtext = msgdecode(flash_message, self.signer)
LOGGER.warning('flash message: %s' % flashtext)
flashbox = (
'<div data-alert class="alert-box radius">%s'
'<a href="#" class="close">×</a></div>' %
flashtext
)
flash_message = flashbox
else:
flash_message = ''
# first, get the session token
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
ip_address = self.request.remote_ip
if 'User-Agent' in self.request.headers:
client_header = self.request.headers['User-Agent'] or 'none'
else:
client_header = 'none'
local_today = datetime.now(tz=utc).strftime('%Y-%m-%d %H:%M %Z')
todays_date = datetime.now(tz=utc).strftime('%A, %b %d %Y')
todays_utcdate = datetime.now(tz=utc).strftime('%Y-%m-%d')
todays_localdate = (
datetime.now(tz=timezone(self.server_tz)).strftime('%Y-%m-%d')
)
todays_utcdow = datetime.now(tz=utc).weekday()
todays_localdate_str = (
datetime.now(tz=timezone(self.server_tz)).strftime('%A, %b %d %Y')
)
user_name = 'anonuser@%s' % ip_address
new_user = True
# check if this session_token corresponds to an existing user
if session_token:
sessioninfo = webdb.session_check(session_token,
database=self.database)
if sessioninfo[0]:
user_name = sessioninfo[2]
LOGGER.info('found session for %s, continuing with it' %
user_name)
new_user = False
elif sessioninfo[-1] != 'database_error':
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
else:
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
# there's no existing user session
else:
if ('crawler' not in client_header.lower() and
'bot' not in client_header.lower()):
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
############################
## SERVE THE PAGE REQUEST ##
############################
# check if we're in voting time-limits
timenow = datetime.now(tz=utc).timetz()
# if we are within the time limits, then show the voting page
if (self.voting_start < timenow < self.voting_end):
# get the articles for today
(local_articles, voted_articles,
other_articles, reserved_articles) = (
arxivdb.get_articles_for_voting(database=self.database)
)
            # if today's papers aren't ready yet, fall back to the most
            # recent listing instead
if not local_articles and not voted_articles and not other_articles:
LOGGER.warning('no papers for today yet, '
'redirecting to previous day papers')
(latestdate, local_articles,
voted_articles, other_articles, reserved_articles) = (
arxivdb.get_articles_for_listing(
database=self.database
)
)
todays_date = datetime.strptime(
latestdate,
'%Y-%m-%d'
).strftime('%A, %b %d %Y')
# don't show a message on the weekend when no papers are loaded
if todays_utcdow in (5,6):
flash_message = ""
else:
flash_message = (
"<div data-alert class=\"alert-box radius\">"
"Papers for today haven't been imported yet. "
"Here are the most recent papers. "
"Please wait a few minutes and try again."
"<a href=\"#\" class=\"close\">×</a></div>"
)
# preprocess the local papers to highlight local author names
if len(local_articles) > 0:
for lind in range(len(local_articles)):
author_list = local_articles[lind][4]
author_list = author_list.split(': ')[-1].split(',')
local_indices = local_articles[lind][-2]
if local_indices and len(local_indices) > 0:
local_indices = [
int(x) for x in local_indices.split(',')
]
for li in local_indices:
author_list[li] = '<strong>%s</strong>' % (
author_list[li]
)
# update this article's local authors
local_articles[lind][4] = ', '.join(author_list)
# show the listing page
self.render("listing.html",
user_name=user_name,
local_today=local_today,
todays_date=todays_date,
local_articles=local_articles,
voted_articles=voted_articles,
other_articles=other_articles,
reserved_articles=reserved_articles,
flash_message=flash_message,
reserve_interval_days=self.reserve_interval,
new_user=new_user)
# if today's papers are ready, show them and ask for votes
else:
# get this user's votes
user_articles = arxivdb.get_user_votes(todays_utcdate,
user_name,
database=self.database)
user_reserved = arxivdb.get_user_reservations(
todays_utcdate,
user_name,
database=self.database
)
LOGGER.info('user has votes on: %s, has reservations on: %s'
% (user_articles, user_reserved))
# preprocess the local papers to highlight local author names
if len(local_articles) > 0:
for lind in range(len(local_articles)):
author_list = local_articles[lind][4]
author_list = author_list.split(': ')[-1].split(',')
local_indices = local_articles[lind][-2]
if local_indices and len(local_indices) > 0:
local_indices = [
int(x) for x in local_indices.split(',')
]
for li in local_indices:
author_list[li] = '<strong>%s</strong>' % (
author_list[li]
)
# update this article's local authors
local_articles[lind][4] = ', '.join(author_list)
# show the voting page
self.render("voting.html",
user_name=user_name,
local_today=local_today,
todays_date=todays_date,
local_articles=local_articles,
voted_articles=voted_articles,
other_articles=other_articles,
reserved_articles=reserved_articles,
flash_message=flash_message,
new_user=new_user,
reserve_interval_days=self.reserve_interval,
user_articles=user_articles,
user_reserved=user_reserved)
# otherwise, show the article list
else:
# get the articles for today
(latestdate, local_articles,
voted_articles, other_articles, reserved_articles) = (
arxivdb.get_articles_for_listing(utcdate=todays_utcdate,
database=self.database)
)
# if today's papers aren't ready yet, show latest papers
if not local_articles and not voted_articles and not other_articles:
(latestdate, local_articles,
voted_articles, other_articles, reserved_articles) = (
arxivdb.get_articles_for_listing(
database=self.database
)
)
todays_date = datetime.strptime(
latestdate,
'%Y-%m-%d'
).strftime('%A, %b %d %Y')
# don't show a message on the weekend when no papers are loaded
if todays_utcdow in (5,6):
flash_message = ""
else:
flash_message = (
"<div data-alert class=\"alert-box radius\">"
"Papers for today haven't been imported yet. "
"Here are the most recent papers. "
"Please wait a few minutes and try again."
"<a href=\"#\" class=\"close\">×</a></div>"
)
# preprocess the local papers to highlight local author names
if len(local_articles) > 0:
for lind in range(len(local_articles)):
author_list = local_articles[lind][4]
author_list = author_list.split(': ')[-1].split(',')
local_indices = local_articles[lind][-2]
if local_indices and len(local_indices) > 0:
local_indices = [
int(x) for x in local_indices.split(',')
]
for li in local_indices:
author_list[li] = '<strong>%s</strong>' % (
author_list[li]
)
# update this article's local authors
local_articles[lind][4] = ', '.join(author_list)
# show the listing page
self.render("listing.html",
user_name=user_name,
local_today=local_today,
todays_date=todays_date,
local_articles=local_articles,
voted_articles=voted_articles,
other_articles=other_articles,
reserved_articles=reserved_articles,
reserve_interval_days=self.reserve_interval,
flash_message=flash_message,
new_user=new_user)
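# --- Illustrative sketch (not part of the handlers) ---
# The local-author highlighting loop used in the listing/voting pages,
# reduced to a standalone helper: given the raw author field and the
# comma-separated indices of local authors, wrap those names in
# <strong> tags.
def _demo_highlight_local_authors(author_field, local_indices_csv):
    authors = author_field.split(': ')[-1].split(',')
    if local_indices_csv:
        for li in (int(x) for x in local_indices_csv.split(',')):
            authors[li] = '<strong>%s</strong>' % authors[li]
    return ', '.join(authors)
# e.g. _demo_highlight_local_authors('Authors: A. One, B. Two', '1')
# returns 'A. One, <strong> B. Two</strong>'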
class ReservationHandler(tornado.web.RequestHandler):
'''
This handles all requests for the voting page.
'''
def initialize(self,
database,
voting_start,
voting_end,
debug,
signer,
geofence,
countries,
regions):
'''
Sets up the database.
'''
self.database = database
self.voting_start = voting_start
self.voting_end = voting_end
self.debug = debug
self.signer = signer
self.geofence = geofence[0]
self.ipaddrs = geofence[1]
self.editips = geofence[2]
self.countries = countries
self.regions = regions
def post(self):
'''
This handles a POST request for a paper reservation.
'''
arxivid = self.get_argument('arxivid', None)
reservetype = self.get_argument('reservetype', None)
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
sessioninfo = webdb.session_check(session_token,
database=self.database)
user_name = sessioninfo[2]
todays_utcdate = datetime.now(tz=utc).strftime('%Y-%m-%d')
user_ip = self.request.remote_ip
# if we're asked to geofence, then do so
# (unless the request came from INSIDE the building)
# FIXME: add exceptions for private network IPv4 addresses
geolocked = False
# check the network as well
try:
userip_addrobj = ipaddress.ip_address(user_ip.decode())
trustedip = any([(userip_addrobj in x) for x in self.ipaddrs])
except:
trustedip = False
if self.geofence and user_ip != '127.0.0.1':
try:
geoip = self.geofence.city(user_ip)
if (geoip.country.iso_code in self.countries and
geoip.subdivisions.most_specific.iso_code
in self.regions):
LOGGER.info('geofencing ok: '
'reservation request '
'from inside allowed regions')
else:
LOGGER.warning(
'geofencing activated: '
                        'reservation request from %s '
'is outside allowed regions' %
('%s-%s' % (
geoip.country.iso_code,
geoip.subdivisions.most_specific.iso_code
))
)
message = ("Sorry, you're trying to vote "
"from an IP address that is "
"blocked from voting.")
jsondict = {'status':'failed',
'message':message,
'results':None}
geolocked = True
self.write(jsondict)
self.finish()
# fail deadly
except Exception as e:
LOGGER.exception('geofencing failed for IP %s, '
'blocking request.' % user_ip)
message = ("Sorry, you're trying to vote "
"from an IP address that is "
"blocked from voting.")
jsondict = {'status':'failed',
'message':message,
'results':None}
geolocked = True
self.write(jsondict)
self.finish()
#############################
## PROCESS THE RESERVATION ##
#############################
# check if we're in voting time-limits
timenow = datetime.now(tz=utc).timetz()
# if we are within the time limits, then allow the voting POST request
if (self.voting_start < timenow < self.voting_end):
in_votetime = True
else:
in_votetime = False
# if all things are satisfied, then process the reserve request
if (arxivid and
reservetype and
sessioninfo[0] and
((not geolocked) or trustedip) and
in_votetime):
arxivid = xhtml_escape(arxivid)
reservetype = xhtml_escape(reservetype)
LOGGER.info('user: %s, reserving: %s, on: %s' % (user_name,
reservetype,
arxivid))
if 'arXiv:' not in arxivid or reservetype not in ('reserve',
'release'):
message = ("Your paper reservation request "
"used invalid arguments "
"and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
# first, check how many reservations this user has
user_reservations = arxivdb.get_user_reservations(
todays_utcdate,
user_name,
database=self.database
)
# make sure it's less than 5 or we're not adding another
# reservation
if len(user_reservations) < 5 or reservetype != 'reserve':
reserve_outcome = arxivdb.record_reservation(
arxivid,
user_name,
reservetype,
database=self.database
)
                    if reserve_outcome is False or reserve_outcome is None:
message = ("That article doesn't exist, "
"and your reservation "
"has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
if (reserve_outcome[0] == 1 and
reserve_outcome[1] == user_name):
message = ("Reservation successfully recorded for %s"
% arxivid)
jsondict = {'status':'success',
'message':message,
'results':{'reserved':reserve_outcome[0]}}
elif (reserve_outcome[0] == 1 and
reserve_outcome[1] != user_name):
message = ("Someeone else already reserved that paper!")
jsondict = {'status':'failed',
'message':message,
'results':{'reserved':reserve_outcome[0]}}
elif (reserve_outcome[0] == 0):
message = ("Release successfully recorded for %s"
% arxivid)
jsondict = {'status':'success',
'message':message,
'results':{'reserved':reserve_outcome[0]}}
else:
message = ("That article doesn't exist, "
"or your reservation "
"has been discarded because of a problem.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
message = ("You've reserved 5 articles already.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
elif ((not geolocked) or trustedip):
message = ("Your reservation request could not be authorized"
" and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
message = ("Your reservation request could not be authorized"
" and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
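# --- Illustrative sketch (not part of the handlers) ---
# The trustedip test above is plain CIDR membership via the ipaddress
# module: an address is trusted if it falls inside any configured
# network. Standalone, with example private networks only:
def _demo_trusted_ip(user_ip, networks=(u'10.0.0.0/8', u'192.168.0.0/16')):
    addrobj = ipaddress.ip_address(user_ip.decode())
    nets = [ipaddress.ip_network(n) for n in networks]
    return any(addrobj in net for net in nets)
# _demo_trusted_ip('10.1.2.3') -> True; _demo_trusted_ip('8.8.8.8') -> False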
class VotingHandler(tornado.web.RequestHandler):
'''
This handles all requests for the voting page.
'''
def initialize(self,
database,
voting_start,
voting_end,
debug,
signer,
geofence,
countries,
regions):
'''
Sets up the database.
'''
self.database = database
self.voting_start = voting_start
self.voting_end = voting_end
self.debug = debug
self.signer = signer
self.geofence = geofence[0]
self.ipaddrs = geofence[1]
self.editips = geofence[2]
self.countries = countries
self.regions = regions
def post(self):
'''This handles POST requests for vote submissions.
takes the following arguments:
arxivid: article to vote for
votetype: up / down
checks if an existing session is in play. if not, flashes a message
saying 'no dice' in a flash message
- checks if the user has more than five votes used for the utcdate of
the requested arxivid
- if they do, then deny vote
- if they don't, allow vote
if vote is allowed:
- changes the nvote column for arxivid
- adds the current user to the voters column
- returns the nvotes for the arxivid along with
success/failure
if vote is not allowed:
- sends back a 401 + error message, which the frontend JS turns into a
flash message
the frontend JS then:
- updates the vote total for this arxivid
- handles flash messages
- updates the vote button status
'''
arxivid = self.get_argument('arxivid', None)
votetype = self.get_argument('votetype', None)
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
sessioninfo = webdb.session_check(session_token,
database=self.database)
user_name = sessioninfo[2]
todays_utcdate = datetime.now(tz=utc).strftime('%Y-%m-%d')
user_ip = self.request.remote_ip
# TESTING
# user_ip = '131.111.184.18' # Cambridge UK
# user_ip = '71.168.183.215' # FIOS NJ
# user_ip = '70.192.88.245' # VZW NJ
# user_ip = '70.42.157.5' # VZW NY
# user_ip = '69.141.255.240' # Comcast PA
# user_ip = '128.112.25.36' # Princeton Univ, NJ
# if we're asked to geofence, then do so
# (unless the request came from INSIDE the building)
# FIXME: add exceptions for private network IPv4 addresses
geolocked = False
# check the network as well
try:
userip_addrobj = ipaddress.ip_address(user_ip.decode())
trustedip = any([(userip_addrobj in x) for x in self.ipaddrs])
except:
trustedip = False
if self.geofence and user_ip != '127.0.0.1':
try:
# check the geoip location
geoip = self.geofence.city(user_ip)
if (geoip.country.iso_code in self.countries and
geoip.subdivisions.most_specific.iso_code
in self.regions):
LOGGER.info('geofencing ok: '
'vote request from inside allowed regions')
else:
LOGGER.warning(
'geofencing activated: '
'vote request from %s '
'is outside allowed regions' %
('%s-%s' % (
geoip.country.iso_code,
geoip.subdivisions.most_specific.iso_code
))
)
message = ("Sorry, you're trying to vote "
"from an IP address that is "
"blocked from voting.")
jsondict = {'status':'failed',
'message':message,
'results':None}
geolocked = True
self.write(jsondict)
self.finish()
# fail deadly
except Exception as e:
LOGGER.exception('geofencing failed for IP %s, '
'blocking request.' % user_ip)
message = ("Sorry, you're trying to vote "
"from an IP address that is "
"blocked from voting.")
jsondict = {'status':'failed',
'message':message,
'results':None}
geolocked = True
self.write(jsondict)
self.finish()
# check if we're in voting time-limits
timenow = datetime.now(tz=utc).timetz()
# if we are within the time limits, then allow the voting POST request
if (self.voting_start < timenow < self.voting_end):
in_votetime = True
else:
in_votetime = False
# if all things are satisfied, then process the vote request
if (arxivid and
votetype and
sessioninfo[0] and
(not geolocked or trustedip) and
in_votetime):
arxivid = xhtml_escape(arxivid)
votetype = xhtml_escape(votetype)
LOGGER.info('user: %s, voting: %s, on: %s' % (user_name,
votetype,
arxivid))
if 'arXiv:' not in arxivid or votetype not in ('up','down'):
message = ("Your vote request used invalid arguments"
" and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
# first, check how many votes this user has
user_votes = arxivdb.get_user_votes(todays_utcdate,
user_name,
database=self.database)
# make sure it's less than 5 or the votetype isn't up
if len(user_votes) < 5 or votetype != 'up':
vote_outcome = arxivdb.record_vote(arxivid,
user_name,
votetype,
database=self.database)
if vote_outcome is False:
message = ("That article doesn't exist, and your vote "
"has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
message = ("Vote successfully recorded for %s" % arxivid)
jsondict = {'status':'success',
'message':message,
'results':{'nvotes':vote_outcome}}
self.write(jsondict)
self.finish()
else:
message = ("You've voted on 5 articles already.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
elif (not geolocked or trustedip):
message = ("Your vote request could not be authorized"
" and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
message = ("Your reservation request could not be authorized"
" and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
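# --- Illustrative sketch (not part of the handlers) ---
# What the frontend's AJAX vote submission amounts to, using the
# requests package. The base URL, the '/vote' path, and the cookie value
# are hypothetical; the real path is wherever VotingHandler is mounted,
# and the session cookie must come from a prior page load.
def _demo_submit_vote(base_url, arxivid, votetype, session_cookie):
    import requests
    resp = requests.post(base_url + '/vote',
                         data={'arxivid': arxivid, 'votetype': votetype},
                         cookies={'coffee_session': session_cookie})
    return resp.json()  # {'status': ..., 'message': ..., 'results': ...}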
class EditHandler(tornado.web.RequestHandler):
'''This handles all requests for the editing function.
This allows users in the trustedip range to edit the arxiv listing for the
current day.
The allowable edits are:
- paper is local author
- paper is not local author
'''
def initialize(self,
database,
voting_start,
voting_end,
debug,
signer,
geofence,
countries,
regions):
'''
Sets up the database.
'''
self.database = database
self.voting_start = voting_start
self.voting_end = voting_end
self.debug = debug
self.signer = signer
self.geofence = geofence[0]
self.ipaddrs = geofence[1]
self.editips = geofence[2]
self.countries = countries
self.regions = regions
def post(self):
'''
        This handles a POST request for an article edit.
'''
arxivid = self.get_argument('arxivid', None)
edittype = self.get_argument('edittype', None)
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
sessioninfo = webdb.session_check(session_token,
database=self.database)
user_name = sessioninfo[2]
todays_utcdate = datetime.now(tz=utc).strftime('%Y-%m-%d')
user_ip = self.request.remote_ip
# check the network
try:
userip_addrobj = ipaddress.ip_address(user_ip.decode())
trustedip = any([(userip_addrobj in x) for x in self.editips])
except:
trustedip = False
######################
## PROCESS THE EDIT ##
######################
# check if we're in voting time-limits
timenow = datetime.now(tz=utc).timetz()
# if we are within the time limits, then allow the voting POST request
if (self.voting_start < timenow < self.voting_end):
in_votetime = True
else:
in_votetime = False
# editing only checks its cidr and if we're in vote mode
if (arxivid and edittype and sessioninfo[0] and
trustedip and in_votetime):
arxivid = xhtml_escape(arxivid)
edittype = xhtml_escape(edittype)
            LOGGER.info('user: %s, editing: %s, on: %s' % (user_name,
                                                           edittype,
                                                           arxivid))
            if 'arXiv:' not in arxivid or edittype not in ('local',
                                                           'notlocal'):
                message = ("Your edit request "
                           "used invalid arguments "
                           "and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
# process the edit
pass
# if we're not allowed to edit, discard the request
else:
message = ("Your edit request could not be authorized "
"(probably because the voting window is over)"
"and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
class AboutHandler(tornado.web.RequestHandler):
'''
This handles all requests for /astroph-coffee/about.
'''
def initialize(self, database):
'''
This sets up the database.
'''
self.database = database
def get(self):
'''
This handles GET requests.
'''
# handle a redirect with an attached flash message
flash_message = self.get_argument('f', None)
if flash_message:
flashtext = msgdecode(flash_message)
LOGGER.warning('flash message: %s' % flashtext)
flashbox = (
'<div data-alert class="alert-box radius">%s'
'<a href="#" class="close">×</a></div>' %
flashtext
)
flash_message = flashbox
else:
flash_message = ''
local_today = datetime.now(tz=utc).strftime('%Y-%m-%d %H:%M %Z')
# first, get the session token
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
ip_address = self.request.remote_ip
if 'User-Agent' in self.request.headers:
client_header = self.request.headers['User-Agent'] or 'none'
else:
client_header = 'none'
user_name = 'anonuser@%s' % ip_address
new_user = True
# check if this session_token corresponds to an existing user
if session_token:
sessioninfo = webdb.session_check(session_token,
database=self.database)
if sessioninfo[0]:
user_name = sessioninfo[2]
LOGGER.info('found session for %s, continuing with it' %
user_name)
new_user = False
elif sessioninfo[-1] != 'database_error':
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
else:
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
error_message=message,
local_today=local_today,
flash_message=flash_message,
new_user=new_user)
else:
if ('crawler' not in client_header.lower() and
'bot' not in client_header.lower()):
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
        #######################
        # show the about page #
        #######################
self.render("about.html",
local_today=local_today,
user_name=user_name,
flash_message=flash_message,
new_user=new_user)
class ArchiveHandler(tornado.web.RequestHandler):
'''
This handles all paper archive requests.
url: /astroph-coffee/archive/YYYYMMDD
'''
def initialize(self,
database,
reserve_interval,
signer):
'''
Sets up the database.
'''
self.database = database
self.reserve_interval = reserve_interval
self.signer = signer
def get(self, archivedate):
'''
This handles GET requests.
'''
# handle a redirect with an attached flash message
flash_message = self.get_argument('f', None)
if flash_message:
flashtext = msgdecode(flash_message, self.signer)
LOGGER.warning('flash message: %s' % flashtext)
flashbox = (
'<div data-alert class="alert-box radius">%s'
'<a href="#" class="close">×</a></div>' %
flashtext
)
flash_message = flashbox
else:
flash_message = ''
local_today = datetime.now(tz=utc).strftime('%Y-%m-%d %H:%M %Z')
# first, get the session token
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
ip_address = self.request.remote_ip
if 'User-Agent' in self.request.headers:
client_header = self.request.headers['User-Agent'] or 'none'
else:
client_header = 'none'
user_name = 'anonuser@%s' % ip_address
new_user = True
# check if this session_token corresponds to an existing user
if session_token:
sessioninfo = webdb.session_check(session_token,
database=self.database)
if sessioninfo[0]:
user_name = sessioninfo[2]
LOGGER.info('found session for %s, continuing with it' %
user_name)
new_user = False
elif sessioninfo[-1] != 'database_error':
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
else:
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
error_message=message,
local_today=local_today,
flash_message=flash_message,
new_user=new_user)
else:
if ('crawler' not in client_header.lower() and
'bot' not in client_header.lower()):
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
##################################
# now handle the archive request #
##################################
if archivedate is not None:
archivedate = xhtml_escape(archivedate)
archivedate = re.match(ARCHIVEDATE_REGEX, archivedate)
if archivedate:
year, month, day = archivedate.groups()
listingdate = '%s-%s-%s' % (year, month, day)
# get the articles for today
(latestdate, local_articles,
voted_articles, other_articles, reserved_articles) = (
arxivdb.get_articles_for_listing(utcdate=listingdate,
database=self.database)
)
# if this date's papers aren't available, show the archive index
if (not local_articles and
not voted_articles and
not other_articles and
not reserved_articles):
flash_message = (
"<div data-alert class=\"alert-box radius\">"
"No papers for %s were found. "
"You've been redirected to the Astro-Coffee archive."
"<a href=\"#\" class=\"close\">×</a></div>"
) % listingdate
(archive_dates, archive_npapers,
archive_nlocal, archive_nvoted) = arxivdb.get_archive_index(
database=self.database
)
paper_archives = group_arxiv_dates(archive_dates,
archive_npapers,
archive_nlocal,
archive_nvoted)
self.render("archive.html",
user_name=user_name,
flash_message=flash_message,
new_user=new_user,
paper_archives=paper_archives,
local_today=local_today)
else:
# figure out the UTC date for this archive listing
archive_datestr = datetime(
hour=0,
minute=15,
second=0,
day=int(day),
month=int(month),
year=int(year),
tzinfo=utc
).strftime('%A, %b %d %Y')
# preprocess the local papers to highlight local author names
if len(local_articles) > 0:
for lind in range(len(local_articles)):
author_list = local_articles[lind][4]
author_list = author_list.split(': ')[-1].split(',')
local_indices = local_articles[lind][-2]
if local_indices and len(local_indices) > 0:
local_indices = [
int(x) for x in local_indices.split(',')
]
for li in local_indices:
author_list[li] = '<strong>%s</strong>' % (
author_list[li]
)
# update this article's local authors
local_articles[lind][4] = ', '.join(author_list)
# show the listing page
self.render("archivelisting.html",
user_name=user_name,
local_today=local_today,
todays_date=archive_datestr,
local_articles=local_articles,
voted_articles=voted_articles,
other_articles=other_articles,
reserved_articles=reserved_articles,
reserve_interval_days=self.reserve_interval,
flash_message=flash_message,
new_user=new_user)
else:
(archive_dates, archive_npapers,
archive_nlocal, archive_nvoted) = arxivdb.get_archive_index(
database=self.database
)
paper_archives = group_arxiv_dates(archive_dates,
archive_npapers,
archive_nlocal,
archive_nvoted)
self.render("archive.html",
user_name=user_name,
flash_message=flash_message,
new_user=new_user,
paper_archives=paper_archives,
local_today=local_today)
else:
(archive_dates, archive_npapers,
archive_nlocal, archive_nvoted) = arxivdb.get_archive_index(
database=self.database
)
paper_archives = group_arxiv_dates(archive_dates,
archive_npapers,
archive_nlocal,
archive_nvoted)
self.render("archive.html",
user_name=user_name,
flash_message=flash_message,
new_user=new_user,
paper_archives=paper_archives,
local_today=local_today)
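# --- Illustrative sketch (not part of the handlers) ---
# ArchiveHandler parses its URL fragment with ARCHIVEDATE_REGEX, which
# accepts exactly eight digits and captures year, month, and day as
# separate strings:
def _demo_archivedate_regex():
    assert ARCHIVEDATE_REGEX.match('20140712').groups() == ('2014', '07', '12')
    assert ARCHIVEDATE_REGEX.match('2014-07-12') is None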
class LocalListHandler(tornado.web.RequestHandler):
'''
This handles all requests for /astroph-coffee/local-authors.
'''
def initialize(self, database, admincontact, adminemail):
'''
This sets up the database.
'''
self.database = database
self.admincontact = admincontact
self.adminemail = adminemail
def get(self):
'''
This handles GET requests.
'''
# handle a redirect with an attached flash message
flash_message = self.get_argument('f', None)
if flash_message:
flashtext = msgdecode(flash_message)
LOGGER.warning('flash message: %s' % flashtext)
flashbox = (
'<div data-alert class="alert-box radius">%s'
'<a href="#" class="close">×</a></div>' %
flashtext
)
flash_message = flashbox
else:
flash_message = ''
local_today = datetime.now(tz=utc).strftime('%Y-%m-%d %H:%M %Z')
# first, get the session token
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
ip_address = self.request.remote_ip
if 'User-Agent' in self.request.headers:
client_header = self.request.headers['User-Agent'] or 'none'
else:
client_header = 'none'
user_name = 'anonuser@%s' % ip_address
new_user = True
# check if this session_token corresponds to an existing user
if session_token:
sessioninfo = webdb.session_check(session_token,
database=self.database)
if sessioninfo[0]:
user_name = sessioninfo[2]
LOGGER.info('found session for %s, continuing with it' %
user_name)
new_user = False
elif sessioninfo[-1] != 'database_error':
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
else:
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
error_message=message,
local_today=local_today,
flash_message=flash_message,
new_user=new_user)
else:
if ('crawler' not in client_header.lower() and
'bot' not in client_header.lower()):
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
###############################
# show the local authors page #
###############################
authorlist = webdb.get_local_authors(database=self.database)
if authorlist:
self.render("local-authors.html",
local_today=local_today,
user_name=user_name,
flash_message=flash_message,
new_user=new_user,
authorlist=authorlist,
admincontact=self.admincontact,
adminemail=self.adminemail)
else:
LOGGER.error('could not get the author list!')
message = ("There was a database error "
"trying to look up local authors. "
"Please "
"<a href=\"/astroph-coffee/about\">"
"let us know</a> about this problem!")
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
class FTSHandler(tornado.web.RequestHandler):
'''
This handles all requests for searching.
GET returns a search page.
POST posts the AJAX request.
'''
def initialize(self,
database,
voting_start,
voting_end,
debug,
signer,
geofence,
countries,
regions):
'''
Sets up the database.
'''
self.database = database
self.voting_start = voting_start
self.voting_end = voting_end
self.debug = debug
self.signer = signer
self.geofence = geofence[0]
self.ipaddrs = geofence[1]
self.editips = geofence[2]
self.countries = countries
self.regions = regions
def get(self):
'''This handles GET requests for searching.
'''
# handle a redirect with an attached flash message
flash_message = self.get_argument('f', None)
if flash_message:
            flashtext = msgdecode(flash_message, self.signer)
LOGGER.warning('flash message: %s' % flashtext)
flashbox = (
'<div data-alert class="alert-box radius">%s'
'<a href="#" class="close">×</a></div>' %
flashtext
)
flash_message = flashbox
else:
flash_message = ''
local_today = datetime.now(tz=utc).strftime('%Y-%m-%d %H:%M %Z')
# first, get the session token
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
ip_address = self.request.remote_ip
if 'User-Agent' in self.request.headers:
client_header = self.request.headers['User-Agent'] or 'none'
else:
client_header = 'none'
user_name = 'anonuser@%s' % ip_address
new_user = True
# check if this session_token corresponds to an existing user
if session_token:
sessioninfo = webdb.session_check(session_token,
database=self.database)
if sessioninfo[0]:
user_name = sessioninfo[2]
LOGGER.info('found session for %s, continuing with it' %
user_name)
new_user = False
elif sessioninfo[-1] != 'database_error':
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
else:
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
error_message=message,
local_today=local_today,
flash_message=flash_message,
new_user=new_user)
else:
if ('crawler' not in client_header.lower() and
'bot' not in client_header.lower()):
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
#######################
## CONTENT RENDERING ##
#######################
self.render("search.html",
user_name=user_name,
local_today=local_today,
flash_message=flash_message,
search_page_title="Search the Astro-Coffee archive",
search_page_type="initial",
search_results=None,
search_result_info='',
search_nmatches=0,
new_user=new_user)
def post(self):
'''This handles POST requests for searching.
renders using the search.html template with search_page_type = 'results'
and passes search_results to it from a run of the
fulltextsearch.fts4_phrase_search_paginated function.
'''
# handle a redirect with an attached flash message
flash_message = self.get_argument('f', None)
if flash_message:
            flashtext = msgdecode(flash_message, self.signer)
LOGGER.warning('flash message: %s' % flashtext)
flashbox = (
'<div data-alert class="alert-box radius">%s'
'<a href="#" class="close">×</a></div>' %
flashtext
)
flash_message = flashbox
else:
flash_message = ''
local_today = datetime.now(tz=utc).strftime('%Y-%m-%d %H:%M %Z')
# first, get the session token
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
ip_address = self.request.remote_ip
if 'User-Agent' in self.request.headers:
client_header = self.request.headers['User-Agent'] or 'none'
else:
client_header = 'none'
user_name = 'anonuser@%s' % ip_address
new_user = True
# check if this session_token corresponds to an existing user
if session_token:
sessioninfo = webdb.session_check(session_token,
database=self.database)
if sessioninfo[0]:
user_name = sessioninfo[2]
LOGGER.info('found session for %s, continuing with it' %
user_name)
new_user = False
elif sessioninfo[-1] != 'database_error':
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
else:
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
error_message=message,
local_today=local_today,
flash_message=flash_message,
new_user=new_user)
else:
if ('crawler' not in client_header.lower() and
'bot' not in client_header.lower()):
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
#######################
## CONTENT RENDERING ##
#######################
# get the search query
searchquery = self.get_argument('searchquery', None)
if not searchquery:
# guard with `or ''` because xhtml_escape raises on None
search_result_info = ('Sorry, we couldn\'t understand your '
'search query: <strong>%s</strong>' %
squeeze(xhtml_escape(searchquery or '')))
search_results = None
search_nmatches = 0
self.render("search.html",
user_name=user_name,
local_today=local_today,
flash_message=flash_message,
search_page_title="Search the Astro-Coffee archive",
search_page_type="results",
search_results=search_results,
search_nmatches=search_nmatches,
search_result_info=search_result_info,
new_user=new_user)
else:
searchquery = squeeze(xhtml_escape(searchquery))
if len(searchquery) > 0:
try:
# figure out the weights to apply
titleq_count = searchquery.count('title:')
abstractq_count = searchquery.count('abstract:')
authorq_count = searchquery.count('authors:')
author_weight = 1.0 + 1.0*authorq_count
abstract_weight = 3.0 + 1.0*abstractq_count
title_weight = 2.0 + 1.0*titleq_count
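# worked example (illustrative values, not from the original code): for
# the query 'title:exoplanets title:transits abstract:radial' we get
# titleq_count = 2, abstractq_count = 1, authorq_count = 0, so
# title_weight = 4.0, abstract_weight = 4.0, author_weight = 1.0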
# turn any &quot; entities (produced by xhtml_escape above) back
# into " characters so we can do exact phrase matching
searchquery = searchquery.replace('&quot;', '"')
ftsdict = fts.fts4_phrase_query_paginated(
searchquery,
['arxiv_id','day_serial','title',
'authors','comments','abstract',
'link','pdf','utcdate',
'nvotes',
'local_authors', 'local_author_indices'],
sortcol='relevance',
pagelimit=500,
database=self.database,
relevance_weights=[title_weight,
abstract_weight,
author_weight],
)
search_results = ftsdict['results']
all_nmatches = ftsdict['nmatches']
LOGGER.info('found %s objects matching %s' % (all_nmatches,
searchquery))
relevance_sticker = (
'<span data-tooltip aria-haspopup="true" '
'class="has-tip" title="Okapi BM25 relevance '
'weights: title = %.1f, '
'abstract = %.1f,'
' authors = %.1f, all others = 1.0">relevant</span>'
) % (title_weight, abstract_weight, author_weight)
if all_nmatches == 0:
search_nmatches = 0
search_result_info = (
'Sorry, <span class="nmatches">0</span> '
'matching items were found for: '
'<strong>%s</strong>' %
searchquery
)
elif all_nmatches == 1:
search_nmatches = 1
search_result_info = (
'Found only <span class="nmatches">1</span> '
'matching item for: '
'<strong>%s</strong>' % searchquery
)
elif 1 < all_nmatches < 501:
search_nmatches = len(ftsdict['results']['arxiv_id'])
search_result_info = (
'Found <span class="nmatches">%s</span> '
'matching items for: '
'<strong>%s</strong>' %
(search_nmatches,
searchquery)
)
else:
search_nmatches = len(ftsdict['results']['arxiv_id'])
search_result_info = (
'Found %s total matching '
'items for: <strong>%s</strong>. '
'Showing only the '
'top <span class="nmatches">%s</span> '
'%s '
'results below' %
(all_nmatches,
searchquery,
search_nmatches,
relevance_sticker))
self.render(
"search.html",
user_name=user_name,
local_today=local_today,
flash_message=flash_message,
search_page_title="Search the Astro-Coffee archive",
search_page_type="results",
search_results=search_results,
search_nmatches=search_nmatches,
search_result_info=search_result_info,
new_user=new_user
)
# if the query fails on the backend, return nothing
except Exception:
LOGGER.exception("search backend failed on searchquery: %s"
% searchquery)
search_result_info = ('Sorry, we couldn\'t understand your '
'search query: <strong>%s</strong>' %
searchquery)
search_results = None
search_nmatches = 0
self.render("search.html",
user_name=user_name,
local_today=local_today,
flash_message=flash_message,
search_page_title="Search the Astro-Coffee archive",
search_page_type="results",
search_results=search_results,
search_nmatches=search_nmatches,
search_result_info=search_result_info,
new_user=new_user)
# this is if we don't understand the query
else:
search_result_info = ('Sorry, we couldn\'t understand your '
'search query: <strong>%s</strong>.' %
searchquery)
search_results = None
search_nmatches = 0
self.render("search.html",
user_name=user_name,
local_today=local_today,
flash_message=flash_message,
search_page_title="Search the Astro-Coffee archive",
search_page_type="results",
search_results=search_results,
search_nmatches=search_nmatches,
search_result_info=search_result_info,
new_user=new_user)
| waqasbhatti/astroph-coffee | src/coffeehandlers.py | Python | mit | 91,422 | 0.002439 |
from . import error
from . import protocol
from . import transport
from urllib import parse as urlparse
def command(func):
def inner(self, *args, **kwargs):
if hasattr(self, "session"):
session = self.session
else:
session = self
if session.session_id is None:
session.start()
return func(self, *args, **kwargs)
inner.__name__ = func.__name__
inner.__doc__ = func.__doc__
return inner
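# A minimal usage sketch (not part of the original module): because the
# methods below are wrapped with @command, the underlying WebDriver
# session is started lazily on first use, so callers never need to call
# start() explicitly. Host and port here are illustrative.
#
#   session = Session("localhost", 4444)
#   session.url = "https://example.org"   # @command starts the session first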
class Timeouts(object):
def __init__(self, session):
self.session = session
def _get(self, key=None):
timeouts = self.session.send_session_command("GET", "timeouts")
if key is not None:
return timeouts[key]
return timeouts
def _set(self, key, secs):
body = {key: secs * 1000}
self.session.send_session_command("POST", "timeouts", body)
return None
@property
def script(self):
return self._get("script")
@script.setter
def script(self, secs):
return self._set("script", secs)
@property
def page_load(self):
return self._get("pageLoad")
@page_load.setter
def page_load(self, secs):
return self._set("pageLoad", secs)
@property
def implicit(self):
return self._get("implicit")
@implicit.setter
def implicit(self, secs):
return self._set("implicit", secs)
def __str__(self):
name = "%s.%s" % (self.__module__, self.__class__.__name__)
return "<%s script=%d, load=%d, implicit=%d>" % \
(name, self.script, self.page_load, self.implicit)
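# Hedged usage sketch for Timeouts (assumes a started Session named
# `session`): each getter issues GET session/{id}/timeouts, and each
# setter POSTs the value converted from seconds to milliseconds by _set.
#
#   session.timeouts.implicit = 5      # sends {"implicit": 5000}
#   session.timeouts.page_load = 30    # sends {"pageLoad": 30000}
#   print(session.timeouts.script)     # reads back the script timeout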
class ActionSequence(object):
"""API for creating and performing action sequences.
Each action method adds one or more actions to a queue. When perform()
is called, the queued actions fire in order.
May be chained together as in::
ActionSequence(session, "key", id) \
.key_down("a") \
.key_up("a") \
.perform()
"""
def __init__(self, session, action_type, input_id, pointer_params=None):
"""Represents a sequence of actions of one type for one input source.
:param session: WebDriver session.
:param action_type: Action type; may be "none", "key", or "pointer".
:param input_id: ID of input source.
:param pointer_params: Optional dictionary of pointer parameters.
"""
self.session = session
self._id = input_id
self._type = action_type
self._actions = []
self._pointer_params = pointer_params
@property
def dict(self):
d = {
"type": self._type,
"id": self._id,
"actions": self._actions,
}
if self._pointer_params is not None:
d["parameters"] = self._pointer_params
return d
@command
def perform(self):
"""Perform all queued actions."""
self.session.actions.perform([self.dict])
def _key_action(self, subtype, value):
self._actions.append({"type": subtype, "value": value})
def _pointer_action(self, subtype, button=None, x=None, y=None, duration=None, origin=None, width=None,
height=None, pressure=None, tangential_pressure=None, tilt_x=None,
tilt_y=None, twist=None, altitude_angle=None, azimuth_angle=None):
action = {
"type": subtype
}
if button is not None:
action["button"] = button
if x is not None:
action["x"] = x
if y is not None:
action["y"] = y
if duration is not None:
action["duration"] = duration
if origin is not None:
action["origin"] = origin
if width is not None:
action["width"] = width
if height is not None:
action["height"] = height
if pressure is not None:
action["pressure"] = pressure
if tangential_pressure is not None:
action["tangentialPressure"] = tangential_pressure
if tilt_x is not None:
action["tiltX"] = tilt_x
if tilt_y is not None:
action["tiltY"] = tilt_y
if twist is not None:
action["twist"] = twist
if altitude_angle is not None:
action["altitudeAngle"] = altitude_angle
if azimuth_angle is not None:
action["azimuthAngle"] = azimuth_angle
self._actions.append(action)
def pause(self, duration):
self._actions.append({"type": "pause", "duration": duration})
return self
def pointer_move(self, x, y, duration=None, origin=None, width=None, height=None,
pressure=None, tangential_pressure=None, tilt_x=None, tilt_y=None,
twist=None, altitude_angle=None, azimuth_angle=None):
"""Queue a pointerMove action.
:param x: Destination x-axis coordinate of pointer in CSS pixels.
:param y: Destination y-axis coordinate of pointer in CSS pixels.
:param duration: Number of milliseconds over which to distribute the
move. If None, remote end defaults to 0.
:param origin: Origin of coordinates, either "viewport", "pointer" or
an Element. If None, remote end defaults to "viewport".
"""
self._pointer_action("pointerMove", x=x, y=y, duration=duration, origin=origin,
width=width, height=height, pressure=pressure,
tangential_pressure=tangential_pressure, tilt_x=tilt_x, tilt_y=tilt_y,
twist=twist, altitude_angle=altitude_angle, azimuth_angle=azimuth_angle)
return self
def pointer_up(self, button=0):
"""Queue a pointerUp action for `button`.
:param button: Pointer button to perform action with.
Default: 0, which represents main device button.
"""
self._pointer_action("pointerUp", button=button)
return self
def pointer_down(self, button=0, width=None, height=None, pressure=None,
tangential_pressure=None, tilt_x=None, tilt_y=None,
twist=None, altitude_angle=None, azimuth_angle=None):
"""Queue a pointerDown action for `button`.
:param button: Pointer button to perform action with.
Default: 0, which represents main device button.
"""
self._pointer_action("pointerDown", button=button, width=width, height=height,
pressure=pressure, tangential_pressure=tangential_pressure,
tilt_x=tilt_x, tilt_y=tilt_y, twist=twist, altitude_angle=altitude_angle,
azimuth_angle=azimuth_angle)
return self
def click(self, element=None, button=0):
"""Queue a click with the specified button.
If an element is given, move the pointer to that element first,
otherwise click current pointer coordinates.
:param element: Optional element to click.
:param button: Integer representing pointer button to perform action
with. Default: 0, which represents main device button.
"""
if element:
self.pointer_move(0, 0, origin=element)
return self.pointer_down(button).pointer_up(button)
def key_up(self, value):
"""Queue a keyUp action for `value`.
:param value: Character to perform key action with.
"""
self._key_action("keyUp", value)
return self
def key_down(self, value):
"""Queue a keyDown action for `value`.
:param value: Character to perform key action with.
"""
self._key_action("keyDown", value)
return self
def send_keys(self, keys):
"""Queue a keyDown and keyUp action for each character in `keys`.
:param keys: String of keys to perform key actions with.
"""
for c in keys:
self.key_down(c)
self.key_up(c)
return self
def scroll(self, x, y, delta_x, delta_y, duration=None, origin=None):
"""Queue a scroll action.
:param x: Destination x-axis coordinate of pointer in CSS pixels.
:param y: Destination y-axis coordinate of pointer in CSS pixels.
:param delta_x: scroll delta on x-axis in CSS pixels.
:param delta_y: scroll delta on y-axis in CSS pixels.
:param duration: Number of milliseconds over which to distribute the
scroll. If None, remote end defaults to 0.
:param origin: Origin of coordinates, either "viewport" or an Element.
If None, remote end defaults to "viewport".
"""
action = {
"type": "scroll",
"x": x,
"y": y,
"deltaX": delta_x,
"deltaY": delta_y
}
if duration is not None:
action["duration"] = duration
if origin is not None:
action["origin"] = origin
self._actions.append(action)
return self
class Actions(object):
def __init__(self, session):
self.session = session
@command
def perform(self, actions=None):
"""Performs actions by tick from each action sequence in `actions`.
:param actions: List of input source action sequences. A single action
sequence may be created with the help of
``ActionSequence.dict``.
"""
body = {"actions": [] if actions is None else actions}
actions = self.session.send_session_command("POST", "actions", body)
return actions
@command
def release(self):
return self.session.send_session_command("DELETE", "actions")
def sequence(self, *args, **kwargs):
"""Return an empty ActionSequence of the designated type.
See ActionSequence for parameter list.
"""
return ActionSequence(self.session, *args, **kwargs)
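# Illustrative sketch of building sequences via Actions.sequence() and
# performing them (input IDs and pointer parameters are made up here):
#
#   keyboard = session.actions.sequence("key", "keyboard0")
#   keyboard.key_down("a").key_up("a").perform()
#
#   mouse = session.actions.sequence("pointer", "pointer0",
#                                    {"pointerType": "mouse"})
#   mouse.pointer_move(10, 20).click().perform()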
class Window(object):
identifier = "window-fcc6-11e5-b4f8-330a88ab9d7f"
def __init__(self, session, id=None):
self.session = session
self.id = id
@command
def close(self):
handles = self.session.send_session_command("DELETE", "window")
if handles is not None and len(handles) == 0:
# With no more open top-level browsing contexts, the session is closed.
self.session.session_id = None
return handles
@property
@command
def rect(self):
return self.session.send_session_command("GET", "window/rect")
@property
@command
def size(self):
"""Gets the window size as a tuple of `(width, height)`."""
rect = self.rect
return (rect["width"], rect["height"])
@size.setter
@command
def size(self, new_size):
"""Set window size by passing a tuple of `(width, height)`."""
width, height = new_size
body = {"width": width, "height": height}
self.session.send_session_command("POST", "window/rect", body)
@property
@command
def position(self):
"""Gets the window position as a tuple of `(x, y)`."""
rect = self.rect
return (rect["x"], rect["y"])
@position.setter
@command
def position(self, new_position):
"""Set window position by passing a tuple of `(x, y)`."""
x, y = new_position
body = {"x": x, "y": y}
self.session.send_session_command("POST", "window/rect", body)
@command
def maximize(self):
return self.session.send_session_command("POST", "window/maximize")
@command
def minimize(self):
return self.session.send_session_command("POST", "window/minimize")
@command
def fullscreen(self):
return self.session.send_session_command("POST", "window/fullscreen")
@classmethod
def from_json(cls, json, session):
# pass the session first to match __init__(self, session, id=None)
uuid = json[Window.identifier]
return cls(session, uuid)
class Frame(object):
identifier = "frame-075b-4da1-b6ba-e579c2d3230a"
def __init__(self, session, id=None):
self.session = session
self.id = id
@classmethod
def from_json(cls, json, session):
uuid = json[Frame.identifier]
return cls(session, uuid)
class ShadowRoot(object):
identifier = "shadow-075b-4da1-b6ba-e579c2d3230a"
def __init__(self, session, id):
"""
Construct a new shadow root representation.
:param id: Shadow root UUID which must be unique across
all browsing contexts.
:param session: Current ``webdriver.Session``.
"""
self.id = id
self.session = session
@classmethod
def from_json(cls, json, session):
uuid = json[ShadowRoot.identifier]
# __init__ takes (session, id), so pass the session first
return cls(session, uuid)
def send_shadow_command(self, method, uri, body=None):
url = "shadow/{}/{}".format(self.id, uri)
return self.session.send_session_command(method, url, body)
@command
def find_element(self, strategy, selector):
body = {"using": strategy,
"value": selector}
return self.send_shadow_command("POST", "element", body)
@command
def find_elements(self, strategy, selector):
body = {"using": strategy,
"value": selector}
return self.send_shadow_command("POST", "elements", body)
class Find(object):
def __init__(self, session):
self.session = session
@command
def css(self, element_selector, all=True):
elements = self._find_element("css selector", element_selector, all)
return elements
def _find_element(self, strategy, selector, all):
route = "elements" if all else "element"
body = {"using": strategy,
"value": selector}
return self.session.send_session_command("POST", route, body)
class Cookies(object):
def __init__(self, session):
self.session = session
def __getitem__(self, name):
return self.session.send_session_command("GET", "cookie/%s" % name, {})
def __setitem__(self, name, value):
cookie = {"name": name,
"value": None}
if isinstance(value, str):
cookie["value"] = value
elif hasattr(value, "value"):
cookie["value"] = value.value
# Add Cookie is a POST to "cookie" with a {"cookie": ...} body,
# matching Session.set_cookie below
self.session.send_session_command("POST", "cookie", {"cookie": cookie})
class UserPrompt(object):
def __init__(self, session):
self.session = session
@command
def dismiss(self):
self.session.send_session_command("POST", "alert/dismiss")
@command
def accept(self):
self.session.send_session_command("POST", "alert/accept")
@property
@command
def text(self):
return self.session.send_session_command("GET", "alert/text")
@text.setter
@command
def text(self, value):
body = {"text": value}
self.session.send_session_command("POST", "alert/text", body=body)
class Session(object):
def __init__(self,
host,
port,
url_prefix="/",
capabilities=None,
extension=None):
self.transport = transport.HTTPWireProtocol(host, port, url_prefix)
self.requested_capabilities = capabilities
self.capabilities = None
self.session_id = None
self.timeouts = None
self.window = None
self.find = None
self.extension = None
self.extension_cls = extension
self.timeouts = Timeouts(self)
self.window = Window(self)
self.find = Find(self)
self.alert = UserPrompt(self)
self.actions = Actions(self)
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.session_id or "(disconnected)")
def __eq__(self, other):
return (self.session_id is not None and isinstance(other, Session) and
self.session_id == other.session_id)
def __enter__(self):
self.start()
return self
def __exit__(self, *args, **kwargs):
self.end()
def __del__(self):
self.end()
def match(self, capabilities):
return self.requested_capabilities == capabilities
def start(self):
"""Start a new WebDriver session.
:return: Dictionary with `capabilities` and `sessionId`.
:raises error.WebDriverException: If the remote end returns
an error.
"""
if self.session_id is not None:
return
self.transport.close()
body = {"capabilities": {}}
if self.requested_capabilities is not None:
body["capabilities"] = self.requested_capabilities
value = self.send_command("POST", "session", body=body)
self.session_id = value["sessionId"]
self.capabilities = value["capabilities"]
if self.extension_cls:
self.extension = self.extension_cls(self)
return value
def end(self):
"""Try to close the active session."""
if self.session_id is None:
return
try:
self.send_command("DELETE", "session/%s" % self.session_id)
except (OSError, error.InvalidSessionIdException):
pass
finally:
self.session_id = None
self.transport.close()
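# Lifecycle sketch: __enter__/__exit__ wrap start()/end(), so a session
# can be scoped with a `with` block (capabilities payload illustrative):
#
#   with Session("localhost", 4444, capabilities={"alwaysMatch": {}}) as s:
#       s.url = "https://example.org"
#   # on exit, end() has deleted the remote session and closed the transport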
def send_command(self, method, url, body=None, timeout=None):
"""
Send a command to the remote end and validate its success.
:param method: HTTP method to use in request.
:param url: "Command part" of the HTTP request URL,
e.g. `window/rect`.
:param body: Optional body of the HTTP request.
:return: `None` if the HTTP response body was empty, otherwise
the `value` field returned after parsing the response
body as JSON.
:raises error.WebDriverException: If the remote end returns
an error.
:raises ValueError: If the response body does not contain a
`value` key.
"""
response = self.transport.send(
method, url, body,
encoder=protocol.Encoder, decoder=protocol.Decoder,
session=self, timeout=timeout)
if response.status != 200:
err = error.from_response(response)
if isinstance(err, error.InvalidSessionIdException):
# The driver could have already deleted the session.
self.session_id = None
raise err
if "value" in response.body:
value = response.body["value"]
"""
Edge does not yet return the w3c session ID.
We want the tests to run in Edge anyway to help with REC.
In order to run the tests in Edge, we need to hack around
bug:
https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14641972
"""
if url == "session" and method == "POST" and "sessionId" in response.body and "sessionId" not in value:
value["sessionId"] = response.body["sessionId"]
else:
raise ValueError("Expected 'value' key in response body:\n"
"%s" % response)
return value
def send_session_command(self, method, uri, body=None, timeout=None):
"""
Send a command to an established session and validate its success.
:param method: HTTP method to use in request.
:param uri: "Command part" of the HTTP request URL,
e.g. `window/rect`.
:param body: Optional body of the HTTP request. Must be JSON
serialisable.
:return: `None` if the HTTP response body was empty, otherwise
the result of parsing the body as JSON.
:raises error.WebDriverException: If the remote end returns
an error.
"""
url = urlparse.urljoin("session/%s/" % self.session_id, uri)
return self.send_command(method, url, body, timeout)
@property
@command
def url(self):
return self.send_session_command("GET", "url")
@url.setter
@command
def url(self, url):
if urlparse.urlsplit(url).netloc is None:
return self.url(url)
body = {"url": url}
return self.send_session_command("POST", "url", body)
@command
def back(self):
return self.send_session_command("POST", "back")
@command
def forward(self):
return self.send_session_command("POST", "forward")
@command
def refresh(self):
return self.send_session_command("POST", "refresh")
@property
@command
def title(self):
return self.send_session_command("GET", "title")
@property
@command
def source(self):
return self.send_session_command("GET", "source")
@command
def new_window(self, type_hint="tab"):
body = {"type": type_hint}
value = self.send_session_command("POST", "window/new", body)
return value["handle"]
@property
@command
def window_handle(self):
return self.send_session_command("GET", "window")
@window_handle.setter
@command
def window_handle(self, handle):
body = {"handle": handle}
return self.send_session_command("POST", "window", body=body)
def switch_frame(self, frame):
if frame == "parent":
url = "frame/parent"
body = None
else:
url = "frame"
body = {"id": frame}
return self.send_session_command("POST", url, body)
@property
@command
def handles(self):
return self.send_session_command("GET", "window/handles")
@property
@command
def active_element(self):
return self.send_session_command("GET", "element/active")
@command
def cookies(self, name=None):
if name is None:
url = "cookie"
else:
url = "cookie/%s" % name
return self.send_session_command("GET", url, {})
@command
def set_cookie(self, name, value, path=None, domain=None,
secure=None, expiry=None, http_only=None):
body = {
"name": name,
"value": value,
}
if domain is not None:
body["domain"] = domain
if expiry is not None:
body["expiry"] = expiry
if http_only is not None:
body["httpOnly"] = http_only
if path is not None:
body["path"] = path
if secure is not None:
body["secure"] = secure
self.send_session_command("POST", "cookie", {"cookie": body})
def delete_cookie(self, name=None):
if name is None:
url = "cookie"
else:
url = "cookie/%s" % name
self.send_session_command("DELETE", url, {})
#[...]
@command
def execute_script(self, script, args=None):
if args is None:
args = []
body = {
"script": script,
"args": args
}
return self.send_session_command("POST", "execute/sync", body)
@command
def execute_async_script(self, script, args=None):
if args is None:
args = []
body = {
"script": script,
"args": args
}
return self.send_session_command("POST", "execute/async", body)
#[...]
@command
def screenshot(self):
return self.send_session_command("GET", "screenshot")
class Element(object):
"""
Representation of a web element.
A web element is an abstraction used to identify an element when
it is transported via the protocol, between remote- and local ends.
"""
identifier = "element-6066-11e4-a52e-4f735466cecf"
def __init__(self, id, session):
"""
Construct a new web element representation.
:param id: Web element UUID which must be unique across
all browsing contexts.
:param session: Current ``webdriver.Session``.
"""
self.id = id
self.session = session
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.id)
def __eq__(self, other):
return (isinstance(other, Element) and self.id == other.id and
self.session == other.session)
@classmethod
def from_json(cls, json, session):
uuid = json[Element.identifier]
return cls(uuid, session)
def send_element_command(self, method, uri, body=None):
url = "element/%s/%s" % (self.id, uri)
return self.session.send_session_command(method, url, body)
@command
def find_element(self, strategy, selector):
body = {"using": strategy,
"value": selector}
return self.send_element_command("POST", "element", body)
@command
def click(self):
self.send_element_command("POST", "click", {})
@command
def tap(self):
self.send_element_command("POST", "tap", {})
@command
def clear(self):
self.send_element_command("POST", "clear", {})
@command
def send_keys(self, text):
return self.send_element_command("POST", "value", {"text": text})
@property
@command
def text(self):
return self.send_element_command("GET", "text")
@property
@command
def name(self):
return self.send_element_command("GET", "name")
@command
def style(self, property_name):
return self.send_element_command("GET", "css/%s" % property_name)
@property
@command
def rect(self):
return self.send_element_command("GET", "rect")
@property
@command
def selected(self):
return self.send_element_command("GET", "selected")
@command
def screenshot(self):
return self.send_element_command("GET", "screenshot")
@property
@command
def shadow_root(self):
return self.send_element_command("GET", "shadow")
@command
def attribute(self, name):
return self.send_element_command("GET", "attribute/%s" % name)
# This MUST come last because otherwise @property decorators above
# will be overridden by this.
@command
def property(self, name):
return self.send_element_command("GET", "property/%s" % name)
| KiChjang/servo | tests/wpt/web-platform-tests/tools/webdriver/webdriver/client.py | Python | mpl-2.0 | 26,422 | 0.000719 |
# -*- coding: utf-8 -*-
from datetime import timedelta
from itertools import product
import nose
import re
import warnings
from pandas import (date_range, MultiIndex, Index, CategoricalIndex,
compat)
from pandas.core.common import PerformanceWarning
from pandas.indexes.base import InvalidIndexError
from pandas.compat import range, lrange, u, PY3, long, lzip
import numpy as np
from pandas.util.testing import (assert_almost_equal, assertRaisesRegexp,
assert_copy)
import pandas.util.testing as tm
import pandas as pd
from pandas.lib import Timestamp
from .common import Base
class TestMultiIndex(Base, tm.TestCase):
_holder = MultiIndex
_multiprocess_can_split_ = True
_compat_props = ['shape', 'ndim', 'size', 'itemsize']
def setUp(self):
major_axis = Index(['foo', 'bar', 'baz', 'qux'])
minor_axis = Index(['one', 'two'])
major_labels = np.array([0, 0, 1, 2, 3, 3])
minor_labels = np.array([0, 1, 0, 1, 0, 1])
self.index_names = ['first', 'second']
self.indices = dict(index=MultiIndex(levels=[major_axis, minor_axis],
labels=[major_labels, minor_labels
], names=self.index_names,
verify_integrity=False))
self.setup_indices()
def create_index(self):
return self.index
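# For reference (derived from setUp above), self.index is equivalent to:
#
#   MultiIndex.from_tuples(
#       [('foo', 'one'), ('foo', 'two'), ('bar', 'one'),
#        ('baz', 'two'), ('qux', 'one'), ('qux', 'two')],
#       names=['first', 'second'])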
def test_boolean_context_compat2(self):
# boolean context compat
# GH7897
i1 = MultiIndex.from_tuples([('A', 1), ('A', 2)])
i2 = MultiIndex.from_tuples([('A', 1), ('A', 3)])
common = i1.intersection(i2)
def f():
if common:
pass
tm.assertRaisesRegexp(ValueError, 'The truth value of a', f)
def test_labels_dtypes(self):
# GH 8456
i = MultiIndex.from_tuples([('A', 1), ('A', 2)])
self.assertTrue(i.labels[0].dtype == 'int8')
self.assertTrue(i.labels[1].dtype == 'int8')
i = MultiIndex.from_product([['a'], range(40)])
self.assertTrue(i.labels[1].dtype == 'int8')
i = MultiIndex.from_product([['a'], range(400)])
self.assertTrue(i.labels[1].dtype == 'int16')
i = MultiIndex.from_product([['a'], range(40000)])
self.assertTrue(i.labels[1].dtype == 'int32')
i = pd.MultiIndex.from_product([['a'], range(1000)])
self.assertTrue((i.labels[0] >= 0).all())
self.assertTrue((i.labels[1] >= 0).all())
def test_set_name_methods(self):
# so long as these are synonyms, we don't need to test set_names
self.assertEqual(self.index.rename, self.index.set_names)
new_names = [name + "SUFFIX" for name in self.index_names]
ind = self.index.set_names(new_names)
self.assertEqual(self.index.names, self.index_names)
self.assertEqual(ind.names, new_names)
with assertRaisesRegexp(ValueError, "^Length"):
ind.set_names(new_names + new_names)
new_names2 = [name + "SUFFIX2" for name in new_names]
res = ind.set_names(new_names2, inplace=True)
self.assertIsNone(res)
self.assertEqual(ind.names, new_names2)
# set names for specific level (# GH7792)
ind = self.index.set_names(new_names[0], level=0)
self.assertEqual(self.index.names, self.index_names)
self.assertEqual(ind.names, [new_names[0], self.index_names[1]])
res = ind.set_names(new_names2[0], level=0, inplace=True)
self.assertIsNone(res)
self.assertEqual(ind.names, [new_names2[0], self.index_names[1]])
# set names for multiple levels
ind = self.index.set_names(new_names, level=[0, 1])
self.assertEqual(self.index.names, self.index_names)
self.assertEqual(ind.names, new_names)
res = ind.set_names(new_names2, level=[0, 1], inplace=True)
self.assertIsNone(res)
self.assertEqual(ind.names, new_names2)
def test_set_levels(self):
# side note - you probably wouldn't want to use levels and labels
# directly like this - but it is possible.
levels = self.index.levels
new_levels = [[lev + 'a' for lev in level] for level in levels]
def assert_matching(actual, expected):
# avoid specifying internal representation
# as much as possible
self.assertEqual(len(actual), len(expected))
for act, exp in zip(actual, expected):
act = np.asarray(act)
exp = np.asarray(exp)
assert_almost_equal(act, exp)
# level changing [w/o mutation]
ind2 = self.index.set_levels(new_levels)
assert_matching(ind2.levels, new_levels)
assert_matching(self.index.levels, levels)
# level changing [w/ mutation]
ind2 = self.index.copy()
inplace_return = ind2.set_levels(new_levels, inplace=True)
self.assertIsNone(inplace_return)
assert_matching(ind2.levels, new_levels)
# level changing specific level [w/o mutation]
ind2 = self.index.set_levels(new_levels[0], level=0)
assert_matching(ind2.levels, [new_levels[0], levels[1]])
assert_matching(self.index.levels, levels)
ind2 = self.index.set_levels(new_levels[1], level=1)
assert_matching(ind2.levels, [levels[0], new_levels[1]])
assert_matching(self.index.levels, levels)
# level changing multiple levels [w/o mutation]
ind2 = self.index.set_levels(new_levels, level=[0, 1])
assert_matching(ind2.levels, new_levels)
assert_matching(self.index.levels, levels)
# level changing specific level [w/ mutation]
ind2 = self.index.copy()
inplace_return = ind2.set_levels(new_levels[0], level=0, inplace=True)
self.assertIsNone(inplace_return)
assert_matching(ind2.levels, [new_levels[0], levels[1]])
assert_matching(self.index.levels, levels)
ind2 = self.index.copy()
inplace_return = ind2.set_levels(new_levels[1], level=1, inplace=True)
self.assertIsNone(inplace_return)
assert_matching(ind2.levels, [levels[0], new_levels[1]])
assert_matching(self.index.levels, levels)
# level changing multiple levels [w/ mutation]
ind2 = self.index.copy()
inplace_return = ind2.set_levels(new_levels, level=[0, 1],
inplace=True)
self.assertIsNone(inplace_return)
assert_matching(ind2.levels, new_levels)
assert_matching(self.index.levels, levels)
def test_set_labels(self):
# side note - you probably wouldn't want to use levels and labels
# directly like this - but it is possible.
labels = self.index.labels
major_labels, minor_labels = labels
major_labels = [(x + 1) % 3 for x in major_labels]
minor_labels = [(x + 1) % 1 for x in minor_labels]
new_labels = [major_labels, minor_labels]
def assert_matching(actual, expected):
# avoid specifying internal representation
# as much as possible
self.assertEqual(len(actual), len(expected))
for act, exp in zip(actual, expected):
act = np.asarray(act)
exp = np.asarray(exp)
assert_almost_equal(act, exp)
# label changing [w/o mutation]
ind2 = self.index.set_labels(new_labels)
assert_matching(ind2.labels, new_labels)
assert_matching(self.index.labels, labels)
# label changing [w/ mutation]
ind2 = self.index.copy()
inplace_return = ind2.set_labels(new_labels, inplace=True)
self.assertIsNone(inplace_return)
assert_matching(ind2.labels, new_labels)
# label changing specific level [w/o mutation]
ind2 = self.index.set_labels(new_labels[0], level=0)
assert_matching(ind2.labels, [new_labels[0], labels[1]])
assert_matching(self.index.labels, labels)
ind2 = self.index.set_labels(new_labels[1], level=1)
assert_matching(ind2.labels, [labels[0], new_labels[1]])
assert_matching(self.index.labels, labels)
# label changing multiple levels [w/o mutation]
ind2 = self.index.set_labels(new_labels, level=[0, 1])
assert_matching(ind2.labels, new_labels)
assert_matching(self.index.labels, labels)
# label changing specific level [w/ mutation]
ind2 = self.index.copy()
inplace_return = ind2.set_labels(new_labels[0], level=0, inplace=True)
self.assertIsNone(inplace_return)
assert_matching(ind2.labels, [new_labels[0], labels[1]])
assert_matching(self.index.labels, labels)
ind2 = self.index.copy()
inplace_return = ind2.set_labels(new_labels[1], level=1, inplace=True)
self.assertIsNone(inplace_return)
assert_matching(ind2.labels, [labels[0], new_labels[1]])
assert_matching(self.index.labels, labels)
# label changing multiple levels [w/ mutation]
ind2 = self.index.copy()
inplace_return = ind2.set_labels(new_labels, level=[0, 1],
inplace=True)
self.assertIsNone(inplace_return)
assert_matching(ind2.labels, new_labels)
assert_matching(self.index.labels, labels)
def test_set_levels_labels_names_bad_input(self):
levels, labels = self.index.levels, self.index.labels
names = self.index.names
with tm.assertRaisesRegexp(ValueError, 'Length of levels'):
self.index.set_levels([levels[0]])
with tm.assertRaisesRegexp(ValueError, 'Length of labels'):
self.index.set_labels([labels[0]])
with tm.assertRaisesRegexp(ValueError, 'Length of names'):
self.index.set_names([names[0]])
# shouldn't raise on scalar data; should demand list-like instead
with tm.assertRaisesRegexp(TypeError, 'list of lists-like'):
self.index.set_levels(levels[0])
# shouldn't raise on scalar data; should demand list-like instead
with tm.assertRaisesRegexp(TypeError, 'list of lists-like'):
self.index.set_labels(labels[0])
# shouldn't raise on scalar data; should demand list-like instead
with tm.assertRaisesRegexp(TypeError, 'list-like'):
self.index.set_names(names[0])
# should have equal lengths
with tm.assertRaisesRegexp(TypeError, 'list of lists-like'):
self.index.set_levels(levels[0], level=[0, 1])
with tm.assertRaisesRegexp(TypeError, 'list-like'):
self.index.set_levels(levels, level=0)
# should have equal lengths
with tm.assertRaisesRegexp(TypeError, 'list of lists-like'):
self.index.set_labels(labels[0], level=[0, 1])
with tm.assertRaisesRegexp(TypeError, 'list-like'):
self.index.set_labels(labels, level=0)
# should have equal lengths
with tm.assertRaisesRegexp(ValueError, 'Length of names'):
self.index.set_names(names[0], level=[0, 1])
with tm.assertRaisesRegexp(TypeError, 'string'):
self.index.set_names(names, level=0)
def test_metadata_immutable(self):
levels, labels = self.index.levels, self.index.labels
# shouldn't be able to set at either the top level or base level
mutable_regex = re.compile('does not support mutable operations')
with assertRaisesRegexp(TypeError, mutable_regex):
levels[0] = levels[0]
with assertRaisesRegexp(TypeError, mutable_regex):
levels[0][0] = levels[0][0]
# ditto for labels
with assertRaisesRegexp(TypeError, mutable_regex):
labels[0] = labels[0]
with assertRaisesRegexp(TypeError, mutable_regex):
labels[0][0] = labels[0][0]
# and for names
names = self.index.names
with assertRaisesRegexp(TypeError, mutable_regex):
names[0] = names[0]
def test_inplace_mutation_resets_values(self):
levels = [['a', 'b', 'c'], [4]]
levels2 = [[1, 2, 3], ['a']]
labels = [[0, 1, 0, 2, 2, 0], [0, 0, 0, 0, 0, 0]]
mi1 = MultiIndex(levels=levels, labels=labels)
mi2 = MultiIndex(levels=levels2, labels=labels)
vals = mi1.values.copy()
vals2 = mi2.values.copy()
self.assertIsNotNone(mi1._tuples)
# make sure level setting works
new_vals = mi1.set_levels(levels2).values
assert_almost_equal(vals2, new_vals)
# non-inplace doesn't kill _tuples [implementation detail]
assert_almost_equal(mi1._tuples, vals)
# and values is still same too
assert_almost_equal(mi1.values, vals)
# inplace should kill _tuples
mi1.set_levels(levels2, inplace=True)
assert_almost_equal(mi1.values, vals2)
# make sure label setting works too
labels2 = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]
exp_values = np.empty((6, ), dtype=object)
exp_values[:] = [(long(1), 'a')] * 6
# must be 1d array of tuples
self.assertEqual(exp_values.shape, (6, ))
new_values = mi2.set_labels(labels2).values
# not inplace shouldn't change
assert_almost_equal(mi2._tuples, vals2)
# should have correct values
assert_almost_equal(exp_values, new_values)
# and again setting inplace should kill _tuples, etc
mi2.set_labels(labels2, inplace=True)
assert_almost_equal(mi2.values, new_values)
def test_copy_in_constructor(self):
levels = np.array(["a", "b", "c"])
labels = np.array([1, 1, 2, 0, 0, 1, 1])
val = labels[0]
mi = MultiIndex(levels=[levels, levels], labels=[labels, labels],
copy=True)
self.assertEqual(mi.labels[0][0], val)
labels[0] = 15
self.assertEqual(mi.labels[0][0], val)
val = levels[0]
levels[0] = "PANDA"
self.assertEqual(mi.levels[0][0], val)
def test_set_value_keeps_names(self):
# motivating example from #3742
lev1 = ['hans', 'hans', 'hans', 'grethe', 'grethe', 'grethe']
lev2 = ['1', '2', '3'] * 2
idx = pd.MultiIndex.from_arrays([lev1, lev2], names=['Name', 'Number'])
df = pd.DataFrame(
np.random.randn(6, 4),
columns=['one', 'two', 'three', 'four'],
index=idx)
df = df.sortlevel()
self.assertIsNone(df.is_copy)
self.assertEqual(df.index.names, ('Name', 'Number'))
df = df.set_value(('grethe', '4'), 'one', 99.34)
self.assertIsNone(df.is_copy)
self.assertEqual(df.index.names, ('Name', 'Number'))
def test_names(self):
# names are assigned in __init__
names = self.index_names
level_names = [level.name for level in self.index.levels]
self.assertEqual(names, level_names)
# setting bad names on existing
index = self.index
assertRaisesRegexp(ValueError, "^Length of names", setattr, index,
"names", list(index.names) + ["third"])
assertRaisesRegexp(ValueError, "^Length of names", setattr, index,
"names", [])
# initializing with bad names (should always be equivalent)
major_axis, minor_axis = self.index.levels
major_labels, minor_labels = self.index.labels
assertRaisesRegexp(ValueError, "^Length of names", MultiIndex,
levels=[major_axis, minor_axis],
labels=[major_labels, minor_labels],
names=['first'])
assertRaisesRegexp(ValueError, "^Length of names", MultiIndex,
levels=[major_axis, minor_axis],
labels=[major_labels, minor_labels],
names=['first', 'second', 'third'])
# names are assigned
index.names = ["a", "b"]
ind_names = list(index.names)
level_names = [level.name for level in index.levels]
self.assertEqual(ind_names, level_names)
def test_reference_duplicate_name(self):
idx = MultiIndex.from_tuples(
[('a', 'b'), ('c', 'd')], names=['x', 'x'])
self.assertTrue(idx._reference_duplicate_name('x'))
idx = MultiIndex.from_tuples(
[('a', 'b'), ('c', 'd')], names=['x', 'y'])
self.assertFalse(idx._reference_duplicate_name('x'))
def test_astype(self):
expected = self.index.copy()
actual = self.index.astype('O')
assert_copy(actual.levels, expected.levels)
assert_copy(actual.labels, expected.labels)
self.check_level_names(actual, expected.names)
with assertRaisesRegexp(TypeError, "^Setting.*dtype.*object"):
self.index.astype(np.dtype(int))
def test_constructor_single_level(self):
single_level = MultiIndex(levels=[['foo', 'bar', 'baz', 'qux']],
labels=[[0, 1, 2, 3]], names=['first'])
tm.assertIsInstance(single_level, Index)
self.assertNotIsInstance(single_level, MultiIndex)
self.assertEqual(single_level.name, 'first')
single_level = MultiIndex(levels=[['foo', 'bar', 'baz', 'qux']],
labels=[[0, 1, 2, 3]])
self.assertIsNone(single_level.name)
def test_constructor_no_levels(self):
assertRaisesRegexp(ValueError, "non-zero number of levels/labels",
MultiIndex, levels=[], labels=[])
both_re = re.compile('Must pass both levels and labels')
with tm.assertRaisesRegexp(TypeError, both_re):
MultiIndex(levels=[])
with tm.assertRaisesRegexp(TypeError, both_re):
MultiIndex(labels=[])
def test_constructor_mismatched_label_levels(self):
labels = [np.array([1]), np.array([2]), np.array([3])]
levels = ["a"]
assertRaisesRegexp(ValueError, "Length of levels and labels must be"
" the same", MultiIndex, levels=levels,
labels=labels)
length_error = re.compile('>= length of level')
label_error = re.compile(r'Unequal label lengths: \[4, 2\]')
# important to check that it's looking at the right thing.
with tm.assertRaisesRegexp(ValueError, length_error):
MultiIndex(levels=[['a'], ['b']],
labels=[[0, 1, 2, 3], [0, 3, 4, 1]])
with tm.assertRaisesRegexp(ValueError, label_error):
MultiIndex(levels=[['a'], ['b']], labels=[[0, 0, 0, 0], [0, 0]])
# external API
with tm.assertRaisesRegexp(ValueError, length_error):
self.index.copy().set_levels([['a'], ['b']])
with tm.assertRaisesRegexp(ValueError, label_error):
self.index.copy().set_labels([[0, 0, 0, 0], [0, 0]])
# deprecated properties
with warnings.catch_warnings():
warnings.simplefilter('ignore')
with tm.assertRaisesRegexp(ValueError, length_error):
self.index.copy().levels = [['a'], ['b']]
with tm.assertRaisesRegexp(ValueError, label_error):
self.index.copy().labels = [[0, 0, 0, 0], [0, 0]]
def assert_multiindex_copied(self, copy, original):
# levels should be (at least) shallow-copied
assert_copy(copy.levels, original.levels)
assert_almost_equal(copy.labels, original.labels)
# labels: doesn't matter which way they were copied
assert_almost_equal(copy.labels, original.labels)
self.assertIsNot(copy.labels, original.labels)
# names: doesn't matter which way they were copied
self.assertEqual(copy.names, original.names)
self.assertIsNot(copy.names, original.names)
# sort order should be copied
self.assertEqual(copy.sortorder, original.sortorder)
def test_copy(self):
i_copy = self.index.copy()
self.assert_multiindex_copied(i_copy, self.index)
def test_shallow_copy(self):
i_copy = self.index._shallow_copy()
self.assert_multiindex_copied(i_copy, self.index)
def test_view(self):
i_view = self.index.view()
self.assert_multiindex_copied(i_view, self.index)
def check_level_names(self, index, names):
self.assertEqual([level.name for level in index.levels], list(names))
def test_changing_names(self):
# names should be applied to levels
level_names = [level.name for level in self.index.levels]
self.check_level_names(self.index, self.index.names)
view = self.index.view()
copy = self.index.copy()
shallow_copy = self.index._shallow_copy()
# changing names should change level names on object
new_names = [name + "a" for name in self.index.names]
self.index.names = new_names
self.check_level_names(self.index, new_names)
# but not on copies
self.check_level_names(view, level_names)
self.check_level_names(copy, level_names)
self.check_level_names(shallow_copy, level_names)
# and copies shouldn't change original
shallow_copy.names = [name + "c" for name in shallow_copy.names]
self.check_level_names(self.index, new_names)
def test_duplicate_names(self):
self.index.names = ['foo', 'foo']
assertRaisesRegexp(KeyError, 'Level foo not found',
self.index._get_level_number, 'foo')
def test_get_level_number_integer(self):
self.index.names = [1, 0]
self.assertEqual(self.index._get_level_number(1), 0)
self.assertEqual(self.index._get_level_number(0), 1)
self.assertRaises(IndexError, self.index._get_level_number, 2)
assertRaisesRegexp(KeyError, 'Level fourth not found',
self.index._get_level_number, 'fourth')
def test_from_arrays(self):
arrays = []
for lev, lab in zip(self.index.levels, self.index.labels):
arrays.append(np.asarray(lev).take(lab))
result = MultiIndex.from_arrays(arrays)
self.assertEqual(list(result), list(self.index))
# infer correctly
result = MultiIndex.from_arrays([[pd.NaT, Timestamp('20130101')],
['a', 'b']])
self.assertTrue(result.levels[0].equals(Index([Timestamp('20130101')
])))
self.assertTrue(result.levels[1].equals(Index(['a', 'b'])))
def test_from_product(self):
first = ['foo', 'bar', 'buz']
second = ['a', 'b', 'c']
names = ['first', 'second']
result = MultiIndex.from_product([first, second], names=names)
tuples = [('foo', 'a'), ('foo', 'b'), ('foo', 'c'), ('bar', 'a'),
('bar', 'b'), ('bar', 'c'), ('buz', 'a'), ('buz', 'b'),
('buz', 'c')]
expected = MultiIndex.from_tuples(tuples, names=names)
tm.assert_numpy_array_equal(result, expected)
self.assertEqual(result.names, names)
def test_from_product_datetimeindex(self):
dt_index = date_range('2000-01-01', periods=2)
mi = pd.MultiIndex.from_product([[1, 2], dt_index])
etalon = pd.lib.list_to_object_array([(1, pd.Timestamp(
'2000-01-01')), (1, pd.Timestamp('2000-01-02')), (2, pd.Timestamp(
'2000-01-01')), (2, pd.Timestamp('2000-01-02'))])
tm.assert_numpy_array_equal(mi.values, etalon)
def test_values_boxed(self):
tuples = [(1, pd.Timestamp('2000-01-01')), (2, pd.NaT),
(3, pd.Timestamp('2000-01-03')),
(1, pd.Timestamp('2000-01-04')),
(2, pd.Timestamp('2000-01-02')),
(3, pd.Timestamp('2000-01-03'))]
mi = pd.MultiIndex.from_tuples(tuples)
tm.assert_numpy_array_equal(mi.values,
pd.lib.list_to_object_array(tuples))
# Check that code branches for boxed values produce identical results
tm.assert_numpy_array_equal(mi.values[:4], mi[:4].values)
def test_append(self):
result = self.index[:3].append(self.index[3:])
self.assertTrue(result.equals(self.index))
foos = [self.index[:1], self.index[1:3], self.index[3:]]
result = foos[0].append(foos[1:])
self.assertTrue(result.equals(self.index))
# empty
result = self.index.append([])
self.assertTrue(result.equals(self.index))
def test_get_level_values(self):
result = self.index.get_level_values(0)
expected = ['foo', 'foo', 'bar', 'baz', 'qux', 'qux']
tm.assert_numpy_array_equal(result, expected)
self.assertEqual(result.name, 'first')
result = self.index.get_level_values('first')
expected = self.index.get_level_values(0)
tm.assert_numpy_array_equal(result, expected)
# GH 10460
index = MultiIndex(levels=[CategoricalIndex(
['A', 'B']), CategoricalIndex([1, 2, 3])], labels=[np.array(
[0, 0, 0, 1, 1, 1]), np.array([0, 1, 2, 0, 1, 2])])
exp = CategoricalIndex(['A', 'A', 'A', 'B', 'B', 'B'])
self.assert_index_equal(index.get_level_values(0), exp)
exp = CategoricalIndex([1, 2, 3, 1, 2, 3])
self.assert_index_equal(index.get_level_values(1), exp)
def test_get_level_values_na(self):
arrays = [['a', 'b', 'b'], [1, np.nan, 2]]
index = pd.MultiIndex.from_arrays(arrays)
values = index.get_level_values(1)
expected = [1, np.nan, 2]
tm.assert_numpy_array_equal(values.values.astype(float), expected)
arrays = [['a', 'b', 'b'], [np.nan, np.nan, 2]]
index = pd.MultiIndex.from_arrays(arrays)
values = index.get_level_values(1)
expected = [np.nan, np.nan, 2]
tm.assert_numpy_array_equal(values.values.astype(float), expected)
arrays = [[np.nan, np.nan, np.nan], ['a', np.nan, 1]]
index = pd.MultiIndex.from_arrays(arrays)
values = index.get_level_values(0)
expected = [np.nan, np.nan, np.nan]
tm.assert_numpy_array_equal(values.values.astype(float), expected)
values = index.get_level_values(1)
expected = np.array(['a', np.nan, 1], dtype=object)
tm.assert_numpy_array_equal(values.values, expected)
arrays = [['a', 'b', 'b'], pd.DatetimeIndex([0, 1, pd.NaT])]
index = pd.MultiIndex.from_arrays(arrays)
values = index.get_level_values(1)
expected = pd.DatetimeIndex([0, 1, pd.NaT])
tm.assert_numpy_array_equal(values.values, expected.values)
arrays = [[], []]
index = pd.MultiIndex.from_arrays(arrays)
values = index.get_level_values(0)
self.assertEqual(values.shape, (0, ))
def test_reorder_levels(self):
# this blows up
assertRaisesRegexp(IndexError, '^Too many levels',
self.index.reorder_levels, [2, 1, 0])
def test_nlevels(self):
self.assertEqual(self.index.nlevels, 2)
def test_iter(self):
result = list(self.index)
expected = [('foo', 'one'), ('foo', 'two'), ('bar', 'one'),
('baz', 'two'), ('qux', 'one'), ('qux', 'two')]
self.assertEqual(result, expected)
def test_legacy_pickle(self):
if PY3:
raise nose.SkipTest("testing for legacy pickles not "
"support on py3")
path = tm.get_data_path('multiindex_v1.pickle')
obj = pd.read_pickle(path)
obj2 = MultiIndex.from_tuples(obj.values)
self.assertTrue(obj.equals(obj2))
res = obj.get_indexer(obj)
exp = np.arange(len(obj))
assert_almost_equal(res, exp)
res = obj.get_indexer(obj2[::-1])
exp = obj.get_indexer(obj[::-1])
exp2 = obj2.get_indexer(obj2[::-1])
assert_almost_equal(res, exp)
assert_almost_equal(exp, exp2)
def test_legacy_v2_unpickle(self):
# 0.7.3 -> 0.8.0 format change
path = tm.get_data_path('mindex_073.pickle')
obj = pd.read_pickle(path)
obj2 = MultiIndex.from_tuples(obj.values)
self.assertTrue(obj.equals(obj2))
res = obj.get_indexer(obj)
exp = np.arange(len(obj))
assert_almost_equal(res, exp)
res = obj.get_indexer(obj2[::-1])
exp = obj.get_indexer(obj[::-1])
exp2 = obj2.get_indexer(obj2[::-1])
assert_almost_equal(res, exp)
assert_almost_equal(exp, exp2)
def test_roundtrip_pickle_with_tz(self):
# GH 8367
# round-trip of timezone
index = MultiIndex.from_product(
[[1, 2], ['a', 'b'], date_range('20130101', periods=3,
tz='US/Eastern')
], names=['one', 'two', 'three'])
unpickled = self.round_trip_pickle(index)
self.assertTrue(index.equal_levels(unpickled))
def test_from_tuples_index_values(self):
result = MultiIndex.from_tuples(self.index)
self.assertTrue((result.values == self.index.values).all())
def test_contains(self):
self.assertIn(('foo', 'two'), self.index)
self.assertNotIn(('bar', 'two'), self.index)
self.assertNotIn(None, self.index)
def test_is_all_dates(self):
self.assertFalse(self.index.is_all_dates)
def test_is_numeric(self):
# MultiIndex is never numeric
self.assertFalse(self.index.is_numeric())
def test_getitem(self):
# scalar
self.assertEqual(self.index[2], ('bar', 'one'))
# slice
result = self.index[2:5]
expected = self.index[[2, 3, 4]]
self.assertTrue(result.equals(expected))
# boolean
result = self.index[[True, False, True, False, True, True]]
result2 = self.index[np.array([True, False, True, False, True, True])]
expected = self.index[[0, 2, 4, 5]]
self.assertTrue(result.equals(expected))
self.assertTrue(result2.equals(expected))
def test_getitem_group_select(self):
sorted_idx, _ = self.index.sortlevel(0)
self.assertEqual(sorted_idx.get_loc('baz'), slice(3, 4))
self.assertEqual(sorted_idx.get_loc('foo'), slice(0, 2))
def test_get_loc(self):
self.assertEqual(self.index.get_loc(('foo', 'two')), 1)
self.assertEqual(self.index.get_loc(('baz', 'two')), 3)
self.assertRaises(KeyError, self.index.get_loc, ('bar', 'two'))
self.assertRaises(KeyError, self.index.get_loc, 'quux')
self.assertRaises(NotImplementedError, self.index.get_loc, 'foo',
method='nearest')
# 3 levels
index = MultiIndex(levels=[Index(lrange(4)), Index(lrange(4)), Index(
lrange(4))], labels=[np.array([0, 0, 1, 2, 2, 2, 3, 3]), np.array(
[0, 1, 0, 0, 0, 1, 0, 1]), np.array([1, 0, 1, 1, 0, 0, 1, 0])])
self.assertRaises(KeyError, index.get_loc, (1, 1))
self.assertEqual(index.get_loc((2, 0)), slice(3, 5))
def test_get_loc_duplicates(self):
index = Index([2, 2, 2, 2])
result = index.get_loc(2)
expected = slice(0, 4)
self.assertEqual(result, expected)
# self.assertRaises(Exception, index.get_loc, 2)
index = Index(['c', 'a', 'a', 'b', 'b'])
rs = index.get_loc('c')
xp = 0
assert (rs == xp)
def test_get_loc_level(self):
index = MultiIndex(levels=[Index(lrange(4)), Index(lrange(4)), Index(
lrange(4))], labels=[np.array([0, 0, 1, 2, 2, 2, 3, 3]), np.array(
[0, 1, 0, 0, 0, 1, 0, 1]), np.array([1, 0, 1, 1, 0, 0, 1, 0])])
loc, new_index = index.get_loc_level((0, 1))
expected = slice(1, 2)
exp_index = index[expected].droplevel(0).droplevel(0)
self.assertEqual(loc, expected)
self.assertTrue(new_index.equals(exp_index))
loc, new_index = index.get_loc_level((0, 1, 0))
expected = 1
self.assertEqual(loc, expected)
self.assertIsNone(new_index)
self.assertRaises(KeyError, index.get_loc_level, (2, 2))
index = MultiIndex(levels=[[2000], lrange(4)], labels=[np.array(
[0, 0, 0, 0]), np.array([0, 1, 2, 3])])
result, new_index = index.get_loc_level((2000, slice(None, None)))
expected = slice(None, None)
self.assertEqual(result, expected)
self.assertTrue(new_index.equals(index.droplevel(0)))
def test_slice_locs(self):
df = tm.makeTimeDataFrame()
stacked = df.stack()
idx = stacked.index
slob = slice(*idx.slice_locs(df.index[5], df.index[15]))
sliced = stacked[slob]
expected = df[5:16].stack()
tm.assert_almost_equal(sliced.values, expected.values)
slob = slice(*idx.slice_locs(df.index[5] + timedelta(seconds=30),
df.index[15] - timedelta(seconds=30)))
sliced = stacked[slob]
expected = df[6:15].stack()
tm.assert_almost_equal(sliced.values, expected.values)
def test_slice_locs_with_type_mismatch(self):
df = tm.makeTimeDataFrame()
stacked = df.stack()
idx = stacked.index
assertRaisesRegexp(TypeError, '^Level type mismatch', idx.slice_locs,
(1, 3))
assertRaisesRegexp(TypeError, '^Level type mismatch', idx.slice_locs,
df.index[5] + timedelta(seconds=30), (5, 2))
df = tm.makeCustomDataframe(5, 5)
stacked = df.stack()
idx = stacked.index
with assertRaisesRegexp(TypeError, '^Level type mismatch'):
idx.slice_locs(timedelta(seconds=30))
# TODO: Try creating a UnicodeDecodeError in exception message
with assertRaisesRegexp(TypeError, '^Level type mismatch'):
idx.slice_locs(df.index[1], (16, "a"))
def test_slice_locs_not_sorted(self):
index = MultiIndex(levels=[Index(lrange(4)), Index(lrange(4)), Index(
lrange(4))], labels=[np.array([0, 0, 1, 2, 2, 2, 3, 3]), np.array(
[0, 1, 0, 0, 0, 1, 0, 1]), np.array([1, 0, 1, 1, 0, 0, 1, 0])])
assertRaisesRegexp(KeyError, "[Kk]ey length.*greater than MultiIndex"
" lexsort depth", index.slice_locs, (1, 0, 1),
(2, 1, 0))
# works
sorted_index, _ = index.sortlevel(0)
# should there be a test case here???
sorted_index.slice_locs((1, 0, 1), (2, 1, 0))
def test_slice_locs_partial(self):
sorted_idx, _ = self.index.sortlevel(0)
result = sorted_idx.slice_locs(('foo', 'two'), ('qux', 'one'))
self.assertEqual(result, (1, 5))
result = sorted_idx.slice_locs(None, ('qux', 'one'))
self.assertEqual(result, (0, 5))
result = sorted_idx.slice_locs(('foo', 'two'), None)
self.assertEqual(result, (1, len(sorted_idx)))
result = sorted_idx.slice_locs('bar', 'baz')
self.assertEqual(result, (2, 4))
def test_slice_locs_not_contained(self):
# some searchsorted action
index = MultiIndex(levels=[[0, 2, 4, 6], [0, 2, 4]],
labels=[[0, 0, 0, 1, 1, 2, 3, 3, 3],
[0, 1, 2, 1, 2, 2, 0, 1, 2]], sortorder=0)
result = index.slice_locs((1, 0), (5, 2))
self.assertEqual(result, (3, 6))
result = index.slice_locs(1, 5)
self.assertEqual(result, (3, 6))
result = index.slice_locs((2, 2), (5, 2))
self.assertEqual(result, (3, 6))
result = index.slice_locs(2, 5)
self.assertEqual(result, (3, 6))
result = index.slice_locs((1, 0), (6, 3))
self.assertEqual(result, (3, 8))
result = index.slice_locs(-1, 10)
self.assertEqual(result, (0, len(index)))
def test_consistency(self):
# need to construct an overflow
major_axis = lrange(70000)
minor_axis = lrange(10)
major_labels = np.arange(70000)
minor_labels = np.repeat(lrange(10), 7000)
# the fact that it works means it's consistent
index = MultiIndex(levels=[major_axis, minor_axis],
labels=[major_labels, minor_labels])
# inconsistent
major_labels = np.array([0, 0, 1, 1, 1, 2, 2, 3, 3])
minor_labels = np.array([0, 1, 0, 1, 1, 0, 1, 0, 1])
index = MultiIndex(levels=[major_axis, minor_axis],
labels=[major_labels, minor_labels])
self.assertFalse(index.is_unique)
def test_truncate(self):
major_axis = Index(lrange(4))
minor_axis = Index(lrange(2))
major_labels = np.array([0, 0, 1, 2, 3, 3])
minor_labels = np.array([0, 1, 0, 1, 0, 1])
index = MultiIndex(levels=[major_axis, minor_axis],
labels=[major_labels, minor_labels])
result = index.truncate(before=1)
self.assertNotIn('foo', result.levels[0])
self.assertIn(1, result.levels[0])
result = index.truncate(after=1)
self.assertNotIn(2, result.levels[0])
self.assertIn(1, result.levels[0])
result = index.truncate(before=1, after=2)
self.assertEqual(len(result.levels[0]), 2)
# after < before
self.assertRaises(ValueError, index.truncate, 3, 1)
def test_get_indexer(self):
major_axis = Index(lrange(4))
minor_axis = Index(lrange(2))
major_labels = np.array([0, 0, 1, 2, 2, 3, 3])
minor_labels = np.array([0, 1, 0, 0, 1, 0, 1])
index = MultiIndex(levels=[major_axis, minor_axis],
labels=[major_labels, minor_labels])
idx1 = index[:5]
idx2 = index[[1, 3, 5]]
r1 = idx1.get_indexer(idx2)
assert_almost_equal(r1, [1, 3, -1])
r1 = idx2.get_indexer(idx1, method='pad')
e1 = [-1, 0, 0, 1, 1]
assert_almost_equal(r1, e1)
r2 = idx2.get_indexer(idx1[::-1], method='pad')
assert_almost_equal(r2, e1[::-1])
rffill1 = idx2.get_indexer(idx1, method='ffill')
assert_almost_equal(r1, rffill1)
r1 = idx2.get_indexer(idx1, method='backfill')
e1 = [0, 0, 1, 1, 2]
assert_almost_equal(r1, e1)
r2 = idx2.get_indexer(idx1[::-1], method='backfill')
assert_almost_equal(r2, e1[::-1])
rbfill1 = idx2.get_indexer(idx1, method='bfill')
assert_almost_equal(r1, rbfill1)
# pass non-MultiIndex
r1 = idx1.get_indexer(idx2._tuple_index)
rexp1 = idx1.get_indexer(idx2)
assert_almost_equal(r1, rexp1)
r1 = idx1.get_indexer([1, 2, 3])
self.assertTrue((r1 == [-1, -1, -1]).all())
# create index with duplicates
idx1 = Index(lrange(10) + lrange(10))
idx2 = Index(lrange(20))
assertRaisesRegexp(InvalidIndexError, "Reindexing only valid with"
" uniquely valued Index objects", idx1.get_indexer,
idx2)
def test_get_indexer_nearest(self):
midx = MultiIndex.from_tuples([('a', 1), ('b', 2)])
with tm.assertRaises(NotImplementedError):
midx.get_indexer(['a'], method='nearest')
with tm.assertRaises(NotImplementedError):
midx.get_indexer(['a'], method='pad', tolerance=2)
def test_format(self):
self.index.format()
self.index[:0].format()
def test_format_integer_names(self):
index = MultiIndex(levels=[[0, 1], [0, 1]],
labels=[[0, 0, 1, 1], [0, 1, 0, 1]], names=[0, 1])
index.format(names=True)
def test_format_sparse_display(self):
index = MultiIndex(levels=[[0, 1], [0, 1], [0, 1], [0]],
labels=[[0, 0, 0, 1, 1, 1], [0, 0, 1, 0, 0, 1],
[0, 1, 0, 0, 1, 0], [0, 0, 0, 0, 0, 0]])
result = index.format()
self.assertEqual(result[3], '1 0 0 0')
def test_format_sparse_config(self):
warn_filters = warnings.filters
warnings.filterwarnings('ignore', category=FutureWarning,
module=".*format")
# GH1538
pd.set_option('display.multi_sparse', False)
result = self.index.format()
self.assertEqual(result[1], 'foo two')
self.reset_display_options()
warnings.filters = warn_filters
def test_to_hierarchical(self):
index = MultiIndex.from_tuples([(1, 'one'), (1, 'two'), (2, 'one'), (
2, 'two')])
result = index.to_hierarchical(3)
expected = MultiIndex(levels=[[1, 2], ['one', 'two']],
labels=[[0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1]])
tm.assert_index_equal(result, expected)
self.assertEqual(result.names, index.names)
# K > 1
result = index.to_hierarchical(3, 2)
expected = MultiIndex(levels=[[1, 2], ['one', 'two']],
labels=[[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1],
[0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1]])
tm.assert_index_equal(result, expected)
self.assertEqual(result.names, index.names)
# non-sorted
index = MultiIndex.from_tuples([(2, 'c'), (1, 'b'),
(2, 'a'), (2, 'b')],
names=['N1', 'N2'])
result = index.to_hierarchical(2)
expected = MultiIndex.from_tuples([(2, 'c'), (2, 'c'), (1, 'b'),
(1, 'b'),
(2, 'a'), (2, 'a'),
(2, 'b'), (2, 'b')],
names=['N1', 'N2'])
tm.assert_index_equal(result, expected)
self.assertEqual(result.names, index.names)
def test_bounds(self):
self.index._bounds
def test_equals(self):
self.assertTrue(self.index.equals(self.index))
self.assertTrue(self.index.equal_levels(self.index))
self.assertFalse(self.index.equals(self.index[:-1]))
self.assertTrue(self.index.equals(self.index._tuple_index))
# different number of levels
index = MultiIndex(levels=[Index(lrange(4)), Index(lrange(4)), Index(
lrange(4))], labels=[np.array([0, 0, 1, 2, 2, 2, 3, 3]), np.array(
[0, 1, 0, 0, 0, 1, 0, 1]), np.array([1, 0, 1, 1, 0, 0, 1, 0])])
index2 = MultiIndex(levels=index.levels[:-1], labels=index.labels[:-1])
self.assertFalse(index.equals(index2))
self.assertFalse(index.equal_levels(index2))
# levels are different
major_axis = Index(lrange(4))
minor_axis = Index(lrange(2))
major_labels = np.array([0, 0, 1, 2, 2, 3])
minor_labels = np.array([0, 1, 0, 0, 1, 0])
index = MultiIndex(levels=[major_axis, minor_axis],
labels=[major_labels, minor_labels])
self.assertFalse(self.index.equals(index))
self.assertFalse(self.index.equal_levels(index))
# some of the labels are different
major_axis = Index(['foo', 'bar', 'baz', 'qux'])
minor_axis = Index(['one', 'two'])
major_labels = np.array([0, 0, 2, 2, 3, 3])
minor_labels = np.array([0, 1, 0, 1, 0, 1])
index = MultiIndex(levels=[major_axis, minor_axis],
labels=[major_labels, minor_labels])
self.assertFalse(self.index.equals(index))
def test_identical(self):
mi = self.index.copy()
mi2 = self.index.copy()
self.assertTrue(mi.identical(mi2))
mi = mi.set_names(['new1', 'new2'])
self.assertTrue(mi.equals(mi2))
self.assertFalse(mi.identical(mi2))
mi2 = mi2.set_names(['new1', 'new2'])
self.assertTrue(mi.identical(mi2))
mi3 = Index(mi.tolist(), names=mi.names)
mi4 = Index(mi.tolist(), names=mi.names, tupleize_cols=False)
self.assertTrue(mi.identical(mi3))
self.assertFalse(mi.identical(mi4))
self.assertTrue(mi.equals(mi4))
def test_is_(self):
mi = MultiIndex.from_tuples(lzip(range(10), range(10)))
self.assertTrue(mi.is_(mi))
self.assertTrue(mi.is_(mi.view()))
self.assertTrue(mi.is_(mi.view().view().view().view()))
mi2 = mi.view()
# names are metadata, they don't change id
mi2.names = ["A", "B"]
self.assertTrue(mi2.is_(mi))
self.assertTrue(mi.is_(mi2))
self.assertTrue(mi.is_(mi.set_names(["C", "D"])))
mi2 = mi.view()
mi2.set_names(["E", "F"], inplace=True)
self.assertTrue(mi.is_(mi2))
# levels are inherent properties, they change identity
mi3 = mi2.set_levels([lrange(10), lrange(10)])
self.assertFalse(mi3.is_(mi2))
# shouldn't change
self.assertTrue(mi2.is_(mi))
mi4 = mi3.view()
mi4.set_levels([[1 for _ in range(10)], lrange(10)], inplace=True)
self.assertFalse(mi4.is_(mi3))
mi5 = mi.view()
mi5.set_levels(mi5.levels, inplace=True)
self.assertFalse(mi5.is_(mi))
def test_union(self):
piece1 = self.index[:5][::-1]
piece2 = self.index[3:]
the_union = piece1 | piece2
tups = sorted(self.index._tuple_index)
expected = MultiIndex.from_tuples(tups)
self.assertTrue(the_union.equals(expected))
# corner case, pass self or empty thing:
the_union = self.index.union(self.index)
self.assertIs(the_union, self.index)
the_union = self.index.union(self.index[:0])
self.assertIs(the_union, self.index)
# won't work in python 3
# tuples = self.index._tuple_index
# result = self.index[:4] | tuples[4:]
# self.assertTrue(result.equals(tuples))
# not valid for python 3
# def test_union_with_regular_index(self):
# other = Index(['A', 'B', 'C'])
# result = other.union(self.index)
# self.assertIn(('foo', 'one'), result)
# self.assertIn('B', result)
# result2 = self.index.union(other)
# self.assertTrue(result.equals(result2))
def test_intersection(self):
piece1 = self.index[:5][::-1]
piece2 = self.index[3:]
the_int = piece1 & piece2
tups = sorted(self.index[3:5]._tuple_index)
expected = MultiIndex.from_tuples(tups)
self.assertTrue(the_int.equals(expected))
# corner case, pass self
the_int = self.index.intersection(self.index)
self.assertIs(the_int, self.index)
# empty intersection: disjoint
empty = self.index[:2] & self.index[2:]
expected = self.index[:0]
self.assertTrue(empty.equals(expected))
# can't do in python 3
# tuples = self.index._tuple_index
# result = self.index & tuples
# self.assertTrue(result.equals(tuples))
def test_difference(self):
first = self.index
result = first.difference(self.index[-3:])
# - API change GH 8226
with tm.assert_produces_warning():
first - self.index[-3:]
with tm.assert_produces_warning():
self.index[-3:] - first
with tm.assert_produces_warning():
self.index[-3:] - first.tolist()
self.assertRaises(TypeError, lambda: first.tolist() - self.index[-3:])
expected = MultiIndex.from_tuples(sorted(self.index[:-3].values),
sortorder=0,
names=self.index.names)
tm.assertIsInstance(result, MultiIndex)
self.assertTrue(result.equals(expected))
self.assertEqual(result.names, self.index.names)
# empty difference: reflexive
result = self.index.difference(self.index)
expected = self.index[:0]
self.assertTrue(result.equals(expected))
self.assertEqual(result.names, self.index.names)
# empty difference: superset
result = self.index[-3:].difference(self.index)
expected = self.index[:0]
self.assertTrue(result.equals(expected))
self.assertEqual(result.names, self.index.names)
# empty difference: degenerate
result = self.index[:0].difference(self.index)
expected = self.index[:0]
self.assertTrue(result.equals(expected))
self.assertEqual(result.names, self.index.names)
# names not the same
chunklet = self.index[-3:]
chunklet.names = ['foo', 'baz']
result = first.difference(chunklet)
self.assertEqual(result.names, (None, None))
# empty, but non-equal
result = self.index.difference(self.index.sortlevel(1)[0])
self.assertEqual(len(result), 0)
        # calling difference with a non-MultiIndex should not raise
result = first.difference(first._tuple_index)
self.assertTrue(result.equals(first[:0]))
# name from empty array
result = first.difference([])
self.assertTrue(first.equals(result))
self.assertEqual(first.names, result.names)
# name from non-empty array
result = first.difference([('foo', 'one')])
expected = pd.MultiIndex.from_tuples([('bar', 'one'), ('baz', 'two'), (
'foo', 'two'), ('qux', 'one'), ('qux', 'two')])
expected.names = first.names
self.assertEqual(first.names, result.names)
assertRaisesRegexp(TypeError, "other must be a MultiIndex or a list"
" of tuples", first.difference, [1, 2, 3, 4, 5])
def test_from_tuples(self):
assertRaisesRegexp(TypeError, 'Cannot infer number of levels from'
' empty list', MultiIndex.from_tuples, [])
idx = MultiIndex.from_tuples(((1, 2), (3, 4)), names=['a', 'b'])
self.assertEqual(len(idx), 2)
def test_argsort(self):
result = self.index.argsort()
expected = self.index._tuple_index.argsort()
tm.assert_numpy_array_equal(result, expected)
def test_sortlevel(self):
import random
tuples = list(self.index)
random.shuffle(tuples)
index = MultiIndex.from_tuples(tuples)
sorted_idx, _ = index.sortlevel(0)
expected = MultiIndex.from_tuples(sorted(tuples))
self.assertTrue(sorted_idx.equals(expected))
sorted_idx, _ = index.sortlevel(0, ascending=False)
self.assertTrue(sorted_idx.equals(expected[::-1]))
sorted_idx, _ = index.sortlevel(1)
by1 = sorted(tuples, key=lambda x: (x[1], x[0]))
expected = MultiIndex.from_tuples(by1)
self.assertTrue(sorted_idx.equals(expected))
sorted_idx, _ = index.sortlevel(1, ascending=False)
self.assertTrue(sorted_idx.equals(expected[::-1]))
def test_sortlevel_not_sort_remaining(self):
mi = MultiIndex.from_tuples([[1, 1, 3], [1, 1, 1]], names=list('ABC'))
sorted_idx, _ = mi.sortlevel('A', sort_remaining=False)
self.assertTrue(sorted_idx.equals(mi))
def test_sortlevel_deterministic(self):
tuples = [('bar', 'one'), ('foo', 'two'), ('qux', 'two'),
('foo', 'one'), ('baz', 'two'), ('qux', 'one')]
index = MultiIndex.from_tuples(tuples)
sorted_idx, _ = index.sortlevel(0)
expected = MultiIndex.from_tuples(sorted(tuples))
self.assertTrue(sorted_idx.equals(expected))
sorted_idx, _ = index.sortlevel(0, ascending=False)
self.assertTrue(sorted_idx.equals(expected[::-1]))
sorted_idx, _ = index.sortlevel(1)
by1 = sorted(tuples, key=lambda x: (x[1], x[0]))
expected = MultiIndex.from_tuples(by1)
self.assertTrue(sorted_idx.equals(expected))
sorted_idx, _ = index.sortlevel(1, ascending=False)
self.assertTrue(sorted_idx.equals(expected[::-1]))
def test_dims(self):
pass
def test_drop(self):
dropped = self.index.drop([('foo', 'two'), ('qux', 'one')])
index = MultiIndex.from_tuples([('foo', 'two'), ('qux', 'one')])
dropped2 = self.index.drop(index)
expected = self.index[[0, 2, 3, 5]]
self.assert_index_equal(dropped, expected)
self.assert_index_equal(dropped2, expected)
dropped = self.index.drop(['bar'])
expected = self.index[[0, 1, 3, 4, 5]]
self.assert_index_equal(dropped, expected)
dropped = self.index.drop('foo')
expected = self.index[[2, 3, 4, 5]]
self.assert_index_equal(dropped, expected)
index = MultiIndex.from_tuples([('bar', 'two')])
self.assertRaises(KeyError, self.index.drop, [('bar', 'two')])
self.assertRaises(KeyError, self.index.drop, index)
self.assertRaises(KeyError, self.index.drop, ['foo', 'two'])
# partially correct argument
mixed_index = MultiIndex.from_tuples([('qux', 'one'), ('bar', 'two')])
self.assertRaises(KeyError, self.index.drop, mixed_index)
# error='ignore'
dropped = self.index.drop(index, errors='ignore')
expected = self.index[[0, 1, 2, 3, 4, 5]]
self.assert_index_equal(dropped, expected)
dropped = self.index.drop(mixed_index, errors='ignore')
expected = self.index[[0, 1, 2, 3, 5]]
self.assert_index_equal(dropped, expected)
dropped = self.index.drop(['foo', 'two'], errors='ignore')
expected = self.index[[2, 3, 4, 5]]
self.assert_index_equal(dropped, expected)
# mixed partial / full drop
dropped = self.index.drop(['foo', ('qux', 'one')])
expected = self.index[[2, 3, 5]]
self.assert_index_equal(dropped, expected)
# mixed partial / full drop / error='ignore'
mixed_index = ['foo', ('qux', 'one'), 'two']
self.assertRaises(KeyError, self.index.drop, mixed_index)
dropped = self.index.drop(mixed_index, errors='ignore')
expected = self.index[[2, 3, 5]]
self.assert_index_equal(dropped, expected)
def test_droplevel_with_names(self):
index = self.index[self.index.get_loc('foo')]
dropped = index.droplevel(0)
self.assertEqual(dropped.name, 'second')
index = MultiIndex(levels=[Index(lrange(4)), Index(lrange(4)), Index(
lrange(4))], labels=[np.array([0, 0, 1, 2, 2, 2, 3, 3]), np.array(
[0, 1, 0, 0, 0, 1, 0, 1]), np.array([1, 0, 1, 1, 0, 0, 1, 0])],
names=['one', 'two', 'three'])
dropped = index.droplevel(0)
self.assertEqual(dropped.names, ('two', 'three'))
dropped = index.droplevel('two')
expected = index.droplevel(1)
self.assertTrue(dropped.equals(expected))
def test_droplevel_multiple(self):
index = MultiIndex(levels=[Index(lrange(4)), Index(lrange(4)), Index(
lrange(4))], labels=[np.array([0, 0, 1, 2, 2, 2, 3, 3]), np.array(
[0, 1, 0, 0, 0, 1, 0, 1]), np.array([1, 0, 1, 1, 0, 0, 1, 0])],
names=['one', 'two', 'three'])
dropped = index[:2].droplevel(['three', 'one'])
expected = index[:2].droplevel(2).droplevel(0)
self.assertTrue(dropped.equals(expected))
def test_drop_not_lexsorted(self):
# GH 12078
# define the lexsorted version of the multi-index
tuples = [('a', ''), ('b1', 'c1'), ('b2', 'c2')]
lexsorted_mi = MultiIndex.from_tuples(tuples, names=['b', 'c'])
self.assertTrue(lexsorted_mi.is_lexsorted())
# and the not-lexsorted version
df = pd.DataFrame(columns=['a', 'b', 'c', 'd'],
data=[[1, 'b1', 'c1', 3], [1, 'b2', 'c2', 4]])
df = df.pivot_table(index='a', columns=['b', 'c'], values='d')
df = df.reset_index()
not_lexsorted_mi = df.columns
self.assertFalse(not_lexsorted_mi.is_lexsorted())
# compare the results
self.assert_index_equal(lexsorted_mi, not_lexsorted_mi)
with self.assert_produces_warning(PerformanceWarning):
self.assert_index_equal(lexsorted_mi.drop('a'),
not_lexsorted_mi.drop('a'))
def test_insert(self):
# key contained in all levels
new_index = self.index.insert(0, ('bar', 'two'))
self.assertTrue(new_index.equal_levels(self.index))
self.assertEqual(new_index[0], ('bar', 'two'))
# key not contained in all levels
new_index = self.index.insert(0, ('abc', 'three'))
tm.assert_numpy_array_equal(new_index.levels[0],
list(self.index.levels[0]) + ['abc'])
tm.assert_numpy_array_equal(new_index.levels[1],
list(self.index.levels[1]) + ['three'])
self.assertEqual(new_index[0], ('abc', 'three'))
# key wrong length
assertRaisesRegexp(ValueError, "Item must have length equal to number"
" of levels", self.index.insert, 0, ('foo2', ))
left = pd.DataFrame([['a', 'b', 0], ['b', 'd', 1]],
columns=['1st', '2nd', '3rd'])
left.set_index(['1st', '2nd'], inplace=True)
ts = left['3rd'].copy(deep=True)
left.loc[('b', 'x'), '3rd'] = 2
left.loc[('b', 'a'), '3rd'] = -1
left.loc[('b', 'b'), '3rd'] = 3
left.loc[('a', 'x'), '3rd'] = 4
left.loc[('a', 'w'), '3rd'] = 5
left.loc[('a', 'a'), '3rd'] = 6
ts.loc[('b', 'x')] = 2
ts.loc['b', 'a'] = -1
ts.loc[('b', 'b')] = 3
ts.loc['a', 'x'] = 4
ts.loc[('a', 'w')] = 5
ts.loc['a', 'a'] = 6
right = pd.DataFrame([['a', 'b', 0],
['b', 'd', 1],
['b', 'x', 2],
['b', 'a', -1],
['b', 'b', 3],
['a', 'x', 4],
['a', 'w', 5],
['a', 'a', 6]],
columns=['1st', '2nd', '3rd'])
right.set_index(['1st', '2nd'], inplace=True)
# FIXME data types changes to float because
# of intermediate nan insertion;
tm.assert_frame_equal(left, right, check_dtype=False)
tm.assert_series_equal(ts, right['3rd'])
# GH9250
idx = [('test1', i) for i in range(5)] + \
[('test2', i) for i in range(6)] + \
[('test', 17), ('test', 18)]
left = pd.Series(np.linspace(0, 10, 11),
pd.MultiIndex.from_tuples(idx[:-2]))
left.loc[('test', 17)] = 11
left.ix[('test', 18)] = 12
right = pd.Series(np.linspace(0, 12, 13),
pd.MultiIndex.from_tuples(idx))
tm.assert_series_equal(left, right)
def test_take_preserve_name(self):
taken = self.index.take([3, 0, 1])
self.assertEqual(taken.names, self.index.names)
def test_join_level(self):
def _check_how(other, how):
join_index, lidx, ridx = other.join(self.index, how=how,
level='second',
return_indexers=True)
exp_level = other.join(self.index.levels[1], how=how)
self.assertTrue(join_index.levels[0].equals(self.index.levels[0]))
self.assertTrue(join_index.levels[1].equals(exp_level))
# pare down levels
mask = np.array(
[x[1] in exp_level for x in self.index], dtype=bool)
exp_values = self.index.values[mask]
tm.assert_numpy_array_equal(join_index.values, exp_values)
if how in ('outer', 'inner'):
join_index2, ridx2, lidx2 = \
self.index.join(other, how=how, level='second',
return_indexers=True)
self.assertTrue(join_index.equals(join_index2))
tm.assert_numpy_array_equal(lidx, lidx2)
tm.assert_numpy_array_equal(ridx, ridx2)
tm.assert_numpy_array_equal(join_index2.values, exp_values)
def _check_all(other):
_check_how(other, 'outer')
_check_how(other, 'inner')
_check_how(other, 'left')
_check_how(other, 'right')
_check_all(Index(['three', 'one', 'two']))
_check_all(Index(['one']))
_check_all(Index(['one', 'three']))
# some corner cases
idx = Index(['three', 'one', 'two'])
result = idx.join(self.index, level='second')
tm.assertIsInstance(result, MultiIndex)
assertRaisesRegexp(TypeError, "Join.*MultiIndex.*ambiguous",
self.index.join, self.index, level=1)
def test_join_self(self):
kinds = 'outer', 'inner', 'left', 'right'
for kind in kinds:
res = self.index
joined = res.join(res, how=kind)
self.assertIs(res, joined)
def test_join_multi(self):
# GH 10665
midx = pd.MultiIndex.from_product(
[np.arange(4), np.arange(4)], names=['a', 'b'])
idx = pd.Index([1, 2, 5], name='b')
# inner
jidx, lidx, ridx = midx.join(idx, how='inner', return_indexers=True)
exp_idx = pd.MultiIndex.from_product(
[np.arange(4), [1, 2]], names=['a', 'b'])
exp_lidx = np.array([1, 2, 5, 6, 9, 10, 13, 14])
exp_ridx = np.array([0, 1, 0, 1, 0, 1, 0, 1])
self.assert_index_equal(jidx, exp_idx)
self.assert_numpy_array_equal(lidx, exp_lidx)
self.assert_numpy_array_equal(ridx, exp_ridx)
# flip
jidx, ridx, lidx = idx.join(midx, how='inner', return_indexers=True)
self.assert_index_equal(jidx, exp_idx)
self.assert_numpy_array_equal(lidx, exp_lidx)
self.assert_numpy_array_equal(ridx, exp_ridx)
# keep MultiIndex
jidx, lidx, ridx = midx.join(idx, how='left', return_indexers=True)
exp_ridx = np.array([-1, 0, 1, -1, -1, 0, 1, -1, -1, 0, 1, -1, -1, 0,
1, -1])
self.assert_index_equal(jidx, midx)
self.assertIsNone(lidx)
self.assert_numpy_array_equal(ridx, exp_ridx)
# flip
jidx, ridx, lidx = idx.join(midx, how='right', return_indexers=True)
self.assert_index_equal(jidx, midx)
self.assertIsNone(lidx)
self.assert_numpy_array_equal(ridx, exp_ridx)
def test_reindex(self):
result, indexer = self.index.reindex(list(self.index[:4]))
tm.assertIsInstance(result, MultiIndex)
self.check_level_names(result, self.index[:4].names)
result, indexer = self.index.reindex(list(self.index))
tm.assertIsInstance(result, MultiIndex)
self.assertIsNone(indexer)
self.check_level_names(result, self.index.names)
def test_reindex_level(self):
idx = Index(['one'])
target, indexer = self.index.reindex(idx, level='second')
target2, indexer2 = idx.reindex(self.index, level='second')
exp_index = self.index.join(idx, level='second', how='right')
exp_index2 = self.index.join(idx, level='second', how='left')
self.assertTrue(target.equals(exp_index))
exp_indexer = np.array([0, 2, 4])
tm.assert_numpy_array_equal(indexer, exp_indexer)
self.assertTrue(target2.equals(exp_index2))
exp_indexer2 = np.array([0, -1, 0, -1, 0, -1])
tm.assert_numpy_array_equal(indexer2, exp_indexer2)
assertRaisesRegexp(TypeError, "Fill method not supported",
self.index.reindex, self.index, method='pad',
level='second')
assertRaisesRegexp(TypeError, "Fill method not supported", idx.reindex,
idx, method='bfill', level='first')
def test_duplicates(self):
self.assertFalse(self.index.has_duplicates)
self.assertTrue(self.index.append(self.index).has_duplicates)
index = MultiIndex(levels=[[0, 1], [0, 1, 2]], labels=[
[0, 0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 0, 1, 2]])
self.assertTrue(index.has_duplicates)
# GH 9075
t = [(u('x'), u('out'), u('z'), 5, u('y'), u('in'), u('z'), 169),
(u('x'), u('out'), u('z'), 7, u('y'), u('in'), u('z'), 119),
(u('x'), u('out'), u('z'), 9, u('y'), u('in'), u('z'), 135),
(u('x'), u('out'), u('z'), 13, u('y'), u('in'), u('z'), 145),
(u('x'), u('out'), u('z'), 14, u('y'), u('in'), u('z'), 158),
(u('x'), u('out'), u('z'), 16, u('y'), u('in'), u('z'), 122),
(u('x'), u('out'), u('z'), 17, u('y'), u('in'), u('z'), 160),
(u('x'), u('out'), u('z'), 18, u('y'), u('in'), u('z'), 180),
(u('x'), u('out'), u('z'), 20, u('y'), u('in'), u('z'), 143),
(u('x'), u('out'), u('z'), 21, u('y'), u('in'), u('z'), 128),
(u('x'), u('out'), u('z'), 22, u('y'), u('in'), u('z'), 129),
(u('x'), u('out'), u('z'), 25, u('y'), u('in'), u('z'), 111),
(u('x'), u('out'), u('z'), 28, u('y'), u('in'), u('z'), 114),
(u('x'), u('out'), u('z'), 29, u('y'), u('in'), u('z'), 121),
(u('x'), u('out'), u('z'), 31, u('y'), u('in'), u('z'), 126),
(u('x'), u('out'), u('z'), 32, u('y'), u('in'), u('z'), 155),
(u('x'), u('out'), u('z'), 33, u('y'), u('in'), u('z'), 123),
(u('x'), u('out'), u('z'), 12, u('y'), u('in'), u('z'), 144)]
index = pd.MultiIndex.from_tuples(t)
self.assertFalse(index.has_duplicates)
# handle int64 overflow if possible
def check(nlevels, with_nulls):
labels = np.tile(np.arange(500), 2)
level = np.arange(500)
if with_nulls: # inject some null values
labels[500] = -1 # common nan value
labels = list(labels.copy() for i in range(nlevels))
for i in range(nlevels):
labels[i][500 + i - nlevels // 2] = -1
labels += [np.array([-1, 1]).repeat(500)]
else:
labels = [labels] * nlevels + [np.arange(2).repeat(500)]
levels = [level] * nlevels + [[0, 1]]
# no dups
index = MultiIndex(levels=levels, labels=labels)
self.assertFalse(index.has_duplicates)
# with a dup
if with_nulls:
f = lambda a: np.insert(a, 1000, a[0])
labels = list(map(f, labels))
index = MultiIndex(levels=levels, labels=labels)
else:
values = index.values.tolist()
index = MultiIndex.from_tuples(values + [values[0]])
self.assertTrue(index.has_duplicates)
# no overflow
check(4, False)
check(4, True)
# overflow possible
check(8, False)
check(8, True)
# GH 9125
n, k = 200, 5000
levels = [np.arange(n), tm.makeStringIndex(n), 1000 + np.arange(n)]
labels = [np.random.choice(n, k * n) for lev in levels]
mi = MultiIndex(levels=levels, labels=labels)
for keep in ['first', 'last', False]:
left = mi.duplicated(keep=keep)
right = pd.lib.duplicated(mi.values, keep=keep)
tm.assert_numpy_array_equal(left, right)
# GH5873
for a in [101, 102]:
mi = MultiIndex.from_arrays([[101, a], [3.5, np.nan]])
self.assertFalse(mi.has_duplicates)
self.assertEqual(mi.get_duplicates(), [])
tm.assert_numpy_array_equal(mi.duplicated(), np.zeros(
2, dtype='bool'))
for n in range(1, 6): # 1st level shape
for m in range(1, 5): # 2nd level shape
# all possible unique combinations, including nan
lab = product(range(-1, n), range(-1, m))
mi = MultiIndex(levels=[list('abcde')[:n], list('WXYZ')[:m]],
labels=np.random.permutation(list(lab)).T)
self.assertEqual(len(mi), (n + 1) * (m + 1))
self.assertFalse(mi.has_duplicates)
self.assertEqual(mi.get_duplicates(), [])
tm.assert_numpy_array_equal(mi.duplicated(), np.zeros(
len(mi), dtype='bool'))
def test_duplicate_meta_data(self):
# GH 10115
index = MultiIndex(levels=[[0, 1], [0, 1, 2]], labels=[
[0, 0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 0, 1, 2]])
for idx in [index,
index.set_names([None, None]),
index.set_names([None, 'Num']),
index.set_names(['Upper', 'Num']), ]:
self.assertTrue(idx.has_duplicates)
self.assertEqual(idx.drop_duplicates().names, idx.names)
def test_tolist(self):
result = self.index.tolist()
exp = list(self.index.values)
self.assertEqual(result, exp)
def test_repr_with_unicode_data(self):
with pd.core.config.option_context("display.encoding", 'UTF-8'):
d = {"a": [u("\u05d0"), 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]}
index = pd.DataFrame(d).set_index(["a", "b"]).index
            self.assertFalse("\\u" in repr(index))  # we don't want unicode-escaped
def test_repr_roundtrip(self):
mi = MultiIndex.from_product([list('ab'), range(3)],
names=['first', 'second'])
str(mi)
if PY3:
tm.assert_index_equal(eval(repr(mi)), mi, exact=True)
else:
result = eval(repr(mi))
# string coerces to unicode
tm.assert_index_equal(result, mi, exact=False)
self.assertEqual(
mi.get_level_values('first').inferred_type, 'string')
self.assertEqual(
result.get_level_values('first').inferred_type, 'unicode')
mi_u = MultiIndex.from_product(
[list(u'ab'), range(3)], names=['first', 'second'])
result = eval(repr(mi_u))
tm.assert_index_equal(result, mi_u, exact=True)
# formatting
if PY3:
str(mi)
else:
compat.text_type(mi)
# long format
mi = MultiIndex.from_product([list('abcdefg'), range(10)],
names=['first', 'second'])
result = str(mi)
if PY3:
tm.assert_index_equal(eval(repr(mi)), mi, exact=True)
else:
result = eval(repr(mi))
# string coerces to unicode
tm.assert_index_equal(result, mi, exact=False)
self.assertEqual(
mi.get_level_values('first').inferred_type, 'string')
self.assertEqual(
result.get_level_values('first').inferred_type, 'unicode')
        mi_u = MultiIndex.from_product(
[list(u'abcdefg'), range(10)], names=['first', 'second'])
result = eval(repr(mi_u))
tm.assert_index_equal(result, mi_u, exact=True)
def test_str(self):
# tested elsewhere
pass
def test_unicode_string_with_unicode(self):
d = {"a": [u("\u05d0"), 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]}
idx = pd.DataFrame(d).set_index(["a", "b"]).index
if PY3:
str(idx)
else:
compat.text_type(idx)
def test_bytestring_with_unicode(self):
d = {"a": [u("\u05d0"), 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]}
idx = pd.DataFrame(d).set_index(["a", "b"]).index
if PY3:
bytes(idx)
else:
str(idx)
def test_slice_keep_name(self):
x = MultiIndex.from_tuples([('a', 'b'), (1, 2), ('c', 'd')],
names=['x', 'y'])
self.assertEqual(x[1:].names, x.names)
def test_isnull_behavior(self):
# should not segfault GH5123
# NOTE: if MI representation changes, may make sense to allow
# isnull(MI)
with tm.assertRaises(NotImplementedError):
pd.isnull(self.index)
def test_level_setting_resets_attributes(self):
ind = MultiIndex.from_arrays([
['A', 'A', 'B', 'B', 'B'], [1, 2, 1, 2, 3]
])
assert ind.is_monotonic
ind.set_levels([['A', 'B', 'A', 'A', 'B'], [2, 1, 3, -2, 5]],
inplace=True)
# if this fails, probably didn't reset the cache correctly.
assert not ind.is_monotonic
def test_isin(self):
values = [('foo', 2), ('bar', 3), ('quux', 4)]
idx = MultiIndex.from_arrays([['qux', 'baz', 'foo', 'bar'], np.arange(
4)])
result = idx.isin(values)
expected = np.array([False, False, True, True])
tm.assert_numpy_array_equal(result, expected)
# empty, return dtype bool
idx = MultiIndex.from_arrays([[], []])
result = idx.isin(values)
self.assertEqual(len(result), 0)
self.assertEqual(result.dtype, np.bool_)
def test_isin_nan(self):
idx = MultiIndex.from_arrays([['foo', 'bar'], [1.0, np.nan]])
tm.assert_numpy_array_equal(idx.isin([('bar', np.nan)]),
[False, False])
tm.assert_numpy_array_equal(idx.isin([('bar', float('nan'))]),
[False, False])
def test_isin_level_kwarg(self):
idx = MultiIndex.from_arrays([['qux', 'baz', 'foo', 'bar'], np.arange(
4)])
vals_0 = ['foo', 'bar', 'quux']
vals_1 = [2, 3, 10]
expected = np.array([False, False, True, True])
tm.assert_numpy_array_equal(expected, idx.isin(vals_0, level=0))
tm.assert_numpy_array_equal(expected, idx.isin(vals_0, level=-2))
tm.assert_numpy_array_equal(expected, idx.isin(vals_1, level=1))
tm.assert_numpy_array_equal(expected, idx.isin(vals_1, level=-1))
self.assertRaises(IndexError, idx.isin, vals_0, level=5)
self.assertRaises(IndexError, idx.isin, vals_0, level=-5)
self.assertRaises(KeyError, idx.isin, vals_0, level=1.0)
self.assertRaises(KeyError, idx.isin, vals_1, level=-1.0)
self.assertRaises(KeyError, idx.isin, vals_1, level='A')
idx.names = ['A', 'B']
tm.assert_numpy_array_equal(expected, idx.isin(vals_0, level='A'))
tm.assert_numpy_array_equal(expected, idx.isin(vals_1, level='B'))
self.assertRaises(KeyError, idx.isin, vals_1, level='C')
def test_reindex_preserves_names_when_target_is_list_or_ndarray(self):
# GH6552
idx = self.index.copy()
target = idx.copy()
idx.names = target.names = [None, None]
other_dtype = pd.MultiIndex.from_product([[1, 2], [3, 4]])
# list & ndarray cases
self.assertEqual(idx.reindex([])[0].names, [None, None])
self.assertEqual(idx.reindex(np.array([]))[0].names, [None, None])
self.assertEqual(idx.reindex(target.tolist())[0].names, [None, None])
self.assertEqual(idx.reindex(target.values)[0].names, [None, None])
self.assertEqual(
idx.reindex(other_dtype.tolist())[0].names, [None, None])
self.assertEqual(
idx.reindex(other_dtype.values)[0].names, [None, None])
idx.names = ['foo', 'bar']
self.assertEqual(idx.reindex([])[0].names, ['foo', 'bar'])
self.assertEqual(idx.reindex(np.array([]))[0].names, ['foo', 'bar'])
self.assertEqual(idx.reindex(target.tolist())[0].names, ['foo', 'bar'])
self.assertEqual(idx.reindex(target.values)[0].names, ['foo', 'bar'])
self.assertEqual(
idx.reindex(other_dtype.tolist())[0].names, ['foo', 'bar'])
self.assertEqual(
idx.reindex(other_dtype.values)[0].names, ['foo', 'bar'])
def test_reindex_lvl_preserves_names_when_target_is_list_or_array(self):
# GH7774
idx = pd.MultiIndex.from_product([[0, 1], ['a', 'b']],
names=['foo', 'bar'])
self.assertEqual(idx.reindex([], level=0)[0].names, ['foo', 'bar'])
self.assertEqual(idx.reindex([], level=1)[0].names, ['foo', 'bar'])
def test_reindex_lvl_preserves_type_if_target_is_empty_list_or_array(self):
# GH7774
idx = pd.MultiIndex.from_product([[0, 1], ['a', 'b']])
self.assertEqual(idx.reindex([], level=0)[0].levels[0].dtype.type,
np.int64)
self.assertEqual(idx.reindex([], level=1)[0].levels[1].dtype.type,
np.object_)
def test_groupby(self):
groups = self.index.groupby(np.array([1, 1, 1, 2, 2, 2]))
labels = self.index.get_values().tolist()
exp = {1: labels[:3], 2: labels[3:]}
tm.assert_dict_equal(groups, exp)
# GH5620
groups = self.index.groupby(self.index)
exp = dict((key, [key]) for key in self.index)
tm.assert_dict_equal(groups, exp)
def test_index_name_retained(self):
# GH9857
result = pd.DataFrame({'x': [1, 2, 6],
'y': [2, 2, 8],
'z': [-5, 0, 5]})
result = result.set_index('z')
result.loc[10] = [9, 10]
df_expected = pd.DataFrame({'x': [1, 2, 6, 9],
'y': [2, 2, 8, 10],
'z': [-5, 0, 5, 10]})
df_expected = df_expected.set_index('z')
tm.assert_frame_equal(result, df_expected)
def test_equals_operator(self):
# GH9785
self.assertTrue((self.index == self.index).all())
| pjryan126/solid-start-careers | store/api/zillow/venv/lib/python2.7/site-packages/pandas/tests/indexes/test_multi.py | Python | gpl-2.0 | 78,143 | 0.000013 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 17/2/2015
@author: Antonio Hermosilla Rodrigo.
@contact: [email protected]
@organization: Antonio Hermosilla Rodrigo.
@copyright: (C) 2015 by Antonio Hermosilla Rodrigo
@version: 1.0.0
'''
import sys
from PyQt4 import QtCore
from PyQt4 import QtGui
from PyQt4 import uic
from os import sep,pardir,getcwd
from os.path import normpath
import Geometrias.PuntoUTM
import Proyecciones.UTM2Geo
import Geodesia.EGM.CalcularOndulacion
class UTM2Geo(QtGui.QWidget):
'''
classdocs
'''
__rutaroot=None
__msgBoxErr=None
__pLat=None
__pLon=None
__pw=None
__pN=None
def __init__(self, parent=None):
'''
Constructor
'''
super(UTM2Geo, self).__init__()
        #Load the UI form for this controller.
self.__rutaroot=normpath(getcwd() + sep + pardir)
uic.loadUi(self.__rutaroot+'/Formularios/UTM2Geo.ui', self)
self.__msgBoxErr=QtGui.QMessageBox()
self.__msgBoxErr.setWindowTitle("ERROR")
self.__CargarElipsoides()
self.__tabChanged()
self.__setPrecision()
self.connect(self.pushButton, QtCore.SIGNAL("clicked()"), self.Calcular)
self.connect(self.pushButton_4, QtCore.SIGNAL("clicked()"), self.launch)
self.connect(self.tabWidget, QtCore.SIGNAL("currentChanged (int)"), self.__tabChanged)
self.connect(self.pushButton_2, QtCore.SIGNAL("clicked()"), self.AbrirFicheroUTM)
self.connect(self.pushButton_3, QtCore.SIGNAL("clicked()"), self.AbrirFicheroGeo)
self.connect(self.spinBox_2, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
self.connect(self.spinBox_3, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
self.connect(self.spinBox_4, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
self.connect(self.spinBox_5, QtCore.SIGNAL("valueChanged (int)"), self.__setPrecision)
def __CargarElipsoides(self):
'''!
'''
import BasesDeDatos.SQLite.SQLiteManager
try:
db=BasesDeDatos.SQLite.SQLiteManager.SQLiteManager(self.__rutaroot+'/Geodesia/Elipsoides/Elipsoides.db')
Nombres=db.ObtenerColumna('Elipsoides','Nombre')
Nombres=[i[0] for i in Nombres]
Nombres.sort()
self.comboBox.addItems(Nombres)
self.comboBox.setCurrentIndex(28)
self.comboBox_2.addItems(Nombres)
self.comboBox_2.setCurrentIndex(28)
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
def Calcular(self):
'''!
'''
putm=None
if self.lineEdit.text()=="":
self.__msgBoxErr.setText("Debe introducir un valor para la X UTM.")
self.__msgBoxErr.exec_()
return
if self.lineEdit_2.text()=="":
self.__msgBoxErr.setText("Debe introducir un valor para la Y UTM.")
self.__msgBoxErr.exec_()
return
try:
putm=Geometrias.PuntoUTM.PuntoUTM(self.lineEdit.text(),self.lineEdit_2.text(),huso=self.spinBox.value())
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
Sal=None
try:
Sal=Proyecciones.UTM2Geo.UTM2Geo(putm, self.comboBox.currentText())
self.lineEdit_3.setText(str(round(Sal.getLatitud(),self.__pLat)))
self.lineEdit_4.setText(str(round(Sal.getLongitud(),self.__pLon)))
self.lineEdit_5.setText(str(round(putm.getConvergenciaMeridianos(),self.__pw)))
self.lineEdit_6.setText(str(putm.getEscalaLocalPunto()))
try:
self.lineEdit_7.setText(str(round(Geodesia.EGM.CalcularOndulacion.CalcularOndulacion(Sal),self.__pN)))
except:
self.lineEdit_7.setText("")
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
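    # --- Hedged sketch (not in the original file): the core conversion that
    # Calcular above drives from the GUI. The PuntoUTM/UTM2Geo signatures
    # follow the calls above; the coordinate values and the ellipsoid name
    # are placeholder assumptions.
    #
    # putm = Geometrias.PuntoUTM.PuntoUTM("435000.0", "4532000.0", huso=30)
    # pgeo = Proyecciones.UTM2Geo.UTM2Geo(putm, "WGS 84")
    # print(pgeo.getLatitud(), pgeo.getLongitud())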
def AbrirFicheroUTM(self):
'''!
'''
        ruta = QtGui.QFileDialog.getOpenFileName(self, 'Open File', '.')
self.lineEdit_9.setText(ruta)
def AbrirFicheroGeo(self):
'''!
'''
        ruta = QtGui.QFileDialog.getSaveFileName(self, 'Save File', '.')
self.lineEdit_10.setText(ruta)
def launch(self):
'''!
'''
        # NOTE: the original wrapped this call in QtCore.QThread, but passing
        # the method's *return value* to QThread just runs it synchronously
        # anyway, so call it directly.
        self.CalcularArchivo()
def CalcularArchivo(self):
'''!
'''
pd=QtGui.QProgressDialog()
if self.lineEdit_9.text()=="":
self.__msgBoxErr.setText("Debe introducir un fichero de coordenadas UTM.")
self.__msgBoxErr.exec_()
return
if self.lineEdit_10.text()=="":
self.__msgBoxErr.setText("Debe introducir un fichero de salida para las coordenadas Geodesicas")
self.__msgBoxErr.exec_()
return
        #Format of the geodetic coordinates file:
        #ID,X,Y,posY,Huso,helip(optional)
pd.show()
pd.setLabelText("Tarea 1..2 Procesando el fichero.")
try:
QtGui.QApplication.processEvents()
sal=Proyecciones.UTM2Geo.UTM2GeoFromFile(self.lineEdit_9.text(), self.comboBox_2.currentText())
except Exception as e:
self.__msgBoxErr.setText(e.__str__())
self.__msgBoxErr.exec_()
return
pg=QtGui.QProgressBar(pd)
pd.setBar(pg)
pg.setMinimum(0)
pg.setMaximum(len(sal))
g=open(self.lineEdit_10.text(),'w')
pd.setLabelText("Tarea 2..2 Escribiendo nuevo fichero.")
cont=0
pg.show()
for i in sal:
QtGui.QApplication.processEvents()
line=""
line+=i[0]+","
line+=str(round(i[2].getLatitud(),self.__pLat))+","
line+=str(round(i[2].getLongitud(),self.__pLon))+","
h=i[2].getAlturaElipsoidal()
            if h is None:
                line+=","
else:
line+=str(h)+","
line+=str(i[1].getHuso())+","
line+=str(round(i[1].getConvergenciaMeridianos(),self.__pw))+","
line+=str(round(i[1].getEscalaLocalPunto(),self.__pw))+","
line+=str(i[1].getZonaUTM())+"\n"
g.write(line)
pg.setValue(cont)
cont+=1
g.close()
pg.hide()
def __setPrecision(self):
'''!
'''
self.__pLat=self.spinBox_2.value()
self.__pLon=self.spinBox_3.value()
self.__pw=self.spinBox_4.value()
self.__pN=self.spinBox_5.value()
def __tabChanged(self):
'''!
'''
        if self.tabWidget.currentIndex()==0:
            self.setFixedSize(319, 490)
        elif self.tabWidget.currentIndex()==1:
            self.setFixedSize(562, 272)
        elif self.tabWidget.currentIndex()==2:
            self.setFixedSize(354, 202)
if __name__ == "__main__":
    #program startup.
    app = QtGui.QApplication(sys.argv)#required in every application that shows dialogs.
    dlg=UTM2Geo()#create an instance of our dialog controller.
dlg.show()
## dlg.exec_()
    sys.exit(app.exec_())#Required. Closing the dialog terminates the application
app.close()
| tonihr/pyGeo | Controladores/UTM2Geo.py | Python | gpl-2.0 | 7,615 | 0.016419 |
# -*- coding: utf-8 -*-
def social_poke(entity, argument):
return True
#- End of function -
| Onirik79/aaritmud | src/socials/social_poke.py | Python | gpl-2.0 | 95 | 0.010526 |
# This file is part of VoltDB.
# Copyright (C) 2008-2013 VoltDB Inc.
#
# This file contains original code and/or modifications of original code.
# Any modifications made by VoltDB Inc. are licensed under the following
# terms and conditions:
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
@VOLT.Command(
bundles = VOLT.AdminBundle(),
description = 'Restore a VoltDB database snapshot.',
arguments = (
VOLT.PathArgument('directory', 'the snapshot server directory', absolute = True),
VOLT.StringArgument('nonce', 'the unique snapshot identifier (nonce)')
)
)
def restore(runner):
columns = [VOLT.FastSerializer.VOLTTYPE_STRING, VOLT.FastSerializer.VOLTTYPE_STRING]
params = [runner.opts.directory, runner.opts.nonce]
response = runner.call_proc('@SnapshotRestore', columns, params)
runner.info('The snapshot was restored.')
print response.table(0).format_table(caption = 'Snapshot Restore Results')
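
# Example invocation (a sketch; the directory and nonce below are
# placeholders, following the two arguments declared above):
#
#   voltadmin restore /home/voltdb/snapshots nightly_20130601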
| vtorshyn/voltdb-shardit-src | voltdb-3.7/lib/python/voltcli/voltadmin.d/restore.py | Python | apache-2.0 | 1,943 | 0.006691 |
# -*- coding: utf-8 -*-
# © 2015 iDT LABS (http://[email protected])
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import test_holidays_compute_days
| VitalPet/hr | hr_holidays_compute_days/tests/__init__.py | Python | agpl-3.0 | 180 | 0 |
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon import tables
from horizon.utils import memoized
from openstack_dashboard import api
from openstack_dashboard import policy
from openstack_dashboard.dashboards.identity.roles \
import forms as project_forms
from openstack_dashboard.dashboards.identity.roles \
import tables as project_tables
class IndexView(tables.DataTableView):
table_class = project_tables.RolesTable
template_name = 'identity/roles/index.html'
page_title = _("Roles")
def get_data(self):
roles = []
filters = self.get_filters()
if policy.check((("identity", "identity:list_roles"),),
self.request):
try:
roles = api.keystone.role_list(self.request,
filters=filters)
except Exception:
exceptions.handle(self.request,
_('Unable to retrieve roles list.'))
else:
msg = _("Insufficient privilege level to view role information.")
messages.info(self.request, msg)
return roles
class UpdateView(forms.ModalFormView):
template_name = 'identity/roles/update.html'
modal_header = _("Update Role")
form_id = "update_role_form"
form_class = project_forms.UpdateRoleForm
submit_label = _("Update Role")
submit_url = "horizon:identity:roles:update"
success_url = reverse_lazy('horizon:identity:roles:index')
page_title = _("Update Role")
@memoized.memoized_method
def get_object(self):
try:
return api.keystone.role_get(self.request, self.kwargs['role_id'])
except Exception:
redirect = reverse("horizon:identity:roles:index")
exceptions.handle(self.request,
_('Unable to update role.'),
redirect=redirect)
def get_context_data(self, **kwargs):
context = super(UpdateView, self).get_context_data(**kwargs)
args = (self.get_object().id,)
context['submit_url'] = reverse(self.submit_url, args=args)
return context
def get_initial(self):
role = self.get_object()
return {'id': role.id,
'name': role.name}
class CreateView(forms.ModalFormView):
template_name = 'identity/roles/create.html'
modal_header = _("Create Role")
form_id = "create_role_form"
form_class = project_forms.CreateRoleForm
submit_label = _("Create Role")
submit_url = reverse_lazy("horizon:identity:roles:create")
success_url = reverse_lazy('horizon:identity:roles:index')
page_title = _("Create Role")
| bac/horizon | openstack_dashboard/dashboards/identity/roles/views.py | Python | apache-2.0 | 3,528 | 0 |
"""This file is useful only if 'salesforce' is a duplicit name in Django registry
then put a string 'salesforce.apps.SalesforceDb' instead of simple 'salesforce'
"""
from django.apps import AppConfig
class SalesforceDb(AppConfig):
name = 'salesforce'
label = 'salesforce_db'
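
# Hedged illustration (not part of the original file) of the settings entry
# that the module docstring refers to:
#
# INSTALLED_APPS = [
#     # ...
#     'salesforce.apps.SalesforceDb',  # instead of the plain 'salesforce'
# ]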
| django-salesforce/django-salesforce | salesforce/apps.py | Python | mit | 286 | 0.003497 |
# -*- encoding:utf-8 -*-
"""期货度量模块"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
from ..CoreBu import ABuEnv
from ..ExtBu.empyrical import stats
from ..MetricsBu.ABuMetricsBase import AbuMetricsBase, valid_check
from ..UtilBu.ABuDTUtil import warnings_filter
__author__ = '阿布'
__weixin__ = 'abu_quant'
class AbuMetricsFutures(AbuMetricsBase):
"""期货度量类,主要区别在于不涉及benchmark"""
def _metrics_base_stats(self):
"""度量真实成交了的capital_pd,即涉及资金的度量,期货相关不涉及benchmark"""
# 平均资金利用率
self.cash_utilization = 1 - (self.capital.capital_pd.cash_blance /
self.capital.capital_pd.capital_blance).mean()
self.algorithm_returns = np.round(self.capital.capital_pd['capital_blance'].pct_change(), 3)
        # cumulative returns data
# noinspection PyTypeChecker
self.algorithm_cum_returns = stats.cum_returns(self.algorithm_returns)
        # cumulative return on the last day
self.algorithm_period_returns = self.algorithm_cum_returns[-1]
        # number of trading days
self.num_trading_days = len(self.algorithm_cum_returns)
        # annualized return
self.algorithm_annualized_returns = \
(ABuEnv.g_market_trade_year / self.num_trading_days) * self.algorithm_period_returns
# noinspection PyUnresolvedReferences
self.mean_algorithm_returns = self.algorithm_returns.cumsum() / np.arange(1, self.num_trading_days + 1,
dtype=np.float64)
# noinspection PyTypeChecker
self.algorithm_volatility = stats.annual_volatility(self.algorithm_returns)
# noinspection PyTypeChecker
self.algorithm_sharpe = stats.sharpe_ratio(self.algorithm_returns)
        # maximum drawdown
# noinspection PyUnresolvedReferences
self.max_drawdown = stats.max_drawdown(self.algorithm_returns.values)
@valid_check
@warnings_filter # skip: statsmodels / nonparametric / kdetools.py:20
def plot_returns_cmp(self, only_show_returns=False, only_info=False):
"""考虑资金情况下的度量,进行与benchmark的收益度量对比,收益趋势,资金变动可视化,以及其它度量信息,不涉及benchmark"""
self.log_func('买入后卖出的交易数量:{}'.format(self.order_has_ret.shape[0]))
self.log_func('胜率:{:.4f}%'.format(self.win_rate * 100))
self.log_func('平均获利期望:{:.4f}%'.format(self.gains_mean * 100))
self.log_func('平均亏损期望:{:.4f}%'.format(self.losses_mean * 100))
self.log_func('盈亏比:{:.4f}'.format(self.win_loss_profit_rate))
self.log_func('策略收益: {:.4f}%'.format(self.algorithm_period_returns * 100))
self.log_func('策略年化收益: {:.4f}%'.format(self.algorithm_annualized_returns * 100))
self.log_func('策略买入成交比例:{:.4f}%'.format(self.buy_deal_rate * 100))
self.log_func('策略资金利用率比例:{:.4f}%'.format(self.cash_utilization * 100))
self.log_func('策略共执行{}个交易日'.format(self.num_trading_days))
if only_info:
return
self.algorithm_cum_returns.plot()
plt.legend(['algorithm returns'], loc='best')
plt.show()
if only_show_returns:
return
sns.regplot(x=np.arange(0, len(self.algorithm_cum_returns)), y=self.algorithm_cum_returns.values)
plt.show()
sns.distplot(self.capital.capital_pd['capital_blance'], kde_kws={"lw": 3, "label": "capital blance kde"})
plt.show()
@valid_check
def plot_sharp_volatility_cmp(self, only_info=False):
"""sharp,volatility信息输出"""
self.log_func('策略Sharpe夏普比率: {:.4f}'.format(self.algorithm_sharpe))
self.log_func('策略波动率Volatility: {:.4f}'.format(self.algorithm_volatility))
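

# --- Hedged example (not part of the original module): the annualization rule
# used in _metrics_base_stats above, written as a standalone helper. 250 is an
# assumed value for ABuEnv.g_market_trade_year.
def _annualize_sketch(period_return, num_trading_days, trade_days_per_year=250):
    """E.g. a 12% return over 125 trading days -> 0.24 (24%) annualized."""
    return (trade_days_per_year / num_trading_days) * period_return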
| bbfamily/abu | abupy/MetricsBu/ABuMetricsFutures.py | Python | gpl-3.0 | 4,134 | 0.002949 |
from __future__ import absolute_import
import six
from sentry.api.serializers import Serializer, register
from sentry.models import ReleaseFile
@register(ReleaseFile)
class ReleaseFileSerializer(Serializer):
def serialize(self, obj, attrs, user):
return {
'id': six.text_type(obj.id),
'name': obj.name,
'headers': obj.file.headers,
'size': obj.file.size,
'sha1': obj.file.checksum,
'dateCreated': obj.file.timestamp,
}
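
# Illustrative shape of serialize()'s return value (a sketch; every field
# value below is a placeholder, not real data):
#
# {
#     'id': '1',
#     'name': 'app.js.map',
#     'headers': {'Content-Type': 'application/octet-stream'},
#     'size': 1024,
#     'sha1': 'da39a3ee5e6b4b0d3255bfef95601890afd80709',
#     'dateCreated': datetime(2016, 1, 1),
# }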
| alexm92/sentry | src/sentry/api/serializers/models/release_file.py | Python | bsd-3-clause | 515 | 0 |
from pyui.stack_images import Ui_StackImages
from PyQt5.QtWidgets import QWidget, QDialog, QAction, QLineEdit, QProgressDialog, QApplication, QToolBar
from PyQt5.QtGui import QIcon, QStandardItemModel, QStandardItem
from PyQt5.QtCore import Qt, QObject, pyqtSignal, QStandardPaths, QByteArray
from pyspectrum_commons import *
from project import Project
from astropy.io import fits
import scipy.ndimage.interpolation
from qmathplotwidget import QMathPlotWidget, QImPlotWidget
import os
from scipy.stats import pearsonr
from scipy.interpolate import UnivariateSpline
import numpy as np
from matplotlib.patches import Rectangle
from rotate_image_dialog import RotateImageDialog
class StackImages(QWidget):
def __init__(self, fits_file, settings):
QWidget.__init__(self)
self.fits_file = fits_file
self.ui = Ui_StackImages()
self.ui.setupUi(self)
self.settings = settings
self.degrees = 0. # TODO
self.files_model = QStandardItemModel()
self.files_model.setHorizontalHeaderLabels(['File', 'Quality', 'Align'])
self.ui.files.setModel(self.files_model)
self.__add_file_to_model(fits_file)
self.plot = QtCommons.nestWidget(self.ui.plot, QImPlotWidget(self.__files_data()[0]['data'], cmap='gray'))
self.__set_ref(0)
self.toolbar = QToolBar()
self.add = self.toolbar.addAction('Add', lambda: open_files_sticky('Open FITS Images',FITS_IMG_EXTS, self.__open, settings, IMPORT_IMG, parent=self ))
self.remove = self.toolbar.addAction('Remove', self.__remove_selected_rows)
self.reference_action = self.toolbar.addAction('Reference', lambda: self.__set_ref(self.ui.files.selectionModel().selectedRows()[0].row() ) )
self.toolbar.addAction('Select alignment region', lambda: self.plot.add_rectangle_selector('select_align', self.__alignment_region_selected))
self.toolbar.addAction('Rotate', lambda: self.rotate_dialog.show() )
self.ui.files.selectionModel().selectionChanged.connect(lambda sel, unsel: self.__selection_changed() )
self.ui.files.clicked.connect(lambda index: self.__draw_image(index.row()))
#self.accepted.connect(self.stack)
self.__selection_changed()
def __selection_changed(self):
sel = len(self.ui.files.selectionModel().selectedRows())
self.remove.setEnabled(sel)
self.reference_action.setEnabled(sel == 1)
def __draw_image(self,index):
image_view = self.plot.axes_image
image_view.set_data(self.files_model.item(index).data()['data'])
image_view.figure.canvas.draw()
def __open(self, files):
existing_files = [d['file'] for d in self.__files_data()]
        progress = QProgressDialog("Loading files", None, 0, len(files), self)
        progress.setWindowModality(Qt.WindowModal)
progress.show()
for index, file in enumerate(files):
progress.setValue(index+1)
QApplication.instance().processEvents()
if file not in existing_files:
self.__add_file(fits.open(file))
def __row_index(self, data):
return [i for i, d in enumerate(self.__files_data()) if d['file'] == data['file']][0]
def __add_file_to_model(self, fits_file):
item = QStandardItem(os.path.basename(fits_file.filename()))
data = fits_file[0].data
data = scipy.ndimage.interpolation.rotate(data, self.degrees, reshape=True, order=5, mode='constant')
spatial = data.sum(1)
profile = data.sum(0)
roots = UnivariateSpline(range(0, len(spatial)), spatial-np.max(spatial)/2, s=0.2, k=3).roots()
quality = roots[1]-roots[0]
item.setData({'file': fits_file.filename(), 'fits': fits_file, 'data': data, 'spatial': spatial, 'profile': profile, 'quality': quality})
offset = QStandardItem('N/A') # TODO
quality_item = QStandardItem("")
self.files_model.appendRow([item, quality_item, offset])
return item
def __add_file(self, fits_file):
item = self.__add_file_to_model(fits_file)
if self.files_model.rowCount() == 1:
self.__set_ref(0)
else:
self.align(item.data())
self.__update_qualities()
def __update_qualities(self):
qualities = [d['quality'] for d in self.__files_data()]
self.qualities = (min(qualities), max(qualities))
for index in range(0, self.files_model.rowCount()):
self.files_model.item(index, 1).setText("{}%".format(self.__quality_percent(self.files_model.item(index).data()['quality'])))
def __quality_percent(self, quality):
return 100. - (100. * (quality-self.qualities[0]) / (self.qualities[1]-self.qualities[0]))
def align(self, data):
if data['file'] == self.reference['file']:
self.__update_offset(data, (0, 0))
return
offset_range = lambda n: range(1-int(n), int(n)-1)
offsets = lambda name, indexes: [ (pearsonr(self.reference[name][indexes[0]:indexes[1]], data[name][indexes[0]-offset:indexes[1]-offset] )[0], offset) for offset in offset_range(indexes[0]) ]
x_offset = sorted(offsets('profile', self.reference_indexes['h']), key=lambda x: x[0])[-1]
y_offset = sorted(offsets('spatial', self.reference_indexes['v']), key=lambda y: y[0])[-1]
self.__update_offset(data, (x_offset[1], y_offset[1]))
def __update_offset(self, data, offset):
row = self.__row_index(data)
self.files_model.item(row, 2).setText('{}, {}'.format(offset[0], offset[1]))
data.update({'offset': {'x': offset[0], 'y': offset[1]}})
self.files_model.item(row).setData(data)
def __files_data(self):
return [self.files_model.item(i).data() for i in range(0, self.files_model.rowCount())]
def __remove_selected_rows(self):
for row in sorted([r.row() for r in self.ui.files.selectionModel().selectedRows()], reverse=True):
self.files_model.removeRows(row, 1)
if self.files_model.rowCount() == 0:
return
if len([f for f in self.__files_data() if f['file'] == self.reference['file']]) == 0:
self.__set_ref(0)
def __set_ref(self, index):
self.reference = self.files_model.item(index).data()
self.rotate_dialog = RotateImageDialog(self.fits_file, 0)
self.rotate_dialog.rotated.connect(self.__rotated)
indexes = lambda data: (int(len(data)/4), int(len(data)/4*3))
self.__set_reference_indexes(indexes(self.reference['profile']), indexes(self.reference['spatial']) )
#self.reference_indexes = { 'h': indexes(self.reference['profile']), 'v': indexes(self.reference['spatial']) }
for data in self.__files_data() :
self.align(data)
def __rotated(self):
self.degrees = self.rotate_dialog.degrees()
        # clear all rows at once; removing by ascending index would skip rows
        # as the model shrinks
        self.files_model.removeRows(0, self.files_model.rowCount())
self.__add_file(self.fits_file)
self.__draw_image(0)
def __alignment_region_selected(self, eclick, erelease):
self.__set_reference_indexes((eclick.xdata, erelease.xdata), (eclick.ydata, erelease.ydata))
def __set_reference_indexes(self, x, y):
self.reference_indexes = { 'h': x, 'v': y }
self.__draw_reference_rect()
def __draw_reference_rect(self):
self.plot.rm_element('reference_indexes')
x, y = self.reference_indexes['h'], self.reference_indexes['v']
rect = Rectangle((x[0], y[0]), x[1]-x[0], y[1]-y[0], fill=True, alpha=0.3, color='green')
self.plot.figure.axes[0].add_artist(rect)
self.plot.add_element(rect, 'reference_indexes')
self.plot.figure.canvas.draw()
def stack(self):
dataset = self.__files_data()
median = MedianStacker(dataset).median()
self.fits_file[0].data = median
class MedianStacker:
def __init__(self, matrices):
self.matrices = matrices
def final_shape(self):
offsets = ( [y['offset']['y'] for y in self.matrices], [x['offset']['x'] for x in self.matrices] )
offsets = (min(offsets[0]), max(offsets[0]), min(offsets[1]), max(offsets[1]))
shape = self.matrices[0]['data'].shape
return {'shape': (shape[0] - offsets[0] + offsets[1], shape[1] - offsets[2] + offsets[3]), 'zero': (-offsets[0],-offsets[2]) }
def data_reposition(self, data, shape_offset):
shape = shape_offset['shape']
ret = np.zeros(shape[0]*shape[1]).reshape(shape)
rows_offset = data['offset']['y'] + shape_offset['zero'][0]
cols_offset = data['offset']['x'] + shape_offset['zero'][1]
rows = [rows_offset, data['data'].shape[0] + rows_offset]
cols = [cols_offset, data['data'].shape[1] + cols_offset]
ret[rows[0]:rows[1], cols[0]:cols[1]] = data['data']
return ret
def median(self):
final_shape = self.final_shape()
data = np.array([self.data_reposition(d, final_shape) for d in self.matrices])
return np.median(data, axis=0)
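

# --- Hedged usage sketch (not part of the original module): stacking two
# frames with MedianStacker. The 'offset' layout mirrors what
# StackImages.__update_offset stores; the pixel values are placeholders.
if __name__ == '__main__':
    frames = [
        {'data': np.ones((3, 3)), 'offset': {'x': 0, 'y': 0}},
        {'data': np.ones((3, 3)) * 3, 'offset': {'x': 1, 'y': 0}},
    ]
    stacked = MedianStacker(frames).median()
    print(stacked.shape)  # (3, 4): widened by the 1-pixel x offset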
| GuLinux/PySpectrum | stack_images.py | Python | gpl-3.0 | 9,262 | 0.009177 |
#!/usr/bin/python
# service_proxy_server.py
#
# Copyright (C) 2008-2018 Veselin Penev, https://bitdust.io
#
# This file (service_proxy_server.py) is part of BitDust Software.
#
# BitDust is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BitDust Software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with BitDust Software. If not, see <http://www.gnu.org/licenses/>.
#
# Please contact us if you have any questions at [email protected]
#
#
#
#
"""
.. module:: service_proxy_server
"""
from __future__ import absolute_import
from services.local_service import LocalService
def create_service():
return ProxyServerService()
class ProxyServerService(LocalService):
service_name = 'service_proxy_server'
config_path = 'services/proxy-server/enabled'
# def init(self):
# self.debug_level = 2
# self.log_events = True
def dependent_on(self):
return ['service_p2p_hookups',
]
def enabled(self):
from main import settings
return settings.enableProxyServer()
def start(self):
from transport.proxy import proxy_router
proxy_router.A('init')
proxy_router.A('start')
return True
def stop(self):
from transport.proxy import proxy_router
proxy_router.A('stop')
proxy_router.A('shutdown')
return True
def request(self, json_payload, newpacket, info):
from transport.proxy import proxy_router
proxy_router.A('request-route-received', (json_payload, newpacket, info, ))
return True
def cancel(self, json_payload, newpacket, info):
from transport.proxy import proxy_router
proxy_router.A('cancel-route-received', (json_payload, newpacket, info, ))
return True
| vesellov/bitdust.devel | services/service_proxy_server.py | Python | agpl-3.0 | 2,236 | 0.001789 |
from django import forms
from django.db.models.fields import CharField, DecimalField
from django.utils.translation import ugettext_lazy as _
from tendenci.apps.invoices.models import Invoice
from tendenci.apps.events.models import Event
class AdminNotesForm(forms.ModelForm):
class Meta:
model = Invoice
fields = ('admin_notes',
)
class AdminAdjustForm(forms.ModelForm):
class Meta:
model = Invoice
fields = ('variance',
'variance_notes',
)
class InvoiceSearchForm(forms.Form):
INVOICE_TYPE_CHOICES = (
('', '-----------------'),
('events', _('events')),
('memberships', _('memberships')),
('jobs', _('jobs'))
)
SEARCH_METHOD_CHOICES = (
('starts_with', _('Starts With')),
('contains', _('Contains')),
('exact', _('Exact')),
)
TENDERED_CHOICES = (
('', _('Show All')),
('tendered', _('Tendered')),
('estimate', _('Estimate')),
('void', _('Void')),
)
BALANCE_CHOICES = (
('', _('Show All')),
('0', _('Zero Balance')),
('1', _('Non-zero Balance')),
)
search_criteria = forms.ChoiceField(choices=[],
required=False)
search_text = forms.CharField(max_length=100, required=False)
search_method = forms.ChoiceField(choices=SEARCH_METHOD_CHOICES,
required=False)
start_dt = forms.DateField(label=_('From'), required=False)
end_dt = forms.DateField(label=_('To'), required=False)
start_amount = forms.DecimalField(required=False)
end_amount = forms.DecimalField(required=False)
tendered = forms.ChoiceField(choices=TENDERED_CHOICES,
required=False)
balance = forms.ChoiceField(choices=BALANCE_CHOICES,
required=False)
last_name = forms.CharField(label=_('Billing Last Name'),
max_length=100, required=False)
invoice_type = forms.ChoiceField(label=_("Invoice Type"), required=False, choices=INVOICE_TYPE_CHOICES)
event = forms.ModelChoiceField(queryset=Event.objects.all(),
label=_("Event "),
required=False,
empty_label=_('All Events'))
event_id = forms.ChoiceField(label=_('Event ID'), required=False, choices=[])
def __init__(self, *args, **kwargs):
super(InvoiceSearchForm, self).__init__(*args, **kwargs)
# Set start date and end date
if self.fields.get('start_dt'):
self.fields.get('start_dt').widget.attrs = {
'class': 'datepicker',
}
if self.fields.get('end_dt'):
self.fields.get('end_dt').widget.attrs = {
'class': 'datepicker',
}
# Set search criteria choices
criteria_choices = [('', _('SELECT ONE'))]
criteria_choices.append(('id', _('ID')))
for field in Invoice._meta.fields:
if isinstance(field, CharField) or isinstance(field, DecimalField):
if not field.name.startswith('bill_to') and not field.name.startswith('ship_to'):
criteria_choices.append((field.name, field.verbose_name))
criteria_choices.append(('owner_id', _('owner')))
self.fields['search_criteria'].choices = criteria_choices
# Set invoice type choices
invoices = Invoice.objects.all().distinct('object_type__app_label')
invoice_choices = [('', '-----------------')]
for entry in invoices:
if entry.object_type:
invoice_choices.append((entry.object_type.app_label, entry.object_type.app_label))
self.fields['invoice_type'].choices = invoice_choices
# Set event_id choices
choices = [('', _('All events'))]
events = Event.objects.all() # .filter(registration__invoice__isnull=False)
for event_obj in events:
choices.append((event_obj.pk, event_obj.pk))
self.fields['event_id'].choices = choices
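# Illustrative view-side usage (hypothetical view code, not part of this module):
#
#     form = InvoiceSearchForm(request.GET)
#     if form.is_valid():
#         criteria = form.cleaned_data['search_criteria']
#         text = form.cleaned_data['search_text']
#         invoices = (Invoice.objects.filter(**{criteria: text})
#                     if criteria and text else Invoice.objects.all())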
| alirizakeles/tendenci | tendenci/apps/invoices/forms.py | Python | gpl-3.0 | 4,200 | 0.001905 |
'''
Created on Dec 11, 2015
@author: cmelton
'''
from DDServerApp.ORM import orm,Column,relationship,String,Integer, PickleType, Float,ForeignKey,backref,TextReader, joinedload_all
from DDServerApp.ORM import BASE_DIR, Boolean
from User import User
import os, copy
class Credentials(orm.Base):
    '''
    Service-account credentials (name, account e-mail, PEM key file location,
    and project) that a user stores for driving a cloud provider.
    '''
id = Column(Integer,primary_key=True)
name = Column(String)
serviceAccount = Column(String)
pemFileLocation = Column(String)
user_id = Column(Integer, ForeignKey("user.id"))
user = relationship(User, backref=backref("credentials"))
project = Column(String)
def __init__(self, name, serviceAccount, pemFileLocation, project, user):
'''
Constructor
'''
self.name = name
self.user = user
self.serviceAccount = serviceAccount
self.pemFileLocation = pemFileLocation
self.project = project
def dictForJSON(self):
return {"id": str(self.id),
"name": self.name,
"serviceaccount": self.serviceAccount,
"project": self.project
}
def updateValues(self, name, serviceAccount, pemFileLocation, project):
self.name = name
self.serviceAccount = serviceAccount
if os.path.exists(self.pemFileLocation):
os.remove(self.pemFileLocation)
self.pemFileLocation = pemFileLocation
self.project = project
@staticmethod
def findByID(session, cid, user):
cids=session.query(Credentials).join(User).filter(Credentials.id==int(cid)).filter(User.id==user.id).all()
if len(cids)==0: return None
else: return cids[0]
@staticmethod
def findByName(session, name, user):
creds=session.query(Credentials).filter(Credentials.name==name).filter(Credentials.user_id==user.id).all()
if len(creds)==0: return None
else: return creds[0]
@staticmethod
def delete(session, cid, user):
cred = Credentials.findByID(session, cid, user)
if cred != None:
session.delete(cred)
session.commit()
class WorkflowTemplate(orm.Base):
    '''
    Holds a user-defined workflow. It is a template because it is used to
    instantiate a concrete Workflow at runtime.
    '''
id = Column(Integer,primary_key=True)
name = Column(String)
workflow_vars = Column(PickleType)
user_id = Column(Integer, ForeignKey("user.id"))
user = relationship(User, backref = "workflowtemplates")
credentials_id = Column(Integer, ForeignKey("credentials.id"))
credentials = relationship(Credentials, backref = "workflowtemplates")
def __init__(self, name, user, workflowVars={}, credentials = None):
'''
Constructor
'''
self.name = name
self.user = user
self.workflow_vars = workflowVars
self.credentials = credentials
def isActive(self):
return any([wf.active for wf in self.workflows])
def startWorkflow(self, session, logfilename, address, workflowname):
from Workflow import Workflow
print "imported"
if not self.isActive():
print workflowname
wf = Workflow(workflowname, self, self.user, logfilename, address)
session.add(wf)
session.commit()
"print not active found workflow"
wf.start(session)
print "starting workflow"
session.add(wf)
session.commit()
def stopWorkflow(self, session):
for wf in self.workflows:
if wf.active:
wf.stop()
session.add_all(self.workflows)
session.commit()
def _instancesToDictForJSON(self):
return {str(inst.id): inst.dictForJSON() for inst in self.instancetemplates}
def _disksToDictForJSON(self):
return {str(disk.id): disk.dictForJSON() for disk in self.disktemplates}
def dictForJSON(self):
return {"id": str(self.id),
"name": self.name,
"variables": self.workflow_vars,
"instances": self._instancesToDictForJSON(),
"disks": self._disksToDictForJSON(),
"credentials": self.credentials_id}
def updateVarDict(self, vardict, user):
if self.user == user:
self.workflow_vars = {}
for key in vardict:
self._addVars(key, vardict[key])
def _addVars(self, key, value):
self.workflow_vars[key]=value
@staticmethod
def findByID(session, wfid, user=None):
if user == None:
wfs=session.query(WorkflowTemplate).filter(WorkflowTemplate.id==int(wfid)).all()
else:
wfs=session.query(WorkflowTemplate).filter(WorkflowTemplate.id==int(wfid)).filter(WorkflowTemplate.user_id==user.id).all()
if len(wfs)==0: return None
else: return wfs[0]
@staticmethod
def findByName(session, name, user):
wfs=session.query(WorkflowTemplate).filter(WorkflowTemplate.name==name).filter(WorkflowTemplate.user_id==user.id).all()
if len(wfs)==0: return None
else: return wfs[0]
@staticmethod
def delete(session, wfid, user):
workflow = WorkflowTemplate.findByID(session, wfid, user)
if workflow != None:
session.delete(workflow)
session.commit()
# session.query(WorkflowTemplate).filter(WorkflowTemplate.id==wfid).filter(WorkflowTemplate.user_id==user.id).delete()
class Image(orm.Base):
    '''
    A boot image reference owned by a user: the image name, the account used
    to authenticate to instances, and the install root directory.
    '''
id = Column(Integer,primary_key=True)
name = Column(String)
authAccount = Column(String)
rootdir = Column(String)
user_id = Column(Integer, ForeignKey("user.id"))
user = relationship(User, backref = "images")
def __init__(self, name, authAccount, rootdir, user):
'''
Constructor
'''
self.name = name
self.authAccount = authAccount
self.user = user
self.rootdir = rootdir
def dictForJSON(self):
return {"id": str(self.id),
"name": self.name,
"authaccount": self.authAccount,
"installDirectory": self.rootdir}
def updateValues(self, name, authAccount, rootdir, user):
if self.user == user:
self.name = name
self.authAccount = authAccount
self.rootdir = rootdir
@staticmethod
def findByID(session, iid, user):
images=session.query(Image).join(User).filter(Image.id==int(iid)).filter(User.id==user.id).all()
if len(images)==0: return None
else: return images[0]
@staticmethod
def findByName(session, name, user):
images=session.query(Image).join(User).filter(Image.name==name).filter(User.id==user.id).all()
if len(images)==0: return None
else: return images[0]
@staticmethod
def delete(session, iid, user):
image = Image.findByID(session, iid, user)
if image != None:
session.delete(image)
session.commit()
# session.query(Image).filter(Image.id==iid).filter(Image.user_id==user.id).delete()
class DiskTemplate(orm.Base):
    '''
    Template describing a persistent disk (size, type, location, optional
    source image) to create for each expansion of a workflow's variables.
    '''
id = Column(Integer,primary_key=True)
name = Column(String)
workflow_id = Column(Integer, ForeignKey("workflowtemplate.id"))
workflow = relationship(WorkflowTemplate, backref = "disktemplates")
image_id = Column(Integer, ForeignKey("image.id"))
image = relationship(Image)
disk_size = Column(Integer)
disk_type = Column(String)
location = Column(String)
disk_vars = Column(PickleType)
def __init__(self, name, workflow, image, disk_size, disk_type, location):
'''
Constructor
'''
self.name = name
self.workflow = workflow
self.image = image
self.disk_size = disk_size
self.disk_type = disk_type
self.location = location
self.disk_vars = {}
def dictForJSON(self):
return {"id": str(self.id),
"name": self.name,
"location": self.location,
"disktype": self.disk_type,
"size": str(self.disk_size),
"image": str(self.image.id) if self.image!=None else None,
"variables": self.disk_vars}
def updateVarDict(self, vardict, user):
if self.workflow.user == user:
self.disk_vars = {}
for key in vardict:
self._addVars(key, vardict[key])
def _addVars(self, key, value):
self.disk_vars[key]=value
def updateValues(self, name, workflow, image, diskSize, diskType, location, user):
if self.workflow.user == user:
self.name = name
self.workflow = workflow
self.image = image
self.disk_size = diskSize
self.disk_type = diskType
self.location = location
def _substituteVariables(self, x, varDict):
for k, v in varDict.items():
x = x.replace(k, v)
return x
# makes a copy of a list of dictionaries
def _repDicts(self, alist):
return [copy.copy(x) for x in alist]
# creates a separate dictionary for every combination of variables in varDict
def _parseVariableDicts(self, varDict):
replacementTuples = [(key, map(lambda x: x.strip(), value.split(","))) for key, value in varDict.items()]
result = [{}]
for variable, replacements in replacementTuples:
newresult = []
for replacement in replacements:
dictsToAdd = self._repDicts(result)
for dictToAdd in dictsToAdd:
dictToAdd[variable]=replacement
newresult += dictsToAdd
result = newresult
return result
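    # Expansion example (hypothetical variable names): with disk_vars such as
    # {'$SAMPLE': 'a, b', '$REF': 'hg19'}, _parseVariableDicts returns
    # [{'$SAMPLE': 'a', '$REF': 'hg19'}, {'$SAMPLE': 'b', '$REF': 'hg19'}],
    # i.e. one dict per combination of the comma-separated values.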
def _mergeDicts(self, dict1, dict2):
result = copy.copy(dict1)
for key, val in dict2.items(): result[key]=val
return result
def generateDisks(self, varDict, gce_manager=None, log = None):
print "generating disks, logfile:", log
variableDicts = self._parseVariableDicts(self._mergeDicts(varDict, self.disk_vars))
result = {}
from DDServerApp.ORM.Mappers import Disk
for variableDict in variableDicts:
name = self._substituteVariables(self.name, variableDict)
            result[name] = Disk(name, self.disk_size, self.location, snapshot=None, image=self.image,
                                disk_type=self.disk_type, init_source="", shutdown_dest="",
                                gce_manager=gce_manager, log=log)
return result
@staticmethod
def findByID(session, did, user):
ds=session.query(DiskTemplate).join(WorkflowTemplate).filter(DiskTemplate.id==int(did)).filter(WorkflowTemplate.user_id==user.id).all()
if len(ds)==0: return None
else: return ds[0]
@staticmethod
def findByName(session, name, user):
ds=session.query(DiskTemplate).join(WorkflowTemplate).filter(DiskTemplate.name==name).filter(WorkflowTemplate.user_id==user.id).all()
if len(ds)==0: return None
else: return ds[0]
@staticmethod
def delete(session, did, user):
disk = DiskTemplate.findByID(session, did, user)
if disk != None:
session.delete(disk)
session.commit()
# session.query(DiskTemplate).join(WorkflowTemplate).filter(DiskTemplate.id==did).filter(WorkflowTemplate.user_id==user.id).delete()
class ReadDiskLink(orm.Base):
disktemplate_id = Column(Integer, ForeignKey('disktemplate.id'), primary_key=True)
instancetemplate_id = Column(Integer, ForeignKey('instancetemplate.id'), primary_key=True)
class ReadWriteDiskLink(orm.Base):
disktemplate_id = Column(Integer, ForeignKey('disktemplate.id'), primary_key=True)
instancetemplate_id = Column(Integer, ForeignKey('instancetemplate.id'), primary_key=True)
class InstanceTemplateDependencyRelation(orm.Base):
child_id = Column(Integer, ForeignKey('instancetemplate.id'), primary_key=True)
parent_id = Column(Integer, ForeignKey('instancetemplate.id'), primary_key=True)
class InstanceTemplate(orm.Base):
    '''
    Template describing a VM instance to launch as part of a workflow: machine
    type, location, attached disks, dependencies, and expansion variables.
    '''
id = Column(Integer,primary_key=True)
name = Column(String)
workflow_id = Column(Integer, ForeignKey("workflowtemplate.id"))
workflow = relationship(WorkflowTemplate, backref = "instancetemplates")
machine_type = Column(String)
location = Column(String)
boot_disk_id = Column(Integer, ForeignKey("disktemplate.id"))
boot_disk = relationship(DiskTemplate)
read_disks = relationship(DiskTemplate, secondary='readdisklink')
read_write_disks = relationship(DiskTemplate, secondary='readwritedisklink')
# commands are backreferenced
dependencies = relationship("InstanceTemplate", secondary='instancetemplatedependencyrelation',
primaryjoin=id==InstanceTemplateDependencyRelation.parent_id,
secondaryjoin=id==InstanceTemplateDependencyRelation.child_id,
backref="next_instances")
variables = Column(PickleType)
ex_tags = Column(String)
ex_metadata = Column(String)
ex_network = Column(String)
numLocalSSD = Column(Integer)
preemptible = Column(Boolean)
def __init__(self, name, machine_type, location, boot_disk, read_disks,
read_write_disks, dependencies, workflow, ex_tags, ex_metadata,
ex_network, numLocalSSD, preemptible):
'''
Constructor
'''
self.name = name
self.machine_type = machine_type
self.location = location
self.boot_disk = boot_disk
self.read_disks = read_disks
self.read_write_disks = read_write_disks
self.dependencies = dependencies
self.variables = {}
self.workflow = workflow
self.ex_tags = ex_tags
self.ex_metadata = ex_metadata
self.ex_network = ex_network
self.numLocalSSD = int(numLocalSSD)
self.preemptible = preemptible
def dictForJSON(self):
# if "commandtemplates" in self.__dict__: commands = self.commandtemplates
# else: commands = []
return {"id": str(self.id),
"name": self.name,
"Commands": {str(c.id): c.dictForJSON() for c in self.commandtemplates},
"BootDisk": str(self.boot_disk.id),
"ReadDisks": [str(rd.id) for rd in self.read_disks],
"WriteDisks": [str(wd.id) for wd in self.read_write_disks],
"variables": self.variables,
"machinetype": self.machine_type,
"location": self.location,
"dependencies": [str(d.id) for d in self.dependencies],
"ex_tags": self.ex_tags,
"ex_metadata": self.ex_metadata,
"ex_network": self.ex_network,
"numLocalSSD": self.numLocalSSD,
"preemptible": self.preemptible}
def _replaceVars(self, x, varDict):
if type(x)==list:
return [self._replaceVars(xi, varDict) for xi in x]
for var, rep in varDict.items():
x = x.replace(var, rep)
return x
def updateValues(self, name, machineType, location, bootDisk, read_disks,
read_write_disks, dependencies, ex_tags, ex_metadata,
ex_network, numLocalSSD, preemptible):
self.name = name
self.machine_type = machineType
self.location = location
self.boot_disk = bootDisk
self.read_disks = read_disks
self.read_write_disks = read_write_disks
self.dependencies = dependencies
self.ex_tags = ex_tags
self.ex_metadata = ex_metadata
self.ex_network = ex_network
self.numLocalSSD = int(numLocalSSD)
self.preemptible = preemptible
def updateVarDict(self, vardict, user):
if self.workflow.user == user:
self.variables = {}
for key in vardict:
self._addVars(key, vardict[key])
def _addVars(self, key, value):
self.variables[key]=value
def _substituteVariables(self, x, varDict):
for k, v in varDict.items():
x = x.replace(k, v)
return x
# makes a copy of a list of dictionaries
def _repDicts(self, alist):
return [copy.copy(x) for x in alist]
# creates a separate dictionary for every combination of variables in varDict
def _parseVariableDicts(self, varDict):
replacementTuples = [(key, map(lambda x: x.strip(), value.split(","))) for key, value in varDict.items()]
result = [{}]
for variable, replacements in replacementTuples:
newresult = []
for replacement in replacements:
dictsToAdd = self._repDicts(result)
for dictToAdd in dictsToAdd:
dictToAdd[variable]=replacement
newresult += dictsToAdd
result = newresult
return result
def _mergeDicts(self, dict1, dict2):
result = copy.copy(dict1)
for key, val in dict2.items(): result[key]=val
return result
def generateInstances(self, varDict, disks, gce_manager=None, log = None):
variableDicts = self._parseVariableDicts(self._mergeDicts(varDict, self.variables))
result = {}
from DDServerApp.ORM.Mappers import Instance
for variableDict in variableDicts:
name = self._substituteVariables(self.name, variableDict)
dependency_names = [self._substituteVariables(d.name, variableDict) for d in self.dependencies]
read_disks = [disks[self._substituteVariables(d.name, variableDict)] for d in self.read_disks]
read_write_disks = [disks[self._substituteVariables(d.name, variableDict)] for d in self.read_write_disks]
boot_disk = disks[self._substituteVariables(self.boot_disk.name, variableDict)]
command_dict = {str(c.id): c.dictForJSON() for c in self.commandtemplates}
for key in command_dict: command_dict[key]["command"] = self._substituteVariables(command_dict[key]["command"], variableDict)
result[name] = Instance(name, self.machine_type, self.boot_disk.image, self.location,
self.ex_network, self.ex_tags, self.ex_metadata, dependency_names,
read_disks, read_write_disks, boot_disk, command_dict,
rootdir=self.boot_disk.image.rootdir, preemptible=self.preemptible, numLocalSSD=self.numLocalSSD,
localSSDInitSources="", localSSDDests="", gce_manager=gce_manager, log = log)
return result
@staticmethod
def findByID(session, iid, user):
iids=session.query(InstanceTemplate).join(WorkflowTemplate).join(User).filter(InstanceTemplate.id==int(iid)).filter(User.id==user.id).all()
if len(iids)==0: return None
else: return iids[0]
@staticmethod
def findByName(session, name, user):
iids=session.query(InstanceTemplate).join(WorkflowTemplate).join(User).filter(InstanceTemplate.name==name).filter(User.id==user.id).all()
if len(iids)==0: return None
else: return iids[0]
@staticmethod
def delete(session, iid, user):
inst = InstanceTemplate.findByID(session, iid, user)
if inst != None:
session.delete(inst)
session.commit()
# session.query(InstanceTemplate).filter_by(InstanceTemplate.id==iid).delete()
# session.query(InstanceTemplate).join(WorkflowTemplate).filter(InstanceTemplate.id==iid).filter(WorkflowTemplate.user_id==user.id).delete()
class CommandTemplateDependencyRelation(orm.Base):
child_id = Column(Integer, ForeignKey('commandtemplate.id'), primary_key=True)
parent_id = Column(Integer, ForeignKey('commandtemplate.id'), primary_key=True)
class CommandTemplate(orm.Base):
    '''
    Template for a single command to run on an instance, with dependencies on
    other commands.
    '''
id = Column(Integer,primary_key=True)
instance_id = Column(Integer, ForeignKey("instancetemplate.id"))
instance = relationship(InstanceTemplate, backref = "commandtemplates")
command_name = Column(String)
command = Column(String)
dependencies = relationship("CommandTemplate", secondary='commandtemplatedependencyrelation',
primaryjoin=id==CommandTemplateDependencyRelation.parent_id,
secondaryjoin=id==CommandTemplateDependencyRelation.child_id,
backref="next_commands")
def __init__(self, instance, command_name, command, dependencies):
'''
Constructor
'''
self.instance = instance
self.command_name = command_name
self.command = command
self.dependencies = dependencies
def dictForJSON(self):
return {"id": str(self.id),
"name": self.command_name,
"command": self.command,
"dependencies": [str(d.id) for d in self.dependencies]}
def updateValues(self, instance, command_name, command, dependencies):
self.instance = instance
self.command_name = command_name
self.command = command
self.dependencies = dependencies
@staticmethod
def delete(session, cid, user):
command = CommandTemplate.findByID(session, cid, user)
if command != None:
session.delete(command)
session.commit()
# session.query(CommandTemplate).join(InstanceTemplate).join(WorkflowTemplate).filter(CommandTemplate.id==cid).filter(WorkflowTemplate.user_id==user.id).delete()
@staticmethod
def findByID(session, cid, user):
cids=session.query(CommandTemplate).join(InstanceTemplate).join(WorkflowTemplate).filter(CommandTemplate.id==int(cid)).filter(WorkflowTemplate.user_id==user.id).all()
if len(cids)==0: return None
else: return cids[0]
@staticmethod
def findByName(session, name, user):
cids=session.query(CommandTemplate).join(InstanceTemplate).join(WorkflowTemplate).filter(CommandTemplate.command_name==name).filter(WorkflowTemplate.user_id==user.id).all()
if len(cids)==0: return None
else: return cids[0]
| collinmelton/DDCloudServer | DDServerApp/ORM/Mappers/WorkflowTemplates.py | Python | gpl-2.0 | 22,461 | 0.012466 |
from .validator import Validator
from ..util import register_as_validator
class ExactLength(Validator):
__validator_name__ = 'exact_length'
def __init__(self, exact_length):
super(ExactLength, self).__init__()
self.exact_length = exact_length
def validate(self, data, request=None, session=None):
        return len(data) == self.exact_length
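# Semantics check (hypothetical data): ExactLength(3).validate("abc") is True,
# while ExactLength(3).validate("ab") is False.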
register_as_validator(ExactLength)
 | Performante/pyFormante | pyFormante/validation/exact_length.py | Python | gpl-2.0 | 411 | 0.002433 |
import pygame
def timer():
    TIMER_EVENT = pygame.USEREVENT
    pygame.init()
    screen = pygame.display.set_mode((800, 600))
    clock = pygame.time.Clock()
    counter, text = 50, '50'
    pygame.time.set_timer(TIMER_EVENT, 1000)  # fire once per second
    font = pygame.font.SysFont('comicsansms', 20)
    while True:
        for e in pygame.event.get():
            if e.type == TIMER_EVENT:
                counter -= 1
                text = str(counter) if counter > 0 else 'time\'s up'
            if e.type == pygame.QUIT:
                quit()
            if e.type == pygame.MOUSEBUTTONDOWN and e.button == 1:
                proefgame()  # defined elsewhere in this project
        # Draw every frame, colour-coding the remaining time.
        screen.fill((0, 0, 0))
        if counter < 10:
            color = (255, 0, 0)
        elif counter < 25:
            color = (255, 255, 0)
        else:
            color = (0, 255, 0)
        screen.blit(font.render(text, True, color), (700, 30))
        pygame.display.flip()
        clock.tick(60)
timer()
| QuinDiesel/CommitSudoku-Project-Game | Definitief/timer.py | Python | mit | 1,135 | 0.013216 |
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import os
def strip_comments(l):
return l.split('#', 1)[0].strip()
def reqs(*f):
return list(filter(None, [strip_comments(l) for l in open(
os.path.join(os.getcwd(), *f)).readlines()]))
def get_version(version_tuple):
if not isinstance(version_tuple[-1], int):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
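# For example: get_version((0, 9, 'b1')) -> '0.9b1'; get_version((1, 2, 3)) -> '1.2.3'.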
init = os.path.join(os.path.dirname(__file__), 'src', 'gmaps', '__init__.py')
version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]
VERSION = get_version(eval(version_line.split('=')[-1]))
INSTALL_REQUIRES = reqs('requirements.txt')
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
PACKAGES = find_packages('src')
PACKAGE_DIR = {'': 'src'}
setup(
name='python-gmaps',
version=VERSION,
author='Michał Jaworski',
author_email='[email protected]',
description='Google Maps API client',
long_description=README,
packages=PACKAGES,
package_dir=PACKAGE_DIR,
url='https://github.com/swistakm/python-gmaps',
include_package_data=True,
install_requires=INSTALL_REQUIRES,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
| 20tab/python-gmaps | setup.py | Python | bsd-2-clause | 1,557 | 0.001285 |
# Copyright 2017 reinforce.io. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorforce import util
from tensorforce.core.preprocessors import Preprocessor
class Normalize(Preprocessor):
"""
Normalize state. Subtract minimal value and divide by range.
"""
def __init__(self, shape, scope='normalize', summary_labels=()):
super(Normalize, self).__init__(shape=shape, scope=scope, summary_labels=summary_labels)
def tf_process(self, tensor):
# Min/max across every axis except batch dimension.
min_value = tensor
max_value = tensor
for axis in range(1, util.rank(tensor)):
min_value = tf.reduce_min(input_tensor=min_value, axis=axis, keep_dims=True)
max_value = tf.reduce_max(input_tensor=max_value, axis=axis, keep_dims=True)
return (tensor - min_value) / (max_value - min_value + util.epsilon)
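# NumPy sketch of the same per-sample normalization (illustrative only; the
# epsilon shown is hypothetical, the class uses util.epsilon):
#
#     import numpy as np
#     x = np.array([[1.0, 2.0], [3.0, 5.0]])           # one state of shape (2, 2)
#     y = (x - x.min()) / (x.max() - x.min() + 1e-6)   # values now lie in [0, 1]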
| lefnire/tensorforce | tensorforce/core/preprocessors/normalize.py | Python | apache-2.0 | 1,635 | 0.001835 |
#
# Retrieved from: https://svn.code.sf.net/p/p2tk/code/python/syllabify/syllabifier.py
# on 2014-09-05.
#
# According to https://www.ling.upenn.edu/phonetics/p2tk/, this is licensed
# under MIT.
#
# This is the P2TK automated syllabifier. Given a string of phonemes,
# it automatically divides the phonemes into syllables.
#
# By Joshua Tauberer, based on code originally written by Charles Yang.
#
# The syllabifier requires a language configuration which specifies
# the set of phonemes which are consonants and vowels (syllable nuclei),
# as well as the set of permissible onsets.
#
# Then call syllabify with a language configuration object and a word
# represented as a string (or list) of phonemes.
#
# Returned is a data structure representing the syllabification.
# What you get is a list of syllables. Each syllable is a tuple
# of (stress, onset, nucleus, coda). stress is None or an integer stress
# level attached to the nucleus phoneme on input. onset, nucleus,
# and coda are lists of phonemes.
#
# Example:
#
# import syllabifier
# language = syllabifier.English # or: syllabifier.loadLanguage("english.cfg")
# syllables = syllabifier.syllabify(language, "AO2 R G AH0 N AH0 Z EY1 SH AH0 N Z")
#
# The syllables variable then holds the following:
# [ (2, [], ['AO'], ['R']),
# (0, ['G'], ['AH'], []),
# (0, ['N'], ['AH'], []),
# (1, ['Z'], ['EY'], []),
# (0, ['SH'], ['AH'], ['N', 'Z'])]
#
# You could process that result with this type of loop:
#
# for stress, onset, nucleus, coda in syllables :
# print " ".join(onset), " ".join(nucleus), " ".join(coda)
#
# You can also pass the result to stringify to get a nice printable
# representation of the syllables, with periods separating syllables:
#
# print syllabify.stringify(syllables)
#
#########################################################################
English = {
'consonants': ['B', 'CH', 'D', 'DH', 'F', 'G', 'HH', 'JH', 'K', 'L', 'M', 'N',
'NG', 'P', 'R', 'S', 'SH', 'T', 'TH', 'V', 'W', 'Y', 'Z', 'ZH'],
'vowels': [ 'AA', 'AE', 'AH', 'AO', 'AW', 'AY', 'EH', 'ER', 'EY', 'IH', 'IY', 'OW', 'OY', 'UH', 'UW'],
'onsets': ['P', 'T', 'K', 'B', 'D', 'G', 'F', 'V', 'TH', 'DH', 'S', 'Z', 'SH', 'CH', 'JH', 'M',
'N', 'R', 'L', 'HH', 'W', 'Y', 'P R', 'T R', 'K R', 'B R', 'D R', 'G R', 'F R',
'TH R', 'SH R', 'P L', 'K L', 'B L', 'G L', 'F L', 'S L', 'T W', 'K W', 'D W',
'S W', 'S P', 'S T', 'S K', 'S F', 'S M', 'S N', 'G W', 'SH W', 'S P R', 'S P L',
'S T R', 'S K R', 'S K W', 'S K L', 'TH W', 'ZH', 'P Y', 'K Y', 'B Y', 'F Y',
'HH Y', 'V Y', 'TH Y', 'M Y', 'S P Y', 'S K Y', 'G Y', 'HH W', '']
}
def loadLanguage(filename) :
'''This function loads up a language configuration file and returns
the configuration to be passed to the syllabify function.'''
L = { "consonants" : [], "vowels" : [], "onsets" : [] }
f = open(filename, "r")
section = None
for line in f :
line = line.strip()
if line in ("[consonants]", "[vowels]", "[onsets]") :
section = line[1:-1]
elif section is None :
raise ValueError("File must start with a section header such as [consonants].")
        elif section not in L :
            raise ValueError("Invalid section: " + section)
        else :
            L[section].append(line)
    f.close()
    for section in "consonants", "vowels", "onsets" :
if len(L[section]) == 0 :
raise ValueError("File does not contain any consonants, vowels, or onsets.")
return L
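# Illustrative configuration file accepted by loadLanguage (one phoneme or onset
# cluster per line; the entries below are a truncated, hypothetical sample):
#
#     [consonants]
#     B
#     CH
#     [vowels]
#     AA
#     AE
#     [onsets]
#     B
#     S T R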
def syllabify(language, word) :
'''Syllabifies the word, given a language configuration loaded with loadLanguage.
word is either a string of phonemes from the CMU pronouncing dictionary set
(with optional stress numbers after vowels), or a Python list of phonemes,
e.g. "B AE1 T" or ["B", "AE1", "T"]'''
if type(word) == str :
word = word.split()
syllables = [] # This is the returned data structure.
internuclei = [] # This maintains a list of phonemes between nuclei.
for phoneme in word :
phoneme = phoneme.strip()
if phoneme == "" :
continue
stress = None
if phoneme[-1].isdigit() :
stress = int(phoneme[-1])
phoneme = phoneme[0:-1]
if phoneme in language["vowels"] :
# Split the consonants seen since the last nucleus into coda and onset.
coda = None
onset = None
# If there is a period in the input, split there.
if "." in internuclei :
period = internuclei.index(".")
coda = internuclei[:period]
onset = internuclei[period+1:]
else :
# Make the largest onset we can. The 'split' variable marks the break point.
for split in range(0, len(internuclei)+1) :
coda = internuclei[:split]
onset = internuclei[split:]
# If we are looking at a valid onset, or if we're at the start of the word
# (in which case an invalid onset is better than a coda that doesn't follow
# a nucleus), or if we've gone through all of the onsets and we didn't find
# any that are valid, then split the nonvowels we've seen at this location.
if " ".join(onset) in language["onsets"] \
or len(syllables) == 0 \
or len(onset) == 0 :
break
# Tack the coda onto the coda of the last syllable. Can't do it if this
# is the first syllable.
if len(syllables) > 0 :
syllables[-1][3].extend(coda)
# Make a new syllable out of the onset and nucleus.
syllables.append( (stress, onset, [phoneme], []) )
# At this point we've processed the internuclei list.
internuclei = []
elif not phoneme in language["consonants"] and phoneme != "." :
raise ValueError("Invalid phoneme: " + phoneme)
else : # a consonant
internuclei.append(phoneme)
# Done looping through phonemes. We may have consonants left at the end.
# We may have even not found a nucleus.
if len(internuclei) > 0 :
if len(syllables) == 0 :
syllables.append( (None, internuclei, [], []) )
else :
syllables[-1][3].extend(internuclei)
return syllables
def stringify(syllables) :
    '''This function takes a syllabification returned by syllabify and
    turns it into a string, with phonemes separated by spaces and
    syllables separated by periods.'''
ret = []
for syl in syllables :
stress, onset, nucleus, coda = syl
if stress != None and len(nucleus) != 0 :
nucleus[0] += str(stress)
ret.append(" ".join(onset + nucleus + coda))
return " . ".join(ret)
# If this module was run directly, syllabify the words on standard input
# into standard output. Hashed lines are printed back untouched.
if __name__ == "__main__" :
import sys
if len(sys.argv) != 2 :
print("Usage: python syllabifier.py english.cfg < textfile.txt > outfile.txt")
else :
L = loadLanguage(sys.argv[1])
for line in sys.stdin :
if line[0] == "#" :
sys.stdout.write(line)
continue
line = line.strip()
s = stringify(syllabify(L, line))
sys.stdout.write(s + "\n")
| dfm/twitterick | twitterick/syllabifier.py | Python | mit | 6,769 | 0.027921 |