| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6 to 947k | stringlengths 5 to 100 | stringlengths 4 to 231 | stringclasses 1 value | stringclasses 15 values | int64 6 to 947k | float64 0 to 0.34 |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
#
# @name: Wascan - Web Application Scanner
# @repo: https://github.com/m4ll0k/Wascan
# @author: Momo Outaadi (M4ll0k)
# @license: See the file 'LICENSE.txt'
import os
import sys
from lib.utils.dirs import *
from lib.utils.printer import *
from importlib import import_module
path = os.path.join(os.path.abspath('.').split('lib')[0],'plugins/attacks/')
def Attacks(kwargs,url,data):
    info('Starting attacks module...')
    for file in dirs(path):
        file = file.split('.py')[0]
        __import__('plugins.attacks.%s'%(file))
        module = sys.modules['plugins.attacks.%s'%(file)]
        module = module.__dict__[file]
        module(kwargs,url,data).run()
| m4ll0k/Spaghetti | lib/handler/attacks.py | Python | gpl-3.0 | 687 | 0.024745 |
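The handler above imports every module found in `plugins/attacks/`, looks up an attribute named after the file, and calls its `run()` method. A minimal sketch of a plugin that would satisfy this contract (the plugin name and body are assumptions for illustration, not part of Wascan):

```python
# plugins/attacks/sqli.py -- hypothetical plugin module; the loader above
# expects it to expose a callable named exactly like the file ("sqli").
class sqli(object):
    def __init__(self, kwargs, url, data):
        self.kwargs = kwargs  # parsed scanner options
        self.url = url        # target URL
        self.data = data      # POST data, if any

    def run(self):
        # a real plugin would send crafted requests and report findings here
        print("[*] checking %s for SQL injection" % self.url)
```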
""" Defines the base class for color maps
"""
from traits.api import Enum, HasTraits, Instance
from data_range_1d import DataRange1D
class AbstractColormap(HasTraits):
    """
    Abstract class for color maps, which map from scalar values to color values.
    """

    # The data-space bounds of the mapper.
    range = Instance(DataRange1D)

    # The color depth of the colors to use.
    color_depth = Enum('rgba', 'rgb')

    def map_screen(self, val):
        """
        map_screen(val) -> color

        Maps an array of values to an array of colors. If the input array is
        NxM, the returned array is NxMx3 or NxMx4, depending on the
        **color_depth** setting.
        """
        raise NotImplementedError()

    def map_data(self, ary):
        """
        map_data(ary) -> color_array

        Returns an array of values containing the colors mapping to the values
        in *ary*. If the input array is NxM, the returned array is NxMx3 or
        NxMx4, depending on the **color_depth** setting.
        """
        # XXX this seems bogus: by analogy with AbstractMapper, this should map
        # colors to data values, and that will be generally hard to do well.
        # no subclass implements this - CJW
        raise NotImplementedError()

    def map_index(self, ary):
        """
        map_index(ary) -> index into color_bands

        This method is like map_screen(), but it returns an array of indices
        into the color map's color bands instead of an array of colors. If the
        input array is NxM, then the output is NxM integer indices.

        This method might not apply to all color maps. Ones that cannot
        define a static set of color bands (e.g., function-defined color maps)
        are not able to implement this function.
        """
        raise NotImplementedError()

    def map_uint8(self, val):
        """
        map_uint8(val) -> rgb24 or rgba32 color

        Maps a single value to a single color. Color is represented as either
        a length-3 or length-4 array of rgb(a) uint8 values, depending on the
        **color_depth** setting.
        """
        # default implementation (not efficient)
        return (self.map_screen(val)*255.0).astype('uint8')
# EOF
| burnpanck/chaco | chaco/abstract_colormap.py | Python | bsd-3-clause | 2,253 | 0.001775 |
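The class above only defines the mapping interface; `map_screen()` is left to subclasses, and `map_uint8()` builds on it. As a hedged illustration (not part of Chaco), a toy grayscale colormap could implement the contract like this:

```python
import numpy as np

class GrayscaleColormap(AbstractColormap):
    """Toy subclass: map scalars in the mapper's range to gray RGBA values."""

    def map_screen(self, val):
        low, high = self.range.low, self.range.high  # DataRange1D bounds
        norm = np.clip((np.asarray(val, dtype=float) - low) / (high - low), 0.0, 1.0)
        rgba = np.empty(norm.shape + (4,))
        rgba[..., 0] = rgba[..., 1] = rgba[..., 2] = norm
        rgba[..., 3] = 1.0
        return rgba

# The inherited map_uint8() then works unchanged:
# (map_screen(val) * 255).astype('uint8')
```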
#the file is left intentionally blank
| equalitie/vengeance | src/util/__init__.py | Python | agpl-3.0 | 38 | 0.026316 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import SimpleTestCase, override_settings
@override_settings(ROOT_URLCONF='view_tests.generic_urls')
class URLHandling(SimpleTestCase):
    """
    Tests for URL handling in views and responses.
    """
    redirect_target = "/%E4%B8%AD%E6%96%87/target/"

    def test_nonascii_redirect(self):
        """
        A non-ASCII argument to HttpRedirect is handled properly.
        """
        response = self.client.get('/nonascii_redirect/')
        self.assertRedirects(response, self.redirect_target)

    def test_permanent_nonascii_redirect(self):
        """
        A non-ASCII argument to HttpPermanentRedirect is handled properly.
        """
        response = self.client.get('/permanent_nonascii_redirect/')
        self.assertRedirects(response, self.redirect_target, status_code=301)
| frishberg/django | tests/view_tests/tests/test_specials.py | Python | bsd-3-clause | 880 | 0 |
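The two tests above rely on a urlconf whose views redirect to a non-ASCII path; Django IRI-encodes the Location header into the percent-escaped `redirect_target`. A hedged sketch of what such views could look like (names mirror the URLs used above, but this is not the actual Django test code):

```python
# -*- coding: utf-8 -*-
# Hypothetical views matching /nonascii_redirect/ and /permanent_nonascii_redirect/.
from django.http import HttpResponseRedirect, HttpResponsePermanentRedirect

def nonascii_redirect(request):
    # Django converts the IRI to a URI, yielding /%E4%B8%AD%E6%96%87/target/
    return HttpResponseRedirect('/中文/target/')

def permanent_nonascii_redirect(request):
    return HttpResponsePermanentRedirect('/中文/target/')
```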
"""
Model fields for working with trees.
"""
from __future__ import unicode_literals
from django.db import models
from django.conf import settings
from mptt.forms import TreeNodeChoiceField, TreeNodeMultipleChoiceField
__all__ = ('TreeForeignKey', 'TreeOneToOneField', 'TreeManyToManyField')
class TreeForeignKey(models.ForeignKey):
    """
    Extends the foreign key, but uses mptt's ``TreeNodeChoiceField`` as
    the default form field.

    This is useful if you are creating models that need automatically
    generated ModelForms to use the correct widgets.
    """

    def formfield(self, **kwargs):
        """
        Use MPTT's ``TreeNodeChoiceField``
        """
        kwargs.setdefault('form_class', TreeNodeChoiceField)
        return super(TreeForeignKey, self).formfield(**kwargs)


class TreeOneToOneField(models.OneToOneField):
    def formfield(self, **kwargs):
        kwargs.setdefault('form_class', TreeNodeChoiceField)
        return super(TreeOneToOneField, self).formfield(**kwargs)


class TreeManyToManyField(models.ManyToManyField):
    def formfield(self, **kwargs):
        kwargs.setdefault('form_class', TreeNodeMultipleChoiceField)
        return super(TreeManyToManyField, self).formfield(**kwargs)


# South integration
if 'south' in settings.INSTALLED_APPS:  # pragma: no cover
    from south.modelsinspector import add_introspection_rules
    add_introspection_rules([], ["^mptt\.fields\.TreeForeignKey"])
    add_introspection_rules([], ["^mptt\.fields\.TreeOneToOneField"])
    add_introspection_rules([], ["^mptt\.fields\.TreeManyToManyField"])
| watchdogpolska/django-mptt | mptt/fields.py | Python | mit | 1,592 | 0.004397 |
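Because these fields only swap in tree-aware form fields, adopting them is a drop-in change on the model side. A hedged example (model names invented, not part of django-mptt):

```python
# Hypothetical models.py: the auto-generated ModelForm for Category will now
# render 'parent' with TreeNodeChoiceField instead of a flat ModelChoiceField.
from django.db import models
from mptt.models import MPTTModel
from mptt.fields import TreeForeignKey

class Category(MPTTModel):
    name = models.CharField(max_length=50)
    parent = TreeForeignKey('self', null=True, blank=True,
                            related_name='children')
```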
#-------------------------------------------------------------------------------
# . File : ParseScans.py
# . Program : MolarisTools
# . Copyright : USC, Mikolaj Feliks (2015-2018)
# . License : GNU GPL v3.0 (http://www.gnu.org/licenses/gpl-3.0.en.html)
#-------------------------------------------------------------------------------
import os, math, glob
from MolarisTools.Parser import MolarisOutputFile, MolarisInputFile  # MolarisInputFile is needed when useDistances=True
def ParsePESScan (pattern="evb_scan_", filenameTotal="total_e.dat", filenameTotalRelative="total_e_rel.dat", patternChanges="changes_", baselineIndex=-1, maximumIndex=-1, logging=True):
"""Parse a potential energy surface scan."""
steps = []
files = glob.glob ("%s*.out" % pattern)
files.sort ()
for fn in files:
mof = MolarisOutputFile (filename=fn, logging=False)
if logging:
print ("# . Parsing file %s ..." % fn)
collect = []
for step in mof.qmmmComponentsI:
Eqmmm = step.Eqmmm
collect.append (Eqmmm)
if logging:
nsteps = len (collect)
print ("# . Found %d steps" % nsteps)
steps.append (collect)
if logging:
ntotal = 0
for step in steps:
ntotal += len (step)
print ("Found %d total steps in %d files" % (ntotal, len (files)))
m, n = (-1, -1)
first = steps[0][-1]
if (maximumIndex > -1):
topRel = steps[maximumIndex][-1]
else:
topRel = first
for (i, step) in enumerate (steps, 1):
if (step[-1] > topRel):
(topRel, n) = (step[-1], i)
if (baselineIndex > -1):
baseRel = steps[baselineIndex][-1]
else:
baseRel = first
for (i, step) in enumerate (steps, 1):
if (step[-1] < baseRel):
(baseRel, m) = (step[-1], i)
barrier = topRel - baseRel
if (m > -1 and n > -1):
if logging:
print ("Found a barrier between points (%d, %d) of %.1f kcal/mol" % (m, n, barrier))
for (filename, subtract) in ((filenameTotal, first), (filenameTotalRelative, baseRel)):
fo = open (filename, "w")
for (i, step) in enumerate (steps, 1):
last = step[-1]
fo.write ("%d %f\n" % (i, last - subtract))
fo.close ()
if logging:
print ("Wrote file %s" % filename)
for (i, step) in enumerate (steps, 1):
filename = "%s%03d.dat" % (patternChanges, i)
fo = open (filename, "w")
previous = step[0]
for iteration in step[1:]:
change = (iteration - previous)
previous = iteration
fo.write ("%f\n" % change)
fo.close ()
if logging:
print ("Wrote file %s" % filename)
def ParsePESScan2D (pattern="evb_scan_", filenameTotal="total_e.dat", filenameTotalRelative="total_e_rel.dat", useDistances=False, zigzag=False, logging=True):
"""Parse a two-dimensional potential energy surface scan."""
files = glob.glob ("%s*.inp" % pattern)
nfiles = len (files)
size = int (math.sqrt (nfiles))
base = 0.
found = False
for i in range (1, size + 1):
for j in range (1, size + 1):
filename = "%s%02d_%02d.out" % (pattern, 1, 1)
if os.path.exists (filename):
if logging:
print ("# . Parsing file %s ..." % filename)
mof = MolarisOutputFile (filename=filename, logging=False)
last = mof.qmmmComponentsI[-1]
base = last.Eqmmm
found = True
break
if found:
break
if logging:
print ("# . Base energy is %f" % base)
rows = []
pairs = []
nlogs = 0
for i in range (1, size + 1):
columns = []
gather = []
for j in range (1, size + 1):
je = j
if zigzag:
# . In a zig-zag scan, the second coordinate
# . increases and decreases for odd and even iterations
# . of the first coordinate, respectively.
if ((i % 2) == 0):
je = size - j + 1
filename = "%s%02d_%02d.out" % (pattern, i, je)
Eqmmm = base
if os.path.exists (filename):
if logging:
print ("# . Parsing file %s ..." % filename)
mof = MolarisOutputFile (filename=filename, logging=False)
last = mof.qmmmComponentsI[-1]
Eqmmm = last.Eqmmm
if logging:
nsteps = len (mof.qmmmComponentsI)
print ("# . Found %d steps" % nsteps)
nlogs += 1
columns.append (Eqmmm)
if useDistances:
filename = "%s%02d_%02d.inp" % (pattern, i, je)
molaris = MolarisInputFile (filename, logging=False)
(coi, coj) = molaris.constrainedPairs[:2]
(ri, rj) = (coi.req, coj.req)
pair = (ri, rj)
gather.append (pair)
rows.append (columns)
if useDistances:
pairs.append (gather)
if logging:
print ("Parsed %d log files out of %d calculations" % (nlogs, nfiles))
low, high = (base, base)
for (i, row) in enumerate (rows):
for (j, column) in enumerate (row):
if (column < low):
low = column
(li, lj) = (i, j)
if (column > high):
high = column
(hi, hj) = (i, j)
if logging:
if useDistances:
(ra, rb) = pairs[li][lj]
print ("Lowest point at (%.2f, %.2f) with energy of %.2f kcals" % (ra, rb, low ))
(ra, rb) = pairs[hi][hj]
print ("Highest point at (%.2f, %.2f) with energy of %.2f kcals" % (ra, rb, high))
else:
print ("Lowest point at (%d, %d) with energy of %.2f kcals" % (li + 1, lj + 1, low ))
print ("Highest point at (%d, %d) with energy of %.2f kcals" % (hi + 1, hj + 1, high))
difference = high - low
print ("Energy difference is %.1f kcals" % difference)
for (filename, subtract) in ((filenameTotal, 0.), (filenameTotalRelative, low)):
fo = open (filename, "w")
for (i, row) in enumerate (rows):
for (j, column) in enumerate (row):
if useDistances:
(idist, jdist) = pairs[i][j]
line = "%6.2f %6.2f %f" % (idist, jdist, rows[i][j] - subtract)
else:
line = "%2d %2d %f" % (j + 1, i + 1, rows[i][j] - subtract)
fo.write ("%s\n" % line)
fo.write ("\n")
fo.close ()
if logging:
print ("Wrote file %s" % filename)
nrows = len (rows)
for irow in range (1, nrows - 2):
ncolumns = len (rows[irow])
for jcolumn in range (1, ncolumns - 2):
up = rows[irow - 1][jcolumn ]
down = rows[irow + 1][jcolumn ]
left = rows[irow ][jcolumn - 1]
right = rows[irow ][jcolumn + 1]
center = rows[irow ][jcolumn ]
checks = (center < up, center < down, center < left, center < right)
if all (checks):
if logging:
if useDistances:
(idist, jdist) = pairs[irow][jcolumn]
print ("Found minimum at (%.2f, %.2f) with energy of %.2f kcals" % (idist, jdist, center - low))
else:
print ("Found minimum at (%d, %d) with energy of %.2f kcals" % (irow, jcolumn, center - low))
checks = (center > up, center > down, center > left, center > right)
if all (checks):
if logging:
if useDistances:
(idist, jdist) = pairs[irow][jcolumn]
print ("Found maximum at (%.2f, %.2f) with energy of %.2f kcals" % (idist, jdist, center - low))
else:
print ("Found maximum at (%d, %d) with energy of %.2f kcals" % (irow, jcolumn, center - low))
#===============================================================================
# . Main program
#===============================================================================
if (__name__ == "__main__"): pass
| mfx9/molaris-tools | MolarisTools/Scripts/ParseScans.py | Python | gpl-3.0 | 8,470 | 0.016411 |
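Both functions are driven purely by file-name patterns in the working directory. A hedged usage sketch for post-processing a finished one-dimensional scan (the import path and patterns are assumptions):

```python
# Hypothetical driver: run inside the directory holding the evb_scan_*.out files.
from MolarisTools.Scripts.ParseScans import ParsePESScan

# Writes total_e.dat (absolute energies), total_e_rel.dat (relative to the
# chosen baseline) and one changes_XXX.dat file per scan point.
ParsePESScan(pattern="evb_scan_", logging=True)
```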
# -*- coding: utf-8 -*-
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains gsutil base unit test case class."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import logging
import os
import sys
import tempfile
import six
import boto
from boto.utils import get_utf8able_str
from gslib import project_id
from gslib import wildcard_iterator
from gslib.boto_translation import BotoTranslation
from gslib.cloud_api_delegator import CloudApiDelegator
from gslib.command_runner import CommandRunner
from gslib.cs_api_map import ApiMapConstants
from gslib.cs_api_map import ApiSelector
from gslib.discard_messages_queue import DiscardMessagesQueue
from gslib.tests.mock_logging_handler import MockLoggingHandler
from gslib.tests.testcase import base
import gslib.tests.util as util
from gslib.tests.util import unittest
from gslib.tests.util import WorkingDirectory
from gslib.utils.constants import UTF8
from gslib.utils.text_util import print_to_fd
def _AttemptToCloseSysFd(fd):
"""Suppress IOError when closing sys.stdout or sys.stderr in tearDown."""
# In PY2, if another sibling thread/process tried closing it at the same
# time we did, it succeeded either way, so we just continue. This approach
# was taken from https://github.com/pytest-dev/pytest/pull/3305.
if not six.PY2: # This doesn't happen in PY3, AFAICT.
fd.close()
return
try:
fd.close()
except IOError:
pass
class GsutilApiUnitTestClassMapFactory(object):
"""Class map factory for use in unit tests.
BotoTranslation is used for all cases so that GSMockBucketStorageUri can
be used to communicate with the mock XML service.
"""
@classmethod
def GetClassMap(cls):
"""Returns a class map for use in unit tests."""
gs_class_map = {
ApiSelector.XML: BotoTranslation,
ApiSelector.JSON: BotoTranslation
}
s3_class_map = {ApiSelector.XML: BotoTranslation}
class_map = {'gs': gs_class_map, 's3': s3_class_map}
return class_map
@unittest.skipUnless(util.RUN_UNIT_TESTS, 'Not running integration tests.')
class GsUtilUnitTestCase(base.GsUtilTestCase):
"""Base class for gsutil unit tests."""
@classmethod
def setUpClass(cls):
base.GsUtilTestCase.setUpClass()
cls.mock_bucket_storage_uri = util.GSMockBucketStorageUri
cls.mock_gsutil_api_class_map_factory = GsutilApiUnitTestClassMapFactory
cls.logger = logging.getLogger()
cls.command_runner = CommandRunner(
bucket_storage_uri_class=cls.mock_bucket_storage_uri,
gsutil_api_class_map_factory=cls.mock_gsutil_api_class_map_factory)
# Ensure unit tests don't fail if no default_project_id is defined in the
# boto config file.
project_id.UNIT_TEST_PROJECT_ID = 'mock-project-id-for-unit-tests'
def setUp(self):
super(GsUtilUnitTestCase, self).setUp()
self.bucket_uris = []
self.stdout_save = sys.stdout
self.stderr_save = sys.stderr
fd, self.stdout_file = tempfile.mkstemp()
sys.stdout = os.fdopen(fd, 'wb+')
fd, self.stderr_file = tempfile.mkstemp()
# do not set sys.stderr to be 'wb+' - it will blow up the logger
sys.stderr = os.fdopen(fd, 'w+')
self.accumulated_stdout = []
self.accumulated_stderr = []
self.root_logger = logging.getLogger()
self.is_debugging = self.root_logger.isEnabledFor(logging.DEBUG)
self.log_handlers_save = self.root_logger.handlers
fd, self.log_handler_file = tempfile.mkstemp()
self.log_handler_stream = os.fdopen(fd, 'w+')
self.temp_log_handler = logging.StreamHandler(self.log_handler_stream)
self.root_logger.handlers = [self.temp_log_handler]
def tearDown(self):
super(GsUtilUnitTestCase, self).tearDown()
self.root_logger.handlers = self.log_handlers_save
self.temp_log_handler.flush()
self.temp_log_handler.close()
self.log_handler_stream.seek(0)
log_output = self.log_handler_stream.read()
self.log_handler_stream.close()
os.unlink(self.log_handler_file)
sys.stdout.seek(0)
sys.stderr.seek(0)
if six.PY2:
stdout = sys.stdout.read()
stderr = sys.stderr.read()
else:
try:
stdout = sys.stdout.read()
stderr = sys.stderr.read()
except UnicodeDecodeError:
sys.stdout.seek(0)
sys.stderr.seek(0)
stdout = sys.stdout.buffer.read()
stderr = sys.stderr.buffer.read()
[six.ensure_text(string) for string in self.accumulated_stderr]
[six.ensure_text(string) for string in self.accumulated_stdout]
stdout = six.ensure_text(get_utf8able_str(stdout))
stderr = six.ensure_text(get_utf8able_str(stderr))
stdout += ''.join(self.accumulated_stdout)
stderr += ''.join(self.accumulated_stderr)
_AttemptToCloseSysFd(sys.stdout)
_AttemptToCloseSysFd(sys.stderr)
sys.stdout = self.stdout_save
sys.stderr = self.stderr_save
os.unlink(self.stdout_file)
os.unlink(self.stderr_file)
_id = six.ensure_text(self.id())
if self.is_debugging and stdout:
print_to_fd('==== stdout {} ====\n'.format(_id), file=sys.stderr)
print_to_fd(stdout, file=sys.stderr)
print_to_fd('==== end stdout ====\n', file=sys.stderr)
if self.is_debugging and stderr:
print_to_fd('==== stderr {} ====\n'.format(_id), file=sys.stderr)
print_to_fd(stderr, file=sys.stderr)
print_to_fd('==== end stderr ====\n', file=sys.stderr)
if self.is_debugging and log_output:
print_to_fd('==== log output {} ====\n'.format(_id), file=sys.stderr)
print_to_fd(log_output, file=sys.stderr)
print_to_fd('==== end log output ====\n', file=sys.stderr)
def RunCommand(self,
command_name,
args=None,
headers=None,
debug=0,
return_stdout=False,
return_stderr=False,
return_log_handler=False,
cwd=None):
"""Method for calling gslib.command_runner.CommandRunner.
Passes parallel_operations=False for all tests, optionally saving/returning
stdout output. We run all tests multi-threaded, to exercise those more
complicated code paths.
TODO: Change to run with parallel_operations=True for all tests. At
present when you do this it causes many test failures.
Args:
command_name: The name of the command being run.
args: Command-line args (arg0 = actual arg, not command name ala bash).
headers: Dictionary containing optional HTTP headers to pass to boto.
debug: Debug level to pass in to boto connection (range 0..3).
return_stdout: If True, will save and return stdout produced by command.
return_stderr: If True, will save and return stderr produced by command.
return_log_handler: If True, will return a MockLoggingHandler instance
that was attached to the command's logger while running.
cwd: The working directory that should be switched to before running the
command. The working directory will be reset back to its original
value after running the command. If not specified, the working
directory is left unchanged.
Returns:
One or a tuple of requested return values, depending on whether
return_stdout, return_stderr, and/or return_log_handler were specified.
Return Types:
stdout - binary
stderr - str (binary in Py2, text in Py3)
log_handler - MockLoggingHandler
"""
args = args or []
command_line = six.ensure_text(' '.join([command_name] + args))
if self.is_debugging:
print_to_fd('\nRunCommand of {}\n'.format(command_line),
file=self.stderr_save)
# Save and truncate stdout and stderr for the lifetime of RunCommand. This
# way, we can return just the stdout and stderr that was output during the
# RunNamedCommand call below.
sys.stdout.seek(0)
sys.stderr.seek(0)
stdout = sys.stdout.read()
stderr = sys.stderr.read()
if stdout:
self.accumulated_stdout.append(stdout)
if stderr:
self.accumulated_stderr.append(stderr)
sys.stdout.seek(0)
sys.stderr.seek(0)
sys.stdout.truncate()
sys.stderr.truncate()
mock_log_handler = MockLoggingHandler()
logging.getLogger(command_name).addHandler(mock_log_handler)
if debug:
logging.getLogger(command_name).setLevel(logging.DEBUG)
try:
with WorkingDirectory(cwd):
self.command_runner.RunNamedCommand(command_name,
args=args,
headers=headers,
debug=debug,
parallel_operations=False,
do_shutdown=False)
finally:
sys.stdout.seek(0)
sys.stderr.seek(0)
if six.PY2:
stdout = sys.stdout.read()
stderr = sys.stderr.read()
else:
try:
stdout = sys.stdout.read()
stderr = sys.stderr.read()
except UnicodeDecodeError:
sys.stdout.seek(0)
sys.stderr.seek(0)
stdout = sys.stdout.buffer.read().decode(UTF8)
stderr = sys.stderr.buffer.read().decode(UTF8)
logging.getLogger(command_name).removeHandler(mock_log_handler)
mock_log_handler.close()
log_output = '\n'.join(
'%s:\n ' % level + '\n '.join(records)
for level, records in six.iteritems(mock_log_handler.messages)
if records)
_id = six.ensure_text(self.id())
if self.is_debugging and log_output:
print_to_fd('==== logging RunCommand {} {} ====\n'.format(
_id, command_line),
file=self.stderr_save)
print_to_fd(log_output, file=self.stderr_save)
print_to_fd('\n==== end logging ====\n', file=self.stderr_save)
if self.is_debugging and stdout:
print_to_fd('==== stdout RunCommand {} {} ====\n'.format(
_id, command_line),
file=self.stderr_save)
print_to_fd(stdout, file=self.stderr_save)
print_to_fd('==== end stdout ====\n', file=self.stderr_save)
if self.is_debugging and stderr:
print_to_fd('==== stderr RunCommand {} {} ====\n'.format(
_id, command_line),
file=self.stderr_save)
print_to_fd(stderr, file=self.stderr_save)
print_to_fd('==== end stderr ====\n', file=self.stderr_save)
# Reset stdout and stderr files, so that we won't print them out again
# in tearDown if debugging is enabled.
sys.stdout.seek(0)
sys.stderr.seek(0)
sys.stdout.truncate()
sys.stderr.truncate()
to_return = []
if return_stdout:
to_return.append(stdout)
if return_stderr:
to_return.append(stderr)
if return_log_handler:
to_return.append(mock_log_handler)
if len(to_return) == 1:
return to_return[0]
return tuple(to_return)
@classmethod
def MakeGsUtilApi(cls, debug=0):
gsutil_api_map = {
ApiMapConstants.API_MAP:
(cls.mock_gsutil_api_class_map_factory.GetClassMap()),
ApiMapConstants.SUPPORT_MAP: {
'gs': [ApiSelector.XML, ApiSelector.JSON],
's3': [ApiSelector.XML]
},
ApiMapConstants.DEFAULT_MAP: {
'gs': ApiSelector.JSON,
's3': ApiSelector.XML
}
}
return CloudApiDelegator(cls.mock_bucket_storage_uri,
gsutil_api_map,
cls.logger,
DiscardMessagesQueue(),
debug=debug)
@classmethod
def _test_wildcard_iterator(cls, uri_or_str, debug=0):
"""Convenience method for instantiating a test instance of WildcardIterator.
This makes it unnecessary to specify all the params of that class
(like bucket_storage_uri_class=mock_storage_service.MockBucketStorageUri).
Also, naming the factory method this way makes it clearer in the test code
that WildcardIterator needs to be set up for testing.
Args are same as for wildcard_iterator.wildcard_iterator(), except
there are no class args for bucket_storage_uri_class or gsutil_api_class.
Args:
uri_or_str: StorageUri or string representing the wildcard string.
debug: debug level to pass to the underlying connection (0..3)
Returns:
WildcardIterator, over which caller can iterate.
"""
# TODO: Remove when tests no longer pass StorageUri arguments.
uri_string = uri_or_str
if hasattr(uri_or_str, 'uri'):
uri_string = uri_or_str.uri
return wildcard_iterator.CreateWildcardIterator(uri_string,
cls.MakeGsUtilApi())
@staticmethod
def _test_storage_uri(uri_str, default_scheme='file', debug=0, validate=True):
"""Convenience method for instantiating a testing instance of StorageUri.
This makes it unnecessary to specify
bucket_storage_uri_class=mock_storage_service.MockBucketStorageUri.
Also naming the factory method this way makes it clearer in the test
code that StorageUri needs to be set up for testing.
Args, Returns, and Raises are same as for boto.storage_uri(), except there's
no bucket_storage_uri_class arg.
Args:
uri_str: Uri string to create StorageUri for.
default_scheme: Default scheme for the StorageUri
debug: debug level to pass to the underlying connection (0..3)
validate: If True, validate the resource that the StorageUri refers to.
Returns:
StorageUri based on the arguments.
"""
return boto.storage_uri(uri_str, default_scheme, debug, validate,
util.GSMockBucketStorageUri)
def CreateBucket(self,
bucket_name=None,
test_objects=0,
storage_class=None,
provider='gs'):
"""Creates a test bucket.
The bucket and all of its contents will be deleted after the test.
Args:
bucket_name: Create the bucket with this name. If not provided, a
temporary test bucket name is constructed.
test_objects: The number of objects that should be placed in the bucket or
a list of object names to place in the bucket. Defaults to
0.
storage_class: storage class to use. If not provided we use standard.
provider: string provider to use, default gs.
Returns:
StorageUri for the created bucket.
"""
bucket_name = bucket_name or self.MakeTempName('bucket')
bucket_uri = boto.storage_uri(
'%s://%s' % (provider, bucket_name.lower()),
suppress_consec_slashes=False,
bucket_storage_uri_class=util.GSMockBucketStorageUri)
bucket_uri.create_bucket(storage_class=storage_class)
self.bucket_uris.append(bucket_uri)
try:
iter(test_objects)
except TypeError:
test_objects = [self.MakeTempName('obj') for _ in range(test_objects)]
for i, name in enumerate(test_objects):
self.CreateObject(bucket_uri=bucket_uri,
object_name=name,
contents='test {}'.format(i).encode(UTF8))
return bucket_uri
def CreateObject(self, bucket_uri=None, object_name=None, contents=None):
"""Creates a test object.
Args:
bucket_uri: The URI of the bucket to place the object in. If not
specified, a new temporary bucket is created.
object_name: The name to use for the object. If not specified, a temporary
test object name is constructed.
contents: The contents to write to the object. If not specified, the key
is not written to, which means that it isn't actually created
yet on the server.
Returns:
A StorageUri for the created object.
"""
bucket_uri = bucket_uri or self.CreateBucket(provider=self.default_provider)
object_name = object_name or self.MakeTempName('obj')
key_uri = bucket_uri.clone_replace_name(object_name)
if contents is not None:
key_uri.set_contents_from_string(contents)
return key_uri
| endlessm/chromium-browser | third_party/catapult/third_party/gsutil/gslib/tests/testcase/unit_testcase.py | Python | bsd-3-clause | 16,831 | 0.004872 |
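Concrete unit tests drive gsutil through `RunCommand()` against the mock XML service configured above. A hedged sketch of such a test (command and names invented for illustration, not an actual gsutil test):

```python
# Hypothetical test module built on GsUtilUnitTestCase.
from gslib.tests.testcase.unit_testcase import GsUtilUnitTestCase


class TestLsUnit(GsUtilUnitTestCase):
  """Sketch: create a mock bucket with one object and list it."""

  def test_ls_shows_created_object(self):
    bucket_uri = self.CreateBucket(test_objects=['hello.txt'])
    stdout = self.RunCommand('ls', [bucket_uri.uri], return_stdout=True)
    self.assertIn('hello.txt', stdout)
```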
"""
WSGI config for text_analysis project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "text_analysis.settings")
application = get_wsgi_application()
| bungoume/mecab-web-api | text_analysis/text_analysis/wsgi.py | Python | mit | 403 | 0 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Provides functions for creating a python classes from the cp2k_input.xml
file.
"""
import xml.etree.cElementTree as cElementTree
import utilities
import textwrap
#===============================================================================
def validify_section(string):
"""Modifies the section name so that it can be used as a valid attribute
name in a python class.
"""
original = string
changed = False
if "-" in string:
changed = True
string = string.replace("-", "_")
if "+" in string:
changed = True
string = string.replace("+", "PLUS")
if string[0].isdigit():
changed = True
string = "NUM" + string
if changed:
print(" Section {} replaced with {}".format(original, string))
return string
#===============================================================================
def validify_keyword(string):
"""Modifies the keyword name so that it can be used as a valid attribute
name in a python class.
"""
original = string
changed = False
if "-" in string:
changed = True
string = string.replace("-", "_")
if "+" in string:
changed = True
string = string.replace("+", "PLUS")
if string[0].isdigit():
changed = True
string = "NUM" + string
if changed:
print(" Keyword {} replaced with {}".format(original, string))
string = string.capitalize()
return string
#===============================================================================
def create_docstring(item):
description = item.find("DESCRIPTION")
default_value = item.find("DEFAULT_VALUE")
default_unit = item.find("DEFAULT_UNIT")
# Description
output = " \"\"\"\n"
if description is not None:
if description.text is not None:
for line in textwrap.wrap(description.text):
output += " " + line + "\n"
# If the values are enumerated, document the possible values
data_type = item.find("DATA_TYPE")
if data_type.get("kind") == "keyword":
output += "\n Available values:\n"
enumerations = data_type.find("ENUMERATION")
for enum in enumerations.findall("ITEM"):
output += " " + enum.find("NAME").text + "\n"
enum_description = enum.find("DESCRIPTION").text
if enum_description is not None:
for line in textwrap.wrap(enum_description):
output += " " + line + "\n"
# Default value
if default_value is not None:
if default_value.text is not None:
output += "\n Default value:\n"
for line in textwrap.wrap(default_value.text):
output += " " + line + "\n"
# Default unit
if default_unit is not None:
output += "\n Default unit:\n"
if default_unit.text is not None:
for line in textwrap.wrap(default_unit.text):
output += " " + line + "\n"
output += " \"\"\"\n"
return output
#===============================================================================
def recursive_class_creation(section, level, class_dictionary, version_dictionary):
"""Recursively goes throught the .xml file created by cp2k --xml command
and creates a python class for each section. Keywords, default keywords,
section parameters and subsections are stored as attributes.
"""
default_keywords = {}
repeated_default_keywords = {}
keywords = {}
repeated_keywords = {}
subsections = {}
repeated_subsections = {}
repeated_aliases = {}
aliases = {}
attributes = []
inp_name = ""
# Initial string for each section of the class
imports = ["from pycp2k.inputsection import InputSection"]
docstring = ""
properties = ""
setters = ""
public = (
" def __init__(self):\n"
" InputSection.__init__(self)\n"
)
private = ""
class_subsections = ""
functions = "\n"
# The root with tag CP2K_INPUT doesn't have a name. It is hardcoded here.
sectionname = section.find("NAME")
if sectionname is None:
class_name = "_CP2K_INPUT"
inp_name = "CP2K_INPUT"
else:
class_name = sectionname.text
inp_name = class_name
class_name = validify_section(class_name)
class_name = "_" + class_name.lower()
# Start writing class body
section_description = section.find("DESCRIPTION")
# if section_description is not None and section_description.text is not None:
# docstring += " \"\"\"\n"
# for line in textwrap.wrap(section_description.text):
# docstring += " " + line + "\n"
# docstring += " \"\"\"\n"
# else:
# docstring += " \"\"\"\"\"\"\n"
#---------------------------------------------------------------------------
# Create attribute for section parameter
section_parameters = section.find("SECTION_PARAMETERS")
if section_parameters is not None:
attributes.append("Section_parameters")
public += " self.Section_parameters = None\n"
# Write the description for the section parameter
# public += create_docstring(section_parameters)
#---------------------------------------------------------------------------
# Create attribute for all the keywords
for keyword in section.findall("KEYWORD"):
# First find out the default name and whether the attribute is visible or not
default_name = ""
visible = True
for keyname in keyword.findall("NAME"):
keytype = keyname.get("type")
name = keyname.text
newname = validify_keyword(name)
if keytype == "default":
default_name = newname
if name.startswith("__"):
visible = False
# Now store the keywords as class attributes
if visible:
for keyname in keyword.findall("NAME"):
name = keyname.text
newname = validify_keyword(name)
# Create original attribute for the default keyname
if newname == default_name:
# Special case for repeateable keywords.
if keyword.get("repeats") == "yes":
public += " self." + newname + " = []\n"
# public += create_docstring(keyword)
repeated_keywords[newname] = name
else:
public += " self." + newname + " = None\n"
# public += create_docstring(keyword)
keywords[newname] = name
# Create properties for aliases
else:
if keyword.get("repeats") == "yes":
public += " self." + newname + " = self." + default_name + "\n"
repeated_aliases[newname] = default_name
else:
aliases[newname] = default_name
properties += ("\n @property\n"
" def " + newname + "(self):\n"
" \"\"\"\n"
" See documentation for " + default_name + "\n"
" \"\"\"\n"
" return self." + default_name + "\n")
setters += ("\n @" + newname + ".setter\n"
" def " + newname + "(self, value):\n"
" self." + default_name + " = value\n")
#---------------------------------------------------------------------------
# Create a class attribute for all DEFAULT_KEYWORDS
default_keyword = section.find("DEFAULT_KEYWORD")
if default_keyword is not None:
attributes.append("Default_keyword")
# Special case for repeateable default_keywords. Create a dictionary of the
# keyword and add a function for creating them.
name = default_keyword.find("NAME").text
newname = validify_keyword(name)
if default_keyword.get("repeats") == "yes":
public += " self." + newname + " = []\n"
# public += create_docstring(default_keyword)
repeated_default_keywords[newname] = name
else:
public += " self." + newname + " = None\n"
# public += create_docstring(default_keyword)
default_keywords[newname] = name
#---------------------------------------------------------------------------
# Create attribute for each subsection
for subsection in section.findall("SECTION"):
member_class_name = recursive_class_creation(subsection, level+1, class_dictionary, version_dictionary)
member_name = subsection.find("NAME").text
member_name = member_name.replace("-", "_")
member_name = member_name.replace("+", "PLUS")
if member_name[0].isdigit():
member_name = "_" + member_name
imports.append("from .{0} import {0}".format(member_class_name))
# Special case for repeateable sections. Create a dictionary of the
# subsections and add a function for creating them.
if subsection.get("repeats") == "yes":
class_subsections += " self." + member_name + "_list = []\n"
repeated_subsections[member_name] = member_class_name
attributes.append(member_name + "_list")
else:
class_subsections += " self." + member_name + " = " + member_class_name + "()\n"
subsections[member_name] = subsection.find("NAME").text
#---------------------------------------------------------------------------
# Write a list of the stored variable names
private += " self._name = \"" + str(inp_name) + "\"\n"
if len(keywords) != 0:
private += " self._keywords = " + str(keywords) + "\n"
if len(repeated_keywords) != 0:
private += " self._repeated_keywords = " + str(repeated_keywords) + "\n"
if len(default_keywords) != 0:
private += " self._default_keywords = " + str(default_keywords) + "\n"
if len(repeated_default_keywords) != 0:
private += " self._repeated_default_keywords = " + str(repeated_default_keywords) + "\n"
if len(subsections) != 0:
private += " self._subsections = " + str(subsections) + "\n"
if len(repeated_subsections) != 0:
private += " self._repeated_subsections = " + str(repeated_subsections) + "\n"
if len(aliases) != 0:
private += " self._aliases = " + str(aliases) + "\n"
if len(repeated_aliases) != 0:
private += " self._repeated_aliases = " + str(repeated_aliases) + "\n"
if len(attributes) != 0:
private += " self._attributes = " + str(attributes) + "\n"
#---------------------------------------------------------------------------
# Write a function for adding repeateable sections
for repeated in repeated_subsections.items():
attribute_name = repeated[0]
attribute_class_name = repeated[1]
functions += (" def " + attribute_name + "_add(self, section_parameters=None):\n"
" new_section = " + attribute_class_name + "()\n"
" if section_parameters is not None:\n"
" if hasattr(new_section, 'Section_parameters'):\n"
" new_section.Section_parameters = section_parameters\n"
" self." + attribute_name + "_list.append(new_section)\n"
" return new_section\n\n")
#---------------------------------------------------------------------------
# The class names are not unique. Use numbering to identify classes.
exists = False
import_string = "\n".join(imports)
class_string = docstring + public + class_subsections + private + functions + properties + setters
version_number = version_dictionary.get(class_name)
if version_number is None:
version_dictionary[class_name] = 1
class_dictionary[class_name+str(1)] = (import_string, class_string)
return class_name+str(1)
for version in range(version_number):
old_class_body = class_dictionary[class_name+str(version + 1)]
if old_class_body == class_string:
exists = True
version_number = version + 1
break
if not exists:
version_dictionary[class_name] = version_number + 1
class_dictionary[class_name+str(version_number + 1)] = (import_string, class_string)
return class_name + str(version_number + 1)
else:
return class_name + str(version_number)
#===============================================================================
def main(xml_path):
"""Parses the classes and saves them to the package directory as
parsedclasses.py.
"""
# Start parsing here
utilities.print_subtitle("CREATING INPUT STRUCTURE...")
tree = cElementTree.parse(xml_path)
root = tree.getroot()
# Extract the cp2k version and revision
version = root.find("CP2K_VERSION").text
revision = root.find("COMPILE_REVISION").text
class_dictionary = {}
version_dictionary = {}
recursive_class_creation(root, 0, class_dictionary, version_dictionary)
# Put each class into its own module. This produces many small files, but
# this way it is easier for autocompletion to handle everything
for class_name, class_body in class_dictionary.items():
with open('pycp2k/classes/{}.py'.format(class_name), 'w') as file:
file.write(class_body[0] + "\n\n\n")
class_body_header = (
"class " + class_name + "(InputSection):\n"
)
file.write(class_body_header)
file.write(class_body[1])
return (version, revision)
# Run main function by default
if __name__ == "__main__":
main()
| SINGROUP/pycp2k | inputparser.py | Python | lgpl-3.0 | 14,369 | 0.003549 |
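The `validify_*` helpers above are pure string transforms, so their behaviour can be pinned down with a few examples (illustrative only, derived directly from the code above):

```python
# Expected behaviour of the name-mangling helpers, assuming they are imported
# from this module.
assert validify_section("FORCE-EVAL") == "FORCE_EVAL"   # "-" is not a valid identifier char
assert validify_section("2DPLOT") == "NUM2DPLOT"        # leading digit gets a "NUM" prefix
assert validify_keyword("BASIS-SET") == "Basis_set"     # keywords are also capitalized
assert validify_keyword("U+") == "Uplus"                # "+" -> "PLUS", then .capitalize()
```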
# Copyright (c) 2009-2010 Stanford University
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR(S) DISCLAIM ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL AUTHORS BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# Note in order for this module to work you must have libramcloud.so
# somewhere in a system library path and have run /sbin/ldconfig since
# installing it
import ctypes
from ctypes.util import find_library
import itertools
import os
class RejectRules(ctypes.Structure):
_fields_ = [("given_version", ctypes.c_uint64),
("object_doesnt_exist", ctypes.c_uint8, 8),
("object_exists", ctypes.c_uint8, 8),
("version_eq_given", ctypes.c_uint8, 8),
("version_gt_given", ctypes.c_uint8, 8),
]
def _as_tuple(self):
return (self.object_doesnt_exist, self.object_exists,
self.version_eq_given, self.version_gt_given,
self.given_version)
def __cmp__(self, other):
return cmp(self._as_tuple(), other._as_tuple())
def __repr__(self):
return 'ramcloud.RejectRules(%s)' % str(self._as_tuple())
@staticmethod
def exactly(want_version):
return RejectRules(object_doesnt_exist=True, version_gt_given=True,
given_version=want_version)
def load_so():
not_found = ImportError("Couldn't find libramcloud.so, ensure it is " +
"installed and that you have registered it with " +
"/sbin/ldconfig")
# try to find the overridden path first, if possible using
# LD_LIBRARY_PATH which means we don't have to install the so
# during devel
path = None
if 'LD_LIBRARY_PATH' in os.environ:
for search_dir in os.environ['LD_LIBRARY_PATH'].split(':'):
test_path = os.path.join(search_dir, 'libramcloud.so')
if os.path.exists(test_path):
path = test_path
break
# couldn't find the so in LD_LIBRARY_PATH, so try the usual approach
if not path:
path = find_library('ramcloud')
if not path:
raise not_found
try:
so = ctypes.cdll.LoadLibrary(path)
except OSError, e:
if 'No such file or directory' in str(e):
raise not_found
else:
raise
def malloc_errcheck(result, func, arguments):
if result == 0:
raise MemoryError()
return result
# ctypes.c_bool was introduced in Python 2.6
if not hasattr(ctypes, 'c_bool'):
class c_bool_compat(ctypes.c_uint8):
def __init__(self, value=None):
if value:
ctypes.c_uint8.__init__(self, 1)
else:
ctypes.c_uint8.__init__(self, 0)
@staticmethod
def from_param(param):
if param:
return ctypes.c_uint8(1)
else:
return ctypes.c_uint8(0)
ctypes.c_bool = c_bool_compat
from ctypes import POINTER
# argument types aliased to their names for sanity
# alphabetical order
address = ctypes.c_char_p
buf = ctypes.c_void_p
client = ctypes.c_void_p
id = ctypes.c_uint64
len = ctypes.c_uint32
name = ctypes.c_char_p
nanoseconds = ctypes.c_uint64
nonce = ctypes.c_uint64
rejectRules = POINTER(RejectRules)
serviceLocator = ctypes.c_char_p
status = ctypes.c_int
table = ctypes.c_uint32
version = ctypes.c_uint64
so.rc_connect.argtypes = [address, POINTER(client)]
so.rc_connect.restype = status
so.rc_disconnect.argtypes = [client]
so.rc_disconnect.restype = None
so.rc_create.argtypes = [client, table, buf, len, POINTER(id),
POINTER(version)]
so.rc_create.restype = status
so.rc_createTable.argtypes = [client, name]
so.rc_createTable.restype = status
so.rc_dropTable.argtypes = [client, name]
so.rc_dropTable.restype = status
so.rc_getStatus.argtypes = []
so.rc_getStatus.restype = status
so.rc_openTable.argtypes = [client, name, POINTER(table)]
so.rc_openTable.restype = status
so.rc_ping.argtypes = [client, serviceLocator, nonce, nanoseconds,
POINTER(nonce)]
so.rc_ping.restype = status
so.rc_read.argtypes = [client, table, id, rejectRules, POINTER(version),
buf, len, POINTER(len)]
so.rc_read.restype = status
so.rc_remove.argtypes = [client, table, id, rejectRules,
POINTER(version)]
so.rc_remove.restype = status
so.rc_write.argtypes = [client, table, id, buf, len, rejectRules,
POINTER(version)]
so.rc_write.restype = status
return so
def _ctype_copy(addr, var, width):
ctypes.memmove(addr, ctypes.addressof(var), width)
return addr + width
class RCException(Exception):
def __init__(self, status):
Exception.__init__(self, 'RAMCloud error ' + str(status))
self.status = status
pass
class NoObjectError(Exception):
pass
class ObjectExistsError(Exception):
pass
class VersionError(Exception):
def __init__(self, want_version, got_version):
Exception.__init__(self, "Bad version: want %d but got %d" %
(want_version, got_version))
self.want_version = want_version
self.got_version = got_version
class RAMCloud(object):
def __init__(self):
self.client = ctypes.c_void_p()
self.hook = lambda: None
def __del__(self):
if self.client.value != None:
so.rc_disconnect(self.client)
def handle_error(self, status, actual_version=0):
if status == 0:
return
if status == 2:
raise NoObjectError()
if status == 3:
raise ObjectExistsError()
if status == 4:
raise VersionError(reject_rules.given_version, actual_version)
raise RCException(status)
def connect(self, serverLocator='fast+udp:host=127.0.0.1,port=12242'):
s = so.rc_connect(serverLocator, ctypes.byref(self.client))
self.handle_error(s)
def create(self, table_id, id, data):
reject_rules = RejectRules(object_exists=True)
return self.write_rr(table_id, id, data, reject_rules)
def create_table(self, name):
s = so.rc_createTable(self.client, name)
self.handle_error(s)
def delete(self, table_id, id, want_version=None):
if want_version:
reject_rules = RejectRules.exactly(want_version)
else:
reject_rules = RejectRules(object_doesnt_exist=True)
return self.delete_rr(table_id, id, reject_rules)
def delete_rr(self, table_id, id, reject_rules):
got_version = ctypes.c_uint64()
self.hook()
s = so.rc_remove(self.client, table_id, id,
ctypes.byref(reject_rules), ctypes.byref(got_version))
self.handle_error(s, got_version.value)
return got_version.value
def drop_table(self, name):
s = so.rc_dropTable(self.client, name)
self.handle_error(s)
def insert(self, table_id, data):
id = ctypes.c_uint64()
version = ctypes.c_uint64()
self.hook()
so.rc_create(self.client, table_id, data, len(data), ctypes.byref(id),
ctypes.byref(version))
return id.value
def open_table(self, name):
handle = ctypes.c_uint32()
s = so.rc_openTable(self.client, name, ctypes.byref(handle))
self.handle_error(s)
return handle.value
def ping(self, serviceLocator, nonce, nanoseconds):
result = ctypes.c_uint64();
s = so.rc_ping(self.client, serviceLocator, nonce, nanoseconds,
ctypes.byref(result))
self.handle_error(s)
return result
def read(self, table_id, id, want_version=None):
if want_version:
reject_rules = RejectRules.exactly(want_version)
else:
reject_rules = RejectRules(object_doesnt_exist=True)
return self.read_rr(table_id, id, reject_rules)
def read_rr(self, table_id, id, reject_rules):
max_length = 1024 * 1024 * 2
buf = ctypes.create_string_buffer(max_length)
actual_length = ctypes.c_uint32()
got_version = ctypes.c_uint64()
reject_rules.object_doesnt_exist = True
self.hook()
s = so.rc_read(self.client, table_id, id, ctypes.byref(reject_rules),
ctypes.byref(got_version), ctypes.byref(buf), max_length,
ctypes.byref(actual_length))
self.handle_error(s, got_version.value)
return (buf.raw[0:actual_length.value], got_version.value)
def update(self, table_id, id, data, want_version=None):
if want_version:
reject_rules = RejectRules.exactly(want_version)
else:
reject_rules = RejectRules(object_doesnt_exist=True)
return self.write_rr(table_id, id, data, reject_rules)
def write(self, table_id, id, data, want_version=None):
if want_version:
reject_rules = RejectRules(version_gt_given=True,
given_version=want_version)
else:
reject_rules = RejectRules()
return self.write_rr(table_id, id, data, reject_rules)
def write_rr(self, table_id, id, data, reject_rules):
got_version = ctypes.c_uint64()
self.hook()
s = so.rc_write(self.client, table_id, id, data, len(data),
ctypes.byref(reject_rules), ctypes.byref(got_version))
self.handle_error(s, got_version.value)
return got_version.value
def main():
r = RAMCloud()
r.connect()
print "Client: 0x%x" % r.client.value
r.ping()
r.create_table("test")
print "Created table 'test'",
table = r.open_table("test")
print "with id %s" % table
r.create(table, 0, "Hello, World, from Python")
print "Created object 0 in table"
value, got_version = r.read(table, 0)
print value
id = r.insert(table, "test")
print "Inserted value and got back id %d" % id
print "Value read back: %s" % r.read(table, id)[0]
r.update(table, id, "test")
bs = "binary\00safe?"
oid = r.insert(table, bs)
value = r.read(table, oid)[0]
assert value == bs
r.drop_table("test")
# these don't belong here, but they're testing a bug in which table names
# were truncated to 8 characters
r.create_table("01234567890123456789A")
r.create_table("01234567890123456789B")
assert (r.open_table("01234567890123456789A") !=
r.open_table("01234567890123456789B"))
r.drop_table("01234567890123456789A")
r.drop_table("01234567890123456789B")
so = load_so()
if __name__ == '__main__':
main()
| t3rm1n4l/ramcloud | bindings/python/ramcloud.py | Python | isc | 11,657 | 0.003174 |
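`RejectRules` is what turns plain reads and writes into conditional operations, which is how callers get compare-and-swap style semantics. A hedged sketch of an optimistic update built on the wrapper above (table name and object id are invented):

```python
# Hypothetical conditional update using the binding above (Python 2, like the module).
rc = RAMCloud()
rc.connect()
table = rc.open_table("accounts")       # assumes the table already exists
value, version = rc.read(table, 42)     # remember the version we observed
# update() attaches RejectRules.exactly(version), so the write is rejected
# if another client modified object 42 after our read.
rc.update(table, 42, value + "!", want_version=version)
```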
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    result = Building()
    result.template = "object/building/poi/base/shared_base_poi_medium.iff"
    result.attribute_template_id = -1
    result.stfName("poi_n","base_poi_building")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
| anhstudios/swganh | data/scripts/templates/object/building/poi/base/shared_base_poi_medium.py | Python | mit | 451 | 0.046563 |
#!/usr/bin/env python2.7
#naive implementation of a program that sorts function definitions alphabetically
import sys
import fileinput
DEBUG = False
function = ''
first_fun = None
output = []
funs = {}
bracket_count = 0
for line in fileinput.input():
    bracket_count += line.count("{")
    if bracket_count:
        # we are in a function
        if not function:
            function = line.split("(")[0].split()[-1]
    else:
        function = None
    if function:
        if first_fun == None:
            first_fun = len(output)
        if function not in funs:
            funs[function] = []
        funs[function].append(line)
    else:
        # sys.stdout.write(line)
        output.append(line)
    bracket_count -= line.count("}")

if DEBUG:
    print(funs)

output_funs = []
for k in sorted(funs.keys()):
    for l in funs[k]:
        output_funs.append(l)

if first_fun != None:
    output = output[:first_fun] + output_funs + output[first_fun:]

for l in output:
    sys.stdout.write(l)
| bdevorem/GGNoRe | sort_fun.py | Python | gpl-3.0 | 1,009 | 0.005946 |
#!/usr/bin/env python3
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2017 Florian Bruhin (The Compiler) <[email protected]>
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Generate the html documentation based on the asciidoc files."""
import re
import os
import os.path
import sys
import subprocess
import glob
import shutil
import tempfile
import argparse
import io
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir))
from scripts import utils
class AsciiDoc:
"""Abstraction of an asciidoc subprocess."""
FILES = [
('FAQ.asciidoc', 'qutebrowser/html/doc/FAQ.html'),
('CHANGELOG.asciidoc', 'qutebrowser/html/doc/CHANGELOG.html'),
('CONTRIBUTING.asciidoc', 'qutebrowser/html/doc/CONTRIBUTING.html'),
('doc/quickstart.asciidoc', 'qutebrowser/html/doc/quickstart.html'),
('doc/userscripts.asciidoc', 'qutebrowser/html/doc/userscripts.html'),
]
def __init__(self, args):
self._cmd = None
self._args = args
self._homedir = None
self._themedir = None
self._tempdir = None
self._failed = False
def prepare(self):
"""Get the asciidoc command and create the homedir to use."""
self._cmd = self._get_asciidoc_cmd()
self._homedir = tempfile.mkdtemp()
self._themedir = os.path.join(
self._homedir, '.asciidoc', 'themes', 'qute')
self._tempdir = os.path.join(self._homedir, 'tmp')
os.makedirs(self._tempdir)
os.makedirs(self._themedir)
def cleanup(self):
"""Clean up the temporary home directory for asciidoc."""
if self._homedir is not None and not self._failed:
shutil.rmtree(self._homedir)
def build(self):
if self._args.website:
self._build_website()
else:
self._build_docs()
self._copy_images()
def _build_docs(self):
"""Render .asciidoc files to .html sites."""
files = self.FILES[:]
for src in glob.glob('doc/help/*.asciidoc'):
name, _ext = os.path.splitext(os.path.basename(src))
dst = 'qutebrowser/html/doc/{}.html'.format(name)
files.append((src, dst))
# patch image links to use local copy
replacements = [
("http://qutebrowser.org/img/cheatsheet-big.png",
"qute://help/img/cheatsheet-big.png"),
("http://qutebrowser.org/img/cheatsheet-small.png",
"qute://help/img/cheatsheet-small.png")
]
for src, dst in files:
src_basename = os.path.basename(src)
modified_src = os.path.join(self._tempdir, src_basename)
with open(modified_src, 'w', encoding='utf-8') as modified_f, \
open(src, 'r', encoding='utf-8') as f:
for line in f:
for orig, repl in replacements:
line = line.replace(orig, repl)
modified_f.write(line)
self.call(modified_src, dst)
def _copy_images(self):
"""Copy image files to qutebrowser/html/doc."""
print("Copying files...")
dst_path = os.path.join('qutebrowser', 'html', 'doc', 'img')
try:
os.mkdir(dst_path)
except FileExistsError:
pass
for filename in ['cheatsheet-big.png', 'cheatsheet-small.png']:
src = os.path.join('doc', 'img', filename)
dst = os.path.join(dst_path, filename)
shutil.copy(src, dst)
def _build_website_file(self, root, filename):
"""Build a single website file."""
# pylint: disable=too-many-locals,too-many-statements
src = os.path.join(root, filename)
src_basename = os.path.basename(src)
parts = [self._args.website[0]]
dirname = os.path.dirname(src)
if dirname:
parts.append(os.path.relpath(os.path.dirname(src)))
parts.append(
os.extsep.join((os.path.splitext(src_basename)[0],
'html')))
dst = os.path.join(*parts)
os.makedirs(os.path.dirname(dst), exist_ok=True)
modified_src = os.path.join(self._tempdir, src_basename)
shutil.copy('www/header.asciidoc', modified_src)
outfp = io.StringIO()
with open(modified_src, 'r', encoding='utf-8') as header_file:
header = header_file.read()
header += "\n\n"
with open(src, 'r', encoding='utf-8') as infp:
outfp.write("\n\n")
hidden = False
found_title = False
title = ""
last_line = ""
for line in infp:
if line.strip() == '// QUTE_WEB_HIDE':
assert not hidden
hidden = True
elif line.strip() == '// QUTE_WEB_HIDE_END':
assert hidden
hidden = False
elif line == "The Compiler <[email protected]>\n":
continue
elif re.match(r'^:\w+:.*', line):
# asciidoc field
continue
if not found_title:
if re.match(r'^=+$', line):
line = line.replace('=', '-')
found_title = True
title = last_line.rstrip('\n') + " | qutebrowser\n"
title += "=" * (len(title) - 1)
elif re.match(r'^= .+', line):
line = '==' + line[1:]
found_title = True
title = last_line.rstrip('\n') + " | qutebrowser\n"
title += "=" * (len(title) - 1)
if not hidden:
outfp.write(line.replace(".asciidoc[", ".html["))
last_line = line
current_lines = outfp.getvalue()
outfp.close()
with open(modified_src, 'w+', encoding='utf-8') as final_version:
final_version.write(title + "\n\n" + header + current_lines)
asciidoc_args = ['--theme=qute', '-a toc', '-a toc-placement=manual']
self.call(modified_src, dst, *asciidoc_args)
def _build_website(self):
"""Prepare and build the website."""
theme_file = os.path.abspath(os.path.join('www', 'qute.css'))
shutil.copy(theme_file, self._themedir)
outdir = self._args.website[0]
for root, _dirs, files in os.walk(os.getcwd()):
for filename in files:
basename, ext = os.path.splitext(filename)
if (ext != '.asciidoc' or
basename in ['header', 'OpenSans-License']):
continue
self._build_website_file(root, filename)
copy = {'icons': 'icons', 'doc/img': 'doc/img', 'www/media': 'media/'}
for src, dest in copy.items():
full_dest = os.path.join(outdir, dest)
try:
shutil.rmtree(full_dest)
except FileNotFoundError:
pass
shutil.copytree(src, full_dest)
for dst, link_name in [
('README.html', 'index.html'),
(os.path.join('doc', 'quickstart.html'), 'quickstart.html'),
]:
try:
os.symlink(dst, os.path.join(outdir, link_name))
except FileExistsError:
pass
def _get_asciidoc_cmd(self):
"""Try to find out what commandline to use to invoke asciidoc."""
if self._args.asciidoc is not None:
return self._args.asciidoc
try:
subprocess.call(['asciidoc'], stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
except OSError:
pass
else:
return ['asciidoc']
try:
subprocess.call(['asciidoc.py'], stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
except OSError:
pass
else:
return ['asciidoc.py']
raise FileNotFoundError
def call(self, src, dst, *args):
"""Call asciidoc for the given files.
Args:
src: The source .asciidoc file.
dst: The destination .html file, or None to auto-guess.
*args: Additional arguments passed to asciidoc.
"""
print("Calling asciidoc for {}...".format(os.path.basename(src)))
cmdline = self._cmd[:]
if dst is not None:
cmdline += ['--out-file', dst]
cmdline += args
cmdline.append(src)
try:
env = os.environ.copy()
env['HOME'] = self._homedir
subprocess.check_call(cmdline, env=env)
except (subprocess.CalledProcessError, OSError) as e:
self._failed = True
utils.print_col(str(e), 'red')
print("Keeping modified sources in {}.".format(self._homedir))
sys.exit(1)
def main(colors=False):
"""Generate html files for the online documentation."""
utils.change_cwd()
utils.use_color = colors
parser = argparse.ArgumentParser()
parser.add_argument('--website', help="Build website into a given "
"directory.", nargs=1)
parser.add_argument('--asciidoc', help="Full path to python and "
"asciidoc.py. If not given, it's searched in PATH.",
nargs=2, required=False,
metavar=('PYTHON', 'ASCIIDOC'))
parser.add_argument('--no-authors', help=argparse.SUPPRESS,
action='store_true')
args = parser.parse_args()
try:
os.mkdir('qutebrowser/html/doc')
except FileExistsError:
pass
asciidoc = AsciiDoc(args)
try:
asciidoc.prepare()
except FileNotFoundError:
utils.print_col("Could not find asciidoc! Please install it, or use "
"the --asciidoc argument to point this script to the "
"correct python/asciidoc.py location!", 'red')
sys.exit(1)
try:
asciidoc.build()
finally:
asciidoc.cleanup()
if __name__ == '__main__':
main(colors=True)
| lahwaacz/qutebrowser | scripts/asciidoc2html.py | Python | gpl-3.0 | 10,901 | 0.000092 |
#!/usr/bin/python3
from __future__ import print_function
import os,sys,inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0,parentdir)
import scipy
from tqdm import tqdm
import django
from django.utils import timezone
from django.db.models import Count
django.setup()
from papers.models import Article, Feature
import papers.utils as utils
def get_articles_without_features():
articles = Article.objects.annotate(num_children=Count('feature')).filter(num_children=0)
data = []
for art in articles:
data.append( (art.title, art.authors, art.abstract, art.keywords) )
return articles, data
def get_features(data):
features = scipy.sparse.csr_matrix( utils.compute_features( data ) )
return features
def add_features_to_db(articles, features, stride=200):
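    # Insert the feature matrix in chunks of `stride` articles so each bulk_create call stays small.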
for k in tqdm(range(len(articles)//stride+1)):
start = int(k*stride)
end = min((k+1)*stride,features.shape[0])
A = scipy.sparse.coo_matrix(features[start:end])
Feature.objects.bulk_create( [ Feature( article=articles[int(i+k*stride)], index=int(j), value=float(v)) for i,j,v in zip(A.row, A.col, A.data) ] )
if __name__ == "__main__":
# Feature.objects.all().delete()
print("Finding articles without features...")
articles, data = get_articles_without_features()
if not len(data):
sys.exit()
print("Computing features for %i articles..."%(len(data)))
features = get_features(data)
print("Adding %i feature vectors to db... "%(features.shape[0]))
add_features_to_db(articles, features)
| fzenke/morla | scripts/compute_feature_vectors.py | Python | mit | 1,674 | 0.022103 |
from django import template
# from kilonull.conf import BlogConf
from kilonull.models import Menu, MenuItem
from kilonull.settings import SETTINGS
import re
register = template.Library()
@register.simple_tag
def blog_title():
return SETTINGS['BLOG_TITLE']
# Generate HTML for the header menu.
# Use a regex (path) to check if the current page is in the menu. If it is,
# apply the active class.
@register.simple_tag
def get_menu(menu_slug, curr_page):
html = ""
menu_items = MenuItem.objects.filter(menu__slug=menu_slug) \
.order_by("order")
path = re.compile("%s(.*)" % SETTINGS['BLOG_SITE_URL'])
for item in menu_items:
html += "<li"
match = path.match(item.link_url)
if match and match.group(1) == curr_page:
html += " class='active'"
html += "><a href='%s'>%s</a></li>" % (item.link_url, item.link_text)
return html
| vacuus/kilonull | kilonull/templatetags/theming.py | Python | lgpl-3.0 | 901 | 0 |
# Copyright (c) 2014-2017 Cedric Bellegarde <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import random
from lollypop.define import Shuffle, NextContext
from lollypop.player_base import BasePlayer
from lollypop.objects import Track
class UserPlaylistPlayer(BasePlayer):
"""
Manage user playlist
"""
def __init__(self):
"""
Init user playlist
"""
BasePlayer.__init__(self)
self.__user_playlist_backup = []
def get_user_playlist_ids(self):
"""
Get playlist id
@return id as int
"""
return self._user_playlist_ids
def populate_user_playlist_by_tracks(self, track_ids, playlist_ids):
"""
Set user playlist as current playback playlist
@param array of track ids as [int]
@param playlist ids as [int]
"""
if self.is_party:
self.set_party(False)
self._user_playlist = []
for track_id in track_ids:
self._user_playlist.append(track_id)
self._albums = []
self._user_playlist_ids = playlist_ids
self.__user_playlist_backup = []
self._shuffle_playlist()
def update_user_playlist(self, track_ids):
"""
Update user playlist content
@param track_ids as int
"""
if self._albums:
return
self._user_playlist = track_ids
self.__user_playlist_backup = []
self._shuffle_playlist()
def get_user_playlist(self):
"""
Get user playlist
@return track id as [int]
"""
if self.__user_playlist_backup:
return self.__user_playlist_backup
else:
return self._user_playlist
def next(self, force):
"""
Next Track
@param force as bool
@return Track
"""
track = Track()
if force:
current_track = self._next_track
else:
current_track = self._current_track
if self._user_playlist and\
current_track.id in self._user_playlist:
idx = self._user_playlist.index(current_track.id)
if idx + 1 >= len(self._user_playlist):
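                # End of the playlist: signal playback to stop after this track and wrap the index.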
self._next_context = NextContext.STOP
idx = 0
else:
idx += 1
track = Track(self._user_playlist[idx])
return track
def prev(self):
"""
Prev track id
@return Track
"""
track = Track()
if self._user_playlist and\
self._current_track.id in self._user_playlist:
idx = self._user_playlist.index(self._current_track.id)
if idx - 1 < 0:
idx = len(self._user_playlist) - 1
else:
idx -= 1
track = Track(self._user_playlist[idx])
return track
#######################
# PROTECTED #
#######################
def _shuffle_playlist(self):
"""
Shuffle/Un-shuffle playlist based on shuffle setting
"""
if self._shuffle == Shuffle.TRACKS:
# Shuffle user playlist
if self._user_playlist:
self.__user_playlist_backup = list(self._user_playlist)
random.shuffle(self._user_playlist)
# Unshuffle
else:
if self.__user_playlist_backup:
self._user_playlist = self.__user_playlist_backup
self.__user_playlist_backup = []
self.set_next()
self.set_prev()
| gnumdk/lollypop | lollypop/player_userplaylist.py | Python | gpl-3.0 | 4,223 | 0 |
# -*- coding: utf-8 -*-
import glob
import os
import polib
from django import VERSION as DJANGO_VERSION
from django.core.management.commands.makemessages import (
Command as OriginalMakeMessagesCommand)
from django.utils import translation
from django.utils.translation.trans_real import CONTEXT_SEPARATOR
class Command(OriginalMakeMessagesCommand):
# Django version 1.7+ requires_model_validation is deprecated
# and the value of 'requires_system_checks' is used (which is defined in
# the original command). The attribute is completely removed in Django 1.9.
if DJANGO_VERSION < (1, 7):
requires_model_validation = False
can_import_settings = True
def handle_noargs(self, *args, **options):
from django.conf import settings
super(Command, self).handle_noargs(*args, **options)
locale = options.get('locale')
domain = options.get('domain')
verbosity = int(options.get('verbosity'))
process_all = options.get('all')
# now that we've built the regular po files, we mark any translations that are already translated elsewhere
# as obsolete. If there is already a translation in the local po file, we keep it.
localedir = os.path.abspath('locale')
locales = []
if locale is not None:
locales.append(locale)
elif process_all:
locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % localedir))
locales = [os.path.basename(l) for l in locale_dirs]
# monkeypatch settings to not include the project locale directory
localepaths = [os.path.normpath(path) for path in settings.LOCALE_PATHS]
# remove the locale we're currently writing to from the settings, so that we can check for existing translations
# NOT in this file
localepaths = [path for path in localepaths if not path == localedir]
settings.LOCALE_PATHS = list(localepaths)
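        # Sentinel object used to detect msgids that are absent from the other catalogs.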
missing = object()
for locale in locales:
translation.activate(locale)
catalog = translation.trans_real.catalog()
# catalog = trans.translation(locale)
# catalog = translation.trans_real.translation._fetch(locale)
# catalog._fallback = False
if verbosity > 0:
self.stdout.write("cleaning translations for language %s " % locale)
if locale in ['en', 'en-us']:
self.stdout.write(" (unreliable because %s is usually not translated) " % locale)
basedir = os.path.join(localedir, locale, 'LC_MESSAGES')
pofile = os.path.join(basedir, '%s.po' % domain)
mofile = os.path.join(basedir, '%s.mo' % domain)
po = polib.pofile(pofile)
obsolete_count = 0
for entry in po:
# if entry.msgid_plural and locale == 'de': import ipdb; ipdb.set_trace()
# if entry.msgid == 'one translation' and locale == 'de': import ipdb; ipdb.set_trace()
context = entry.msgctxt or None
if entry.msgid_plural:
if context:
msg = catalog._catalog.get((u"%s%s%s" % (context, CONTEXT_SEPARATOR, entry.msgid), True), missing)
else:
msg = catalog._catalog.get((entry.msgid, True), missing)
else:
if context:
msg = catalog._catalog.get(u"%s%s%s" % (context, CONTEXT_SEPARATOR, entry.msgid), missing)
else:
msg = catalog._catalog.get(entry.msgid, missing)
                is_already_translated_elsewhere = msg is not missing
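                # Untranslated here but already translated elsewhere -> mark the entry obsolete.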
if not entry.msgstr and is_already_translated_elsewhere:
entry.obsolete = 1
obsolete_count += 1
if verbosity > 0:
self.stdout.write(".")
po.save(pofile)
# po.save_as_mofile(mofile) # should be done by regular compilemessages
self.stdout.write(u" marked %s obsolete translations\n" % obsolete_count)
| divio/django-commontranslations | django_commontranslations/management/commands/makemessages_unique.py | Python | bsd-3-clause | 4,136 | 0.003627 |
# -*- coding: utf-8 -*-
"""Page model for Automation/Anisble/Credentials"""
import attr
from navmazing import NavigateToAttribute, NavigateToSibling
from widgetastic_patternfly import BootstrapSelect, Button, Dropdown, Input
from widgetastic_manageiq import ParametrizedSummaryTable, Table, PaginationPane
from widgetastic.exceptions import NoSuchElementException
from widgetastic.utils import ParametrizedLocator
from widgetastic.widget import ConditionalSwitchableView, ParametrizedView, Text, TextInput, View
from cfme.base import Server
from cfme.base.login import BaseLoggedInPage
from cfme.common import Taggable, TagPageView
from cfme.exceptions import ItemNotFound
from cfme.modeling.base import BaseCollection, BaseEntity
from cfme.utils.appliance.implementations.ui import navigator, navigate_to, CFMENavigateStep
from cfme.utils.wait import wait_for
class CredentialsBaseView(BaseLoggedInPage):
title = Text(locator=".//div[@id='main-content']//h1")
@property
def in_ansible_credentials(self):
return (
self.logged_in_as_current_user and
self.navigation.currently_selected == ["Automation", "Ansible", "Credentials"]
)
class CredentialsListView(CredentialsBaseView):
@View.nested
class toolbar(View): # noqa
configuration = Dropdown("Configuration")
policy = Dropdown(text='Policy')
credentials = Table(".//div[@id='miq-gtl-view']//table")
paginator = PaginationPane()
@property
def is_displayed(self):
return self.in_ansible_credentials and self.title.text == "Credentials"
class CredentialDetailsView(CredentialsBaseView):
@View.nested
class toolbar(View): # noqa
configuration = Dropdown("Configuration")
download = Button(title="Download summary in PDF format")
policy = Dropdown(text='Policy')
@View.nested
class entities(View): # noqa
summary = ParametrizedView.nested(ParametrizedSummaryTable)
@property
def is_displayed(self):
return (
self.in_ansible_credentials and
self.title.text == "{} (Summary)".format(self.context["object"].name)
)
class CredentialFormView(CredentialsBaseView):
name = Input(name="name")
credential_form = ConditionalSwitchableView(reference="credential_type")
@credential_form.register("<Choose>", default=True)
class CredentialFormDefaultView(View):
pass
@credential_form.register("Machine")
class CredentialFormMachineView(View):
username = Input(locator='.//input[@title="Username for this credential"]')
password = Input(locator='.//input[@title="Password for this credential"][2]')
private_key = TextInput(
locator='.//textarea[@title="RSA or DSA private key to be used instead of password"][2]'
)
private_key_phrase = Input(
locator='.//input[@title="Passphrase to unlock SSH private key if encrypted"][2]')
privilage_escalation = BootstrapSelect("{{name}}")
privilage_escalation_username = Input(
locator='.//input[@title="Privilege escalation username"]')
privilage_escalation_password = Input(
locator='.//input[@title="Password for privilege escalation method"][2]')
@credential_form.register("Scm")
class CredentialFormScmView(View):
username = Input(locator='.//input[@title="Username for this credential"]')
password = Input(locator='.//input[@title="Password for this credential"][2]')
private_key = TextInput(
locator='.//textarea[@title="RSA or DSA private key to be used instead of password"][2]'
)
private_key_phrase = Input(
locator='.//input[@title="Passphrase to unlock SSH private key if encrypted"][2]')
@credential_form.register("Vault")
class CredentialFormVaultView(View):
vault_password = Input(locator='.//input[@title="Vault password"][2]')
@credential_form.register("Amazon")
class CredentialFormAmazonView(View):
access_key = Input(locator='.//input[@title="AWS Access Key for this credential"]')
secret_key = Input(locator='.//input[@title="AWS Secret Key for this credential"][2]')
sts_token = Input(
locator='.//input[@title="Security Token Service(STS) Token for this credential"][2]')
@credential_form.register("VMware")
class CredentialFormVMwareView(View):
username = Input(locator='.//input[@title="Username for this credential"]')
password = Input(locator='.//input[@title="Password for this credential"][2]')
vcenter_host = Input(
locator='.//input[@title="The hostname or IP address of the vCenter Host"]')
@credential_form.register("OpenStack")
class CredentialFormOpenStackView(View):
username = Input(locator='.//input[@title="The username to use to connect to OpenStack"]')
password = Input(locator='.//input[@title="The password or API'
' key to use to connect to OpenStack"][2]')
authentication_url = Input(
locator='.//input[@title="The host to authenticate with. '
'For example, https://openstack.business.com/v2.0"]')
project = Input(locator='.//input[@title="This is the tenant name. This value '
'is usually the same as the username"]')
domain = Input(locator='.//input[@title="OpenStack domains define administrative '
'boundaries. It is only needed for Keystone v3 authentication URLs"]')
@credential_form.register("Red Hat Virtualization")
class CredentialFormRHVView(View):
username = Input(locator='.//input[@title="Username for this credential"]')
password = Input(locator='.//input[@title="Password for this credential"][2]')
host = Input(locator='.//input[@title="The host to authenticate with"]')
@credential_form.register("Google Compute Engine")
class CredentialFormGCEView(View):
service_account = Input(locator='.//input[@title="The email address assigned to '
'the Google Compute Engine service account"]')
priv_key = TextInput(locator='.//textarea[@title="Contents of the PEM file associated with '
'the service account email"]')
project = Input(locator='.//input[@title="The GCE assigned identification. It is '
'constructed as two words followed by a three digit number, such as: '
'squeamish-ossifrage-123"]')
cancel_button = Button("Cancel")
class CredentialAddView(CredentialFormView):
credential_type = BootstrapSelect("type")
add_button = Button("Add")
@property
def is_displayed(self):
return (
self.in_ansible_credentials and
self.title.text == "Add a new Credential"
)
class CredentialEditView(CredentialFormView):
@ParametrizedView.nested
class input(ParametrizedView): # noqa
PARAMETERS = ("title", )
field_enable = Text(ParametrizedLocator(
".//*[(self::input or self::textarea) and "
"@title={title|quote}]/../../a[text()='Update']"))
field_disable = Text(ParametrizedLocator(
".//*[(self::input or self::textarea) and "
"@title={title|quote}]/../../a[text()='Cancel']"))
def toggle(self):
if self.field_enable.is_displayed:
self.field_enable.click()
elif self.field_disable.is_displayed:
self.field_disable.click()
credential_type = Text(locator=".//label[normalize-space(.)='Credential type']/../div")
save_button = Button("Save")
reset_button = Button("Reset")
@property
def is_displayed(self):
return (
self.in_ansible_credentials and
self.title.text == 'Edit a Credential "{}"'.format(self.context["object"].name)
)
def before_fill(self, values):
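        # Fields on the edit form sit behind an "Update"/"Cancel" toggle; switch each field
        # that is about to be filled into edit mode first.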
for name in self.widget_names:
if name not in values or values[name] is None:
continue
widget = getattr(self, name)
title = self.browser.get_attribute("title", widget)
try:
self.input(title).toggle()
except NoSuchElementException:
continue
class Credential(BaseEntity, Taggable):
"""A class representing one Embedded Ansible credential in the UI."""
# TODO - This is one of the only classes that hasn't been converted to attrs
# The class needs to be reworked and split into multiple subtypes. The kwargs
# is also problematic for attrs
def __init__(self, collection, name, credential_type, **credentials):
super(Credential, self).__init__(collection)
self.name = name
self.credential_type = credential_type
for key, value in credentials.items():
setattr(self, key, value)
__repr__ = object.__repr__
def update(self, updates):
machine_credential_fill_dict = {
"username": updates.get("username"),
"password": updates.get("password"),
"private_key": updates.get("private_key"),
"private_key_phrase": updates.get("private_key_phrase"),
"privilage_escalation": updates.get("privilage_escalation"),
"privilage_escalation_username": updates.get("privilage_escalation_username"),
"privilage_escalation_password": updates.get("privilage_escalation_password"),
}
scm_credential_fill_dict = {
"username": updates.get("username"),
"password": updates.get("password"),
"private_key": updates.get("private_key"),
"private_key_phrase": updates.get("private_key_phrase")
}
vault_credential_fill_dict = {
"vault_password": updates.get("vault_password")
}
amazon_credential_fill_dict = {
"access_key": updates.get("access_key"),
"secret_key": updates.get("secret_key"),
"sts_token": updates.get("sts_token"),
}
vmware_credential_fill_dict = {
"username": updates.get("username"),
"password": updates.get("password"),
"vcenter_host": updates.get("vcenter_host")
}
openstack_credential_fill_dict = {
"username": updates.get("username"),
"password": updates.get("password"),
"authentication_url": updates.get("authentication_url"),
"project": updates.get("project"),
"domain": updates.get("domain")
}
gce_credential_fill_dict = {
"service_account": updates.get("service_account"),
"priv_key": updates.get("priv_key"),
"project": updates.get("project")
}
rhv_credential_fill_dict = {
"username": updates.get("username"),
"password": updates.get("password"),
"host": updates.get("host")
}
credential_type_map = {
"Machine": machine_credential_fill_dict,
"Scm": scm_credential_fill_dict,
"Vault": vault_credential_fill_dict,
"Amazon": amazon_credential_fill_dict,
"VMware": vmware_credential_fill_dict,
"OpenStack": openstack_credential_fill_dict,
"Red Hat Virtualization": rhv_credential_fill_dict,
"Google Compute Engine": gce_credential_fill_dict
}
edit_page = navigate_to(self, "Edit")
changed = edit_page.fill({"name": updates.get("name")})
form_changed = edit_page.credential_form.fill(credential_type_map[self.credential_type])
if changed or form_changed:
edit_page.save_button.click()
else:
edit_page.cancel_button.click()
view = self.create_view(CredentialsListView)
wait_for(lambda: False, silent_failure=True, timeout=5)
assert view.is_displayed
view.flash.assert_no_error()
if changed or form_changed:
view.flash.assert_message(
'Modification of Credential "{}" has been successfully queued.'.format(
updates.get("name", self.name)))
else:
view.flash.assert_message(
'Edit of Credential "{}" was canceled by the user.'.format(self.name))
@property
def exists(self):
try:
navigate_to(self, "Details")
return True
except ItemNotFound:
return False
def delete(self):
view = navigate_to(self, "Details")
if self.appliance.version < "5.9":
remove_str = "Remove this Credential"
else:
remove_str = "Remove this Credential from Inventory"
view.toolbar.configuration.item_select(remove_str, handle_alert=True)
credentials_list_page = self.create_view(CredentialsListView)
wait_for(lambda: False, silent_failure=True, timeout=5)
assert credentials_list_page.is_displayed
credentials_list_page.flash.assert_success_message(
'Deletion of Credential "{}" was successfully initiated.'.format(self.name))
wait_for(
lambda: not self.exists,
delay=10,
fail_func=credentials_list_page.browser.selenium.refresh,
timeout=300
)
@attr.s
class CredentialsCollection(BaseCollection):
"""Collection object for the :py:class:`Credential`."""
ENTITY = Credential
def create(self, name, credential_type, **credentials):
add_page = navigate_to(self, "Add")
machine_credential_fill_dict = {
"username": credentials.get("username"),
"password": credentials.get("password"),
"private_key": credentials.get("private_key"),
"private_key_phrase": credentials.get("private_key_phrase"),
"privilage_escalation": credentials.get("privilage_escalation"),
"privilage_escalation_username": credentials.get("privilage_escalation_username"),
"privilage_escalation_password": credentials.get("privilage_escalation_password")
}
scm_credential_fill_dict = {
"username": credentials.get("username"),
"password": credentials.get("password"),
"private_key": credentials.get("private_key"),
"private_key_phrase": credentials.get("private_key_phrase")
}
vault_credential_fill_dict = {
"vault_password": credentials.get("vault_password")
}
amazon_credential_fill_dict = {
"access_key": credentials.get("access_key"),
"secret_key": credentials.get("secret_key"),
"sts_token": credentials.get("sts_token"),
}
vmware_credential_fill_dict = {
"username": credentials.get("username"),
"password": credentials.get("password"),
"vcenter_host": credentials.get("vcenter_host")
}
openstack_credential_fill_dict = {
"username": credentials.get("username"),
"password": credentials.get("password"),
"authentication_url": credentials.get("authentication_url"),
"project": credentials.get("project"),
"domain": credentials.get("domain")
}
rhv_credential_fill_dict = {
"username": credentials.get("username"),
"password": credentials.get("password"),
"host": credentials.get("host")
}
gce_credential_fill_dict = {
"service_account": credentials.get("service_account"),
"priv_key": credentials.get("priv_key"),
"project": credentials.get("project")
}
credential_type_map = {
"Machine": machine_credential_fill_dict,
"Scm": scm_credential_fill_dict,
"Vault": vault_credential_fill_dict,
"Amazon": amazon_credential_fill_dict,
"VMware": vmware_credential_fill_dict,
"OpenStack": openstack_credential_fill_dict,
"Red Hat Virtualization": rhv_credential_fill_dict,
"Google Compute Engine": gce_credential_fill_dict
}
add_page.fill({"name": name, "credential_type": credential_type})
add_page.credential_form.fill(credential_type_map[credential_type])
add_page.add_button.click()
credentials_list_page = self.create_view(CredentialsListView)
# Without this StaleElementReferenceException can be raised
wait_for(lambda: False, silent_failure=True, timeout=5)
assert credentials_list_page.is_displayed
credentials_list_page.flash.assert_success_message(
'Add of Credential "{}" has been successfully queued.'.format(name))
credential = self.instantiate(name, credential_type, **credentials)
wait_for(
lambda: credential.exists,
fail_func=credentials_list_page.browser.selenium.refresh,
delay=5,
timeout=300)
return credential
@navigator.register(Server)
class AnsibleCredentials(CFMENavigateStep):
VIEW = CredentialsListView
prerequisite = NavigateToSibling("LoggedIn")
def step(self):
self.view.navigation.select("Automation", "Ansible", "Credentials")
@navigator.register(Credential)
class Details(CFMENavigateStep):
VIEW = CredentialDetailsView
prerequisite = NavigateToAttribute("appliance.server", "AnsibleCredentials")
def step(self):
credentials = self.prerequisite_view.credentials
for row in credentials:
if row["Name"].text == self.obj.name:
row["Name"].click()
break
else:
raise ItemNotFound
@navigator.register(CredentialsCollection)
class Add(CFMENavigateStep):
VIEW = CredentialAddView
prerequisite = NavigateToAttribute("appliance.server", "AnsibleCredentials")
def step(self):
self.prerequisite_view.toolbar.configuration.item_select("Add New Credential")
@navigator.register(Credential)
class Edit(CFMENavigateStep):
VIEW = CredentialEditView
prerequisite = NavigateToSibling("Details")
def step(self):
self.prerequisite_view.toolbar.configuration.item_select("Edit this Credential")
@navigator.register(Credential, 'EditTags')
class EditTagsFromListCollection(CFMENavigateStep):
VIEW = TagPageView
prerequisite = NavigateToAttribute("appliance.server", "AnsibleCredentials")
def step(self):
try:
row = self.prerequisite_view.paginator.find_row_on_pages(
table=self.prerequisite_view.credentials,
name=self.obj.name)
row[0].click()
except NoSuchElementException:
raise ItemNotFound('Could not locate ansible credential table row with name {}'
.format(self.obj.name))
self.prerequisite_view.toolbar.policy.item_select('Edit Tags')
| anurag03/integration_tests | cfme/ansible/credentials.py | Python | gpl-2.0 | 18,975 | 0.002951 |
# -*- coding: utf-8 -*-
# Copyright 2010-2013 Kolab Systems AG (http://www.kolabsys.com)
#
# Jeroen van Meeuwen (Kolab Systems) <vanmeeuwen a kolabsys.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import commands
import pykolab
from pykolab import imap_utf7
from pykolab.imap import IMAP
from pykolab.translate import _
from pykolab import utils
log = pykolab.getLogger('pykolab.cli')
conf = pykolab.getConf()
def __init__():
commands.register('list_user_subscriptions', execute, description=description())
def cli_options(*args, **kw):
my_option_group = conf.add_cli_parser_option_group(_("CLI Options"))
my_option_group.add_option( '--raw',
dest = "raw",
action = "store_true",
default = False,
help = _("Display raw IMAP UTF-7 folder names"))
my_option_group.add_option( '--unsubscribed',
dest = "unsubscribed",
action = "store_true",
default = False,
help = _("List unsubscribed folders"))
def description():
return _("List the folders a user is subscribed to.")
def execute(*args, **kw):
folder_pattern = "*"
try:
user = conf.cli_args.pop(0)
try:
folder_pattern = conf.cli_args.pop(0)
except IndexError, errmsg:
pass
except IndexError, errmsg:
user = utils.ask_question(_("User ID"))
if len(user.split('@')) > 1:
domain = user.split('@')[1]
else:
domain = conf.get('kolab', 'primary_domain')
imap = IMAP()
imap.connect(domain=domain, login=False)
backend = conf.get(domain, 'imap_backend')
    if backend is None:
backend = conf.get('kolab', 'imap_backend')
admin_login = conf.get(backend, 'admin_login')
admin_password = conf.get(backend, 'admin_password')
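    # Authenticate with the admin credentials to access the target user's subscriptions.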
imap.login_plain(admin_login, admin_password, user)
subscribed_folders = imap.lsub(folder_pattern)
if conf.unsubscribed:
unsubscribed_folders = []
all_folders = imap.lm(folder_pattern)
for folder in all_folders:
if not folder in subscribed_folders:
unsubscribed_folders.append(folder)
if len(unsubscribed_folders) > 0:
if not conf.raw:
print "\n".join([imap_utf7.decode(x) for x in unsubscribed_folders])
else:
print "\n".join(unsubscribed_folders)
else:
print _("No unsubscribed folders for user %s") % (user)
else:
if not conf.raw:
print "\n".join([imap_utf7.decode(x) for x in subscribed_folders])
else:
print "\n".join(subscribed_folders)
| tpokorra/pykolab | pykolab/cli/cmd_list_user_subscriptions.py | Python | gpl-3.0 | 3,424 | 0.009638 |
#!/usr/bin/env python
import sys
import argparse
import regrws
import regrws.method.org
try:
from apikey import APIKEY
except ImportError:
APIKEY = None
epilog = 'API key can be omitted if APIKEY is defined in apikey.py'
parser = argparse.ArgumentParser(epilog=epilog)
parser.add_argument('-k', '--key', help='ARIN API key',
required=False if APIKEY else True, dest='api_key')
parser.add_argument('-s', '--source-address', help='Source IP address')
parser.add_argument('org_handle', metavar='ORG_HANDLE')
args = parser.parse_args()
if args.api_key:
APIKEY = args.api_key
session = regrws.restful.Session(APIKEY, args.source_address)
method = regrws.method.org.Delete(session, args.org_handle)
try:
payload_out = method.call()
except regrws.restful.RegRwsError as exception:
print exception.args
| RhubarbSin/arin-reg-rws | org_delete.py | Python | mit | 839 | 0 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_request(
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2021-09-01-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.AppPlatform/skus')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class SkusOperations(object):
"""SkusOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.appplatform.v2021_09_01_preview.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
**kwargs: Any
) -> Iterable["_models.ResourceSkuCollection"]:
"""Lists all of the available skus of the Microsoft.AppPlatform provider.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ResourceSkuCollection or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2021_09_01_preview.models.ResourceSkuCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceSkuCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("ResourceSkuCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.AppPlatform/skus'} # type: ignore
| Azure/azure-sdk-for-python | sdk/appplatform/azure-mgmt-appplatform/azure/mgmt/appplatform/v2021_09_01_preview/operations/_skus_operations.py | Python | mit | 5,833 | 0.004115 |
from .base import FunctionalTest
class ItemValidationTest(FunctionalTest):
def get_error_element(self):
return self.browser.find_element_by_css_selector('.has-error')
def test_cannot_add_empty_list_items(self):
# Edith goes to the home page and accidentally tries to submit
# an empty list item. She hits Enter on the empty input box
self.browser.get(self.server_url)
self.get_item_input_box().send_keys('\n')
# The home page refreshes, and there is an error message saying
# that list items cannot be blank
error = self.get_error_element()
self.assertEqual(error.text, "You can't have an empty list item")
# She tries again with some text for the item, which now works
self.get_item_input_box().send_keys('Buy milk\n')
self.check_for_row_in_list_table('1: Buy milk')
# Perversely, she now decides to submit a second blank list item
self.get_item_input_box().send_keys('\n')
# She receives a similar warning on the list page
self.check_for_row_in_list_table('1: Buy milk')
error = self.get_error_element()
self.assertEqual(error.text, "You can't have an empty list item")
# And she can correct it by filling some text in
self.get_item_input_box().send_keys('Make tea\n')
self.check_for_row_in_list_table('1: Buy milk')
self.check_for_row_in_list_table('2: Make tea')
def test_cannot_add_duplicate_items(self):
# Edith goes to the home page and starts a new list
self.browser.get(self.server_url)
self.get_item_input_box().send_keys('Buy wellies\n')
self.check_for_row_in_list_table('1: Buy wellies')
# She accidentally tries to enter a duplicate item
self.get_item_input_box().send_keys('Buy wellies\n')
# She sees a helpful error message
self.check_for_row_in_list_table('1: Buy wellies')
error = self.get_error_element()
self.assertEqual(error.text, "You've already got this in your list")
def test_error_messages_are_cleared_on_input(self):
# Edith starts a new list in a way that causes a validation error:
self.browser.get(self.server_url)
self.get_item_input_box().send_keys('\n')
error = self.get_error_element()
self.assertTrue(error.is_displayed())
# She starts typing in the input box to clear the error
self.get_item_input_box().send_keys('a')
# She is pleased to see that the error message disappears
error = self.get_error_element()
self.assertFalse(error.is_displayed())
| jrwiegand/tdd-project | functional_tests/test_list_item_validation.py | Python | mit | 2,652 | 0 |
#!/usr/bin/env python
import re
import requests
import validators
import sys
import httplib2
import datetime
import socket
from urllib.parse import urlsplit, urljoin
from ipwhois import IPWhois
from pprint import pprint
from bs4 import BeautifulSoup, SoupStrainer
import pkg_resources
http_interface = httplib2.Http()
http_interface.follow_redirects = False
links = []
def timeStamp():
now = datetime.datetime.now()
return now.strftime("[%Y-%m-%d %H:%M:%S] ")
def checkHost(url):
try:
response, content = http_interface.request(url, method="GET")
#if response.status == 301 or response.status == 302 or response.status == 303:
if response.status == 301 or response.status == 302:
print(timeStamp() + "* Redirected to " + response['location'] + " ... Exiting")
sys.exit(1)
except httplib2.ServerNotFoundError as e:
print (e.message)
http_interface.follow_redirects = True
def addLink(url):
if url not in links:
links.append(url)
def checkLinks(url):
try:
status, response = http_interface.request(url)
for link in BeautifulSoup(response, "html.parser", parse_only=SoupStrainer("a")):
if link.has_attr('href'):
addLink(urljoin(url,link['href']))
for link in BeautifulSoup(response, "html.parser", parse_only=SoupStrainer("script")):
if link.has_attr('src'):
addLink(urljoin(url,link['src']))
except:
return
def checkLocation(url, path):
urlCheck = url + path
try:
response, content = http_interface.request(urlCheck, method="GET")
if response.status == 200:
print(timeStamp() + "!!! " + path + " found - " + urlCheck)
# Checks for stuff like pasword input on HTTP page
checkForm(url + path)
checkLinks(url + path)
except httplib2.ServerNotFoundError as e:
print(e.message)
def checkCookies(url):
r = requests.get(url)
for cookie in r.cookies:
if url.lower().find("https:",0) == 0:
if cookie.secure == False:
print(timeStamp() + "!!! " + cookie.name + " set without 'Secure' flag")
if not cookie.has_nonstandard_attr('httponly') and not cookie.has_nonstandard_attr('HttpOnly'):
print(timeStamp() + "!!! " + cookie.name + " set without 'HttpOnly' flag")
def checkHeaders(url):
secureURL = 0
# Determine target URL type
if url.lower().find("https:",0) == 0:
secureURL = 1
print()
print(timeStamp() + "* Checking headers")
try:
response, content = http_interface.request(url, method="GET")
if 'server' in response:
print(timeStamp() + "!!! Server header - " + response['server'])
if 'x-powered-by' in response:
print(timeStamp() + "!!! X-Powered-By header - " + response['x-powered-by'])
if 'x-frame-options' not in response:
print(timeStamp() + "!!! X-Frame-Options header missing")
if 'content-security-policy' not in response:
print(timeStamp() + "!!! Content-Security-Policy header missing")
if 'x-xss-protection' not in response:
print(timeStamp() + "!!! X-XSS-Protection header missing")
if 'set-cookie' in response:
checkCookies(url)
# Check for HTTPS specific headers
if secureURL == 1:
if 'strict-transport-security' not in map(str.lower, response):
print(timeStamp() + "!!! HSTS header missing")
if 'public-key-pins' not in response:
print(timeStamp() + "!!! Public-Key-Pins header missing")
except httplib2.ServerNotFoundError as e:
print (e.message)
def getIPs(domain):
try:
ips = socket.gethostbyname_ex(domain)
except socket.gaierror:
ips=[]
print(timeStamp() + "Cannot resolve " + domain)
sys.exit(1)
return ips
def checkQuickXSS(url):
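    # "%3c%3eqadqad" decodes to "<>qadqad"; if it comes back unescaped, the page reflects unsanitized input.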
url_xss = url + "/%3c%3eqadqad"
try:
print()
print(timeStamp() + "* Checking / for XSS (URL only)")
try:
response, content = http_interface.request(url_xss, method="GET")
except httplib2.HttpLib2Error as e:
print(timeStamp() + "[ERROR] " + str(e))
print(timeStamp() + "[ERROR] QAD will try and deal with this at some point..")
sys.exit(1)
if b"<>qadqad" in content:
print(timeStamp() + "!!! Reflected XSS potential at " + url_xss)
except httplib2.ServerNotFoundError as e:
print(e.message)
def getDomain(url):
domain = "{0.netloc}".format(urlsplit(url))
return domain
def performWhoIs(IP):
print()
print(timeStamp() + "* Performing WHOIS on " + IP)
obj = IPWhois(IP)
res = obj.lookup_whois()
print(timeStamp() + "- WHOIS name: " + res["nets"][0]['name'])
print(timeStamp() + "- WHOIS CIDR: " + res['asn_cidr'])
print(timeStamp() + "- More info at http://who.is/whois-ip/ip-address/" + IP)
def intro(url, domain):
print(timeStamp() + "QAD " + pkg_resources.get_distribution('QAD').version)
print(timeStamp() + "Quick and dirty web app scanner")
print(timeStamp() + "By @rascal999 - https://github.com/rascal999/")
print(timeStamp() + "*******************************")
print(timeStamp() + "* Target = " + url)
print(timeStamp() + "* IP(s) associated with target:")
IPs = getIPs(domain)
for IP in IPs[2]:
print(timeStamp() + "- " + IP)
def validateURL(url):
if not validators.url(url):
print(timeStamp() + "* Invalid URL provided")
sys.exit(1)
def checkForm(url):
from bs4 import BeautifulSoup
try:
response, content = http_interface.request(url, method="GET")
except httplib2.ServerNotFoundError as e:
print (e.message)
sys.exit(1)
if url.lower().find("https:",0) != 0:
parsed_html = BeautifulSoup(content, "html.parser")
try:
if len(parsed_html.body.find_all('input', attrs={'type':'password'})) > 0:
print(timeStamp() + "!!! Possible login/registration form over HTTP connection at " + response['content-location'])
except:
return
def checkJS():
print()
print(timeStamp() + "* JavaScript URLs found")
for link in links:
if re.search(".js$",link):
print(timeStamp() + "- " + link)
def main():
if len(sys.argv) != 2:
print(sys.argv[0] + " http(s)://<target>")
sys.exit(1)
url = sys.argv[1]
validateURL(url)
protocol = url.split('://')[0]
domain = getDomain(url)
domain_only = protocol + "://" + domain + "/"
# Hello
intro(url, domain)
# Check for 301 and 302 (not 303)
checkHost(url)
# Perform WHOIS
performWhoIs(getIPs(domain)[2][0])
# Header checks (yay!)
checkHeaders(url)
# File checks
# TODO What if user sets URL to http://donkey.dick/some-shit/ ?
# Need http://domain/ extraction
print()
print(timeStamp() + "* Checking paths")
checkLocation(domain_only, "/robots.txt")
checkLocation(domain_only, "/crossdomain.xml")
checkLocation(domain_only, "/sitemap.xml")
checkLocation(domain_only, "/admin/")
checkLocation(domain_only, "/backup/")
checkLocation(domain_only, "/upload/")
checkLocation(domain_only, "/download/")
checkLocation(domain_only, "/wp-admin/")
#checkLocation(domain_only, "/stats/")
checkLocation(domain_only, "/awstats/")
checkLocation(domain_only, "/phpbb3/")
checkLocation(domain_only, "/forum/")
checkLocation(domain_only, "/login/")
checkLocation(domain_only, "/blog/")
# Proper Noddy(tm) XSS checks
checkQuickXSS(url)
# List JS files
checkJS()
# Crawler (not threaded, could get caught in infinite loop)
#for link in links:
# checkLinks(link)
if __name__ == "__main__":
main()
# Header presence checked, what about header values?
#checkHeaders
# Missing headers
# Header versions
#checkMethods
| rascal999/qad | qad/__init__.py | Python | gpl-3.0 | 8,092 | 0.006179 |
import os
import logging
import tornado.options as opt
from motherbrain.base import conf
from motherbrain.base.conf import get_config
SITE_CONF = conf.site_conf(os.getcwd())
DATADIR = SITE_CONF.get('env.motherbrain_data', '/tmp')
API_URL = SITE_CONF.get('env.api_url')
WEBCLIENT_URL = SITE_CONF.get('env.webclient_url')
_srv_opts = {
'config': {'default': 'api_server.cfg',
'help': 'Configuration File'},
'port': {'default': 8888, 'type': int,
'help': 'Tornado Port'},
'debug': {'default': True, 'type': bool},
'cors_hosts': {'default': '''http://api.urli.st '''
'''http://urli.st '''
'''http://localhost:9999 '''
'''http://next.urli.st '''
'''http://urli.st '''
'''http://next.api.urli.st''',
'help': 'Hosts allowed to perform Cross Domain Request'},
'media_path': {'default': os.path.join(DATADIR, 'urlist_media')},
'media_url': {'default': '/media'},
'static_url': {'default': 'http://static.urli.st'},
'base_url': {'default': API_URL},
'webclient_url': {'default': WEBCLIENT_URL}
}
_motherbrain_opts = {
'dispatcher_classname': {'default': 'MBDispatcherCluster',
'help': 'Motherbrain dispatcher class'},
'addresses': {'default': '''tcp://localhost:5555 tcp://localhost:5556 '''
'''tcp://localhost:5557 tcp://localhost:5558''',
'help': 'A space separated list of addresses'},
'datadir': {'default': os.path.join(DATADIR, 'motherbrain_data')}
}
_oauth_opts = {
'cookie_secret': {'default': 'XXX'},
'cookie_domain': {'default': SITE_CONF.get('oauth.cookie_domain')},
'facebook_secret': {'default': 'XXX'},
'facebook_api_key': {'default': 'XXX'},
'facebook_redirect_uri': {'default': '{}/login/facebook'.format(API_URL)},
'twitter_consumer_key': {'default': 'XXX'},
'twitter_consumer_secret': {'default': 'XXX'},
'urlist_salt': {'default': 'XXX'}
}
_db_opts = {
'dbname': {'default': 'urlist'},
'dbhost': {'default': 'mongo1'},
'dbport': {'default': 27017, 'type': int},
'dbusr': {'default': ''},
'dbpwd': {'default': ''},
}
# Get Tornado default options
_tornado_opts = {k: v.value() for k, v in opt.options.iteritems()}
_options = {'server': _srv_opts,
'database': _db_opts,
'tornado': _tornado_opts,
'oauth': _oauth_opts,
'motherbrain': _motherbrain_opts}
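# Option names (per section) that may also be overridden from the command line.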
_cli_args = {'server': ['port', 'debug', 'config'],
'motherbrain': ['datadir']}
config = get_config(_options, _cli_args)
if SITE_CONF:
logging.info('CONF::SITE --- Read')
| urlist/urlist | motherbrain/api_server/conf.py | Python | gpl-3.0 | 2,842 | 0.000352 |
###
# Copyright (c) 2015, butterscotchstallion
# Copyright (c) 2020, oddluck <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import supybot.utils as utils
from supybot.commands import *
import supybot.plugins as plugins
import supybot.ircutils as ircutils
import supybot.ircmsgs as ircmsgs
import supybot.callbacks as callbacks
import supybot.log as log
import json, random, re
from string import Template
try:
from supybot.i18n import PluginInternationalization
_ = PluginInternationalization("IMDb")
except ImportError:
# Placeholder that allows to run the plugin on a bot
# without the i18n module
_ = lambda x: x
class lowercase_dict(dict):
def __getitem__(self, name):
return dict.__getitem__(self, name.lower())
class lowercase_template(Template):
def safe_substitute(self, mapping=None, **kws):
if mapping is None:
mapping = {}
m = lowercase_dict((k.lower(), v) for k, v in mapping.items())
m.update(lowercase_dict((k.lower(), v) for k, v in kws.items()))
return Template.safe_substitute(self, m)
class IMDb(callbacks.Plugin):
"""Queries OMDB database for information about IMDb titles"""
threaded = True
def dosearch(self, irc, channel, text):
google = None
ddg = None
match = None
if self.registryValue("google", channel) > 0:
google = irc.getCallback("google")
if not google:
log.error("IMDb: Error: Google search enabled but plugin not loaded.")
if self.registryValue("ddg", channel) > 0:
ddg = irc.getCallback("ddg")
if not ddg:
log.error("IMDb: Error: DDG search enabled but plugin not loaded.")
if not google and not ddg:
log.error("IMDb: Google and DDG plugins not loaded.")
return
query = "site:www.imdb.com/title/ %s" % text
pattern = re.compile(r"https?://www.imdb.com/title/tt\d+/$")
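        # Try the enabled search backends in priority order (registry value 1 is tried first).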
for i in range(1, 3):
if match:
break
if google and self.registryValue("google", channel) == i:
try:
results = google.decode(google.search(query, irc.network, channel))
for r in results:
try:
match = re.search(pattern, r["url"])
except TypeError:
match = re.search(pattern, r.link)
if match:
log.debug(
"IMDb: found link using Google search: %s"
% match.group(0)
)
break
except:
pass
elif ddg and self.registryValue("ddg", channel) == i:
try:
results = ddg.search_core(
query,
channel_context=channel,
max_results=10,
show_snippet=False,
)
for r in results:
match = re.search(pattern, r[2])
if match:
log.debug(
"IMDb: found link using DDG search %s" % match.group(0)
)
break
except:
pass
if match:
return match.group(0)
else:
return
def imdb(self, irc, msg, args, query):
"""<title>
Queries the OMDB API about an IMDb title. Search by title name or IMDb ID.
"""
channel = msg.channel
if not channel:
return
        # XXX Bail out unless the plugin is 'enabled' in this channel:
        # config channel #channel plugins.IMDb.enable True or False (On or Off)
if not self.registryValue('enable', msg.channel, irc.network):
return
url = None
response = None
result = None
if not self.registryValue("omdbAPI"):
irc.error("Error: You must set an OMDB API key to use this plugin.")
return
id = re.match(r"tt\d+", query.strip())
if id:
url = "http://imdb.com/title/{0}".format(id.group(0))
if not id:
url = self.dosearch(irc, msg.channel, query)
if url:
id = url.split("/title/")[1].rstrip("/")
url = "http://www.omdbapi.com/?" + utils.web.urlencode(
{
"i": id,
"plot": "short",
"r": "json",
"apikey": self.registryValue("omdbAPI"),
}
)
log.debug("IMDb: requesting %s" % url)
else:
url = "http://www.omdbapi.com/?" + utils.web.urlencode(
{
"t": query,
"plot": "short",
"r": "json",
"apikey": self.registryValue("omdbAPI"),
}
)
log.debug("IMDb: requesting %s" % url)
request = utils.web.getUrl(url).decode()
response = json.loads(request)
if response["Response"] != "False" and not response.get("Error"):
imdb_template = lowercase_template(
self.registryValue("template", msg.channel)
)
response["logo"] = self.registryValue("logo", msg.channel)
response["tomatometer"] = "N/A"
response["metascore"] = "N/A"
for rating in response["Ratings"]:
if rating["Source"] == "Rotten Tomatoes":
response["tomatometer"] = rating["Value"]
if rating["Source"] == "Metacritic":
response["metascore"] = "{0}%".format(rating["Value"].split("/")[0])
result = imdb_template.safe_substitute(response)
elif response.get("Error"):
log.debug("IMDb: OMDB API: %s" % response["Error"])
if result:
irc.reply(result, prefixNick=False)
else:
irc.error(self.registryValue("noResultsMessage", msg.channel))
imdb = wrap(imdb, ["text"])
Class = IMDb
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| Alcheri/Plugins | IMDb/plugin.py | Python | bsd-3-clause | 7,840 | 0.001531 |
import serial #import serial module
import NASA_Conn
from NASA_Conn_TT import check
def read_rfid ():
ser = serial.Serial ("/dev/ttyAMA0") #Open named port
ser.baudrate = 9600 #Set baud rate to 9600
data = ser.read(12) #Read 12 characters from serial port to data
ser.close () #Close port
return data #Return data
print ("Reading,Type ctrl+z to exit: ")
id = read_rfid ()
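# Interpret the 12-character tag string as a hexadecimal number to get a numeric ID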
id=int(id,16)
check(id) #Function call
| shadow1dos/NASA | Run.py | Python | gpl-3.0 | 745 | 0.036242 |
# This file is part of Spacetime.
#
# Copyright 2010-2014 Leiden University.
# Spec module written by Willem Onderwaater.
#
# Spacetime is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Spacetime is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from ..generic.subplots import MultiTrend
class Normalization(MultiTrend):
normalization_factor = normalization_channel = 1
def set_normalization(self, factor, channel=None):
self.normalization_factor = factor
if channel:
self.normalization_channel = next(channel.iterchannels()).value
else:
self.normalization_channel = 1
def get_ydata(self, chandata):
return chandata.value * self.normalization_factor / self.normalization_channel
| Onderwaater/spacetime | lib/spacetime/modules/spec/subplots.py | Python | gpl-2.0 | 1,206 | 0.009121 |
import copy
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import adjusted_mutual_info_score, adjusted_rand_score
from pyhawkes.models import DiscreteTimeNetworkHawkesModelSpikeAndSlab
from pyhawkes.plotting.plotting import plot_network
def test_gibbs_sbm(seed=None):
"""
Create a discrete time Hawkes model and generate from it.
:return:
"""
if seed is None:
seed = np.random.randint(2**32)
print "Setting seed to ", seed
np.random.seed(seed)
C = 10
K = 100
T = 1000
dt = 1.0
B = 3
# Generate from a true model
network_hypers = {'C': C, 'beta': 1.0/K}
true_model = DiscreteTimeNetworkHawkesModelSpikeAndSlab(K=K, dt=dt, B=B,
network_hypers=network_hypers)
# S,R = true_model.generate(T=T)
c = true_model.network.c
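    # Order nodes by block assignment so members of the same block are adjacent in the plotted matrix.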
perm = np.argsort(c)
# Plot the true network
plt.ion()
plot_network(true_model.weight_model.A[np.ix_(perm, perm)],
true_model.weight_model.W[np.ix_(perm, perm)])
plt.pause(0.001)
# Make a new model for inference
network_hypers = {'C': C, 'beta': 1.0/K}
test_model = DiscreteTimeNetworkHawkesModelSpikeAndSlab(K=K, dt=dt, B=B,
network_hypers=network_hypers)
# test_model.add_data(S)
# Gibbs sample
N_samples = 10
samples = []
lps = []
for itr in xrange(N_samples):
if itr % 5 == 0:
print "Iteration: ", itr
samples.append(copy.deepcopy(test_model.get_parameters()))
lps.append(test_model.log_probability())
# Resample the network only
test_model.network.resample((true_model.weight_model.A,
true_model.weight_model.W))
plt.ioff()
# Compute sample statistics for second half of samples
c_samples = np.array([c for _,_,_,_,c,_,_,_ in samples])
print "True c: ", true_model.network.c
print "Test c: ", c_samples[-10:, :]
# Compute the adjusted mutual info score of the clusterings
amis = []
arss = []
for c in c_samples:
amis.append(adjusted_mutual_info_score(true_model.network.c, c))
arss.append(adjusted_rand_score(true_model.network.c, c))
plt.figure()
plt.plot(np.arange(N_samples), amis, '-r')
plt.plot(np.arange(N_samples), arss, '-b')
plt.xlabel("Iteration")
plt.ylabel("Clustering score")
plt.show()
test_gibbs_sbm() | mattjj/pyhawkes | test/test_sbm_gibbs.py | Python | mit | 2,521 | 0.005553 |
# Purpose: ac1009 table entries
# Created: 16.03.2011
# Copyright (C) 2011, Manfred Moitzi
# License: MIT License
from __future__ import unicode_literals
__author__ = "mozman <[email protected]>"
from ..entity import GenericWrapper
from ..tags import DXFTag
from ..classifiedtags import ClassifiedTags
from ..dxfattr import DXFAttr, DXFAttributes, DefSubclass
_LAYERTEMPLATE = """ 0
LAYER
5
0
2
LAYERNAME
70
0
62
7
6
CONTINUOUS
"""
# noinspection PyAugmentAssignment,PyUnresolvedReferences
class Layer(GenericWrapper):
TEMPLATE = ClassifiedTags.from_text(_LAYERTEMPLATE)
DXFATTRIBS = DXFAttributes(DefSubclass(None, {
'handle': DXFAttr(5, None),
'name': DXFAttr(2, None),
'flags': DXFAttr(70, None),
'color': DXFAttr(62, None), # dxf color index, if < 0 layer is off
'linetype': DXFAttr(6, None),
}))
LOCK = 0b00000100
UNLOCK = 0b11111011
def is_locked(self):
return self.dxf.flags & Layer.LOCK > 0
def lock(self):
self.dxf.flags = self.dxf.flags | Layer.LOCK
def unlock(self):
self.dxf.flags = self.dxf.flags & Layer.UNLOCK
def is_off(self):
return self.dxf.color < 0
def is_on(self):
return not self.is_off()
def on(self):
self.dxf.color = abs(self.dxf.color)
def off(self):
self.dxf.color = -abs(self.dxf.color)
def get_color(self):
return abs(self.dxf.color)
def set_color(self, color):
color = abs(color) if self.is_on() else -abs(color)
self.dxf.color = color
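# A short sketch of the flag and color conventions above; the layer instance is
# assumed to come from a drawing's layers table:
#   layer.lock()        # sets the 0b100 bit in dxf.flags, so is_locked() -> True
#   layer.unlock()      # clears that bit again
#   layer.off()         # stores a negative color index, so is_off() -> True
#   layer.set_color(3)  # keeps the current on/off sign; get_color() always returns abs()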
_STYLETEMPLATE = """ 0
STYLE
5
0
2
STYLENAME
70
0
40
0.0
41
1.0
50
0.0
71
0
42
1.0
3
arial.ttf
4
"""
class Style(GenericWrapper):
TEMPLATE = ClassifiedTags.from_text(_STYLETEMPLATE)
DXFATTRIBS = DXFAttributes(DefSubclass(None, {
'handle': DXFAttr(5, None),
'name': DXFAttr(2, None),
'flags': DXFAttr(70, None),
'height': DXFAttr(40, None), # fixed height, 0 if not fixed
'width': DXFAttr(41, None), # width factor
'oblique': DXFAttr(50, None), # oblique angle in degree, 0 = vertical
'text_generation_flags': DXFAttr(71, None), # 2 = backward, 4 = mirrored in Y
'last_height': DXFAttr(42, None), # last height used
'font': DXFAttr(3, None), # primary font file name
'bigfont': DXFAttr(4, None), # big font name, blank if none
}))
_LTYPETEMPLATE = """ 0
LTYPE
5
0
2
LTYPENAME
70
0
3
LTYPEDESCRIPTION
72
65
"""
class Linetype(GenericWrapper):
TEMPLATE = ClassifiedTags.from_text(_LTYPETEMPLATE)
DXFATTRIBS = DXFAttributes(DefSubclass(None, {
'handle': DXFAttr(5, None),
'name': DXFAttr(2, None),
'description': DXFAttr(3, None),
'length': DXFAttr(40, None),
'items': DXFAttr(73, None),
}))
@classmethod
def new(cls, handle, dxfattribs=None, dxffactory=None):
if dxfattribs is not None:
pattern = dxfattribs.pop('pattern', [0.0])
else:
pattern = [0.0]
entity = super(Linetype, cls).new(handle, dxfattribs, dxffactory)
entity._setup_pattern(pattern)
return entity
def _setup_pattern(self, pattern):
self.tags.noclass.append(DXFTag(73, len(pattern) - 1))
self.tags.noclass.append(DXFTag(40, float(pattern[0])))
self.tags.noclass.extend((DXFTag(49, float(p)) for p in pattern[1:]))
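# Sketch of the pattern encoding handled by _setup_pattern(): the first list entry
# is the overall pattern length (group code 40), the remaining entries are dash/space
# elements (group code 49), and group code 73 stores their count. Handle and values
# below are illustrative only:
#   Linetype.new('FF', dxfattribs={
#       'name': 'DASHDOT',
#       'description': 'Dash Dot . _ . _',
#       'pattern': [2.0, 1.25, -0.25, 0.25, -0.25],  # total length 2.0, 4 elements
#   })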
_VPORTTEMPLATE = """ 0
VPORT
5
0
2
VPORTNAME
70
0
10
0.0
20
0.0
11
1.0
21
1.0
12
70.0
22
50.0
13
0.0
23
0.0
14
0.5
24
0.5
15
0.5
25
0.5
16
0.0
26
0.0
36
1.0
17
0.0
27
0.0
37
0.0
40
70.
41
1.34
42
50.0
43
0.0
44
0.0
50
0.0
51
0.0
71
0
72
1000
73
1
74
3
75
0
76
0
77
0
78
0
"""
class Viewport(GenericWrapper):
TEMPLATE = ClassifiedTags.from_text(_VPORTTEMPLATE)
DXFATTRIBS = DXFAttributes(DefSubclass(None, {
'handle': DXFAttr(5, None),
'name': DXFAttr(2, None),
'flags': DXFAttr(70, None),
'lower_left': DXFAttr(10, 'Point2D'),
'upper_right': DXFAttr(11, 'Point2D'),
'center_point': DXFAttr(12, 'Point2D'),
'snap_base': DXFAttr(13, 'Point2D'),
'snap_spacing': DXFAttr(14, 'Point2D'),
'grid_spacing': DXFAttr(15, 'Point2D'),
'direction_point': DXFAttr(16, 'Point3D'),
'target_point': DXFAttr(17, 'Point3D'),
'height': DXFAttr(40, None),
'aspect_ratio': DXFAttr(41, None),
'lens_length': DXFAttr(42, None),
'front_clipping': DXFAttr(43, None),
'back_clipping': DXFAttr(44, None),
'snap_rotation': DXFAttr(50, None),
'view_twist': DXFAttr(51, None),
'status': DXFAttr(68, None),
'id': DXFAttr(69, None),
'view_mode': DXFAttr(71, None),
'circle_zoom': DXFAttr(72, None),
'fast_zoom': DXFAttr(73, None),
'ucs_icon': DXFAttr(74, None),
'snap_on': DXFAttr(75, None),
'grid_on': DXFAttr(76, None),
'snap_style': DXFAttr(77, None),
'snap_isopair': DXFAttr(78, None),
}))
_UCSTEMPLATE = """ 0
UCS
5
0
2
UCSNAME
70
0
10
0.0
20
0.0
30
0.0
11
1.0
21
0.0
31
0.0
12
0.0
22
1.0
32
0.0
"""
class UCS(GenericWrapper):
TEMPLATE = ClassifiedTags.from_text(_UCSTEMPLATE)
DXFATTRIBS = DXFAttributes(DefSubclass(None, {
'handle': DXFAttr(5, None),
'name': DXFAttr(2, None),
'flags': DXFAttr(70, None),
'origin': DXFAttr(10, 'Point3D'),
'xaxis': DXFAttr(11, 'Point3D'),
'yaxis': DXFAttr(12, 'Point3D'),
}))
_APPIDTEMPLATE = """ 0
APPID
5
0
2
APPNAME
70
0
"""
class AppID(GenericWrapper):
TEMPLATE = ClassifiedTags.from_text(_APPIDTEMPLATE)
DXFATTRIBS = DXFAttributes(DefSubclass(None, {
'handle': DXFAttr(5, None),
'name': DXFAttr(2, None),
'flags': DXFAttr(70, None),
}))
_VIEWTEMPLATE = """ 0
VIEW
5
0
2
VIEWNAME
70
0
10
0.0
20
0.0
11
1.0
21
1.0
31
1.0
12
0.0
22
0.0
32
0.0
40
70.
41
1.0
42
50.0
43
0.0
44
0.0
50
0.0
71
0
"""
class View(GenericWrapper):
TEMPLATE = ClassifiedTags.from_text(_VIEWTEMPLATE)
DXFATTRIBS = DXFAttributes(DefSubclass(None, {
'handle': DXFAttr(5, None),
'name': DXFAttr(2, None),
'flags': DXFAttr(70, None),
'height': DXFAttr(40, None),
'width': DXFAttr(41, None),
'center_point': DXFAttr(10, 'Point2D'),
'direction_point': DXFAttr(11, 'Point3D'),
'target_point': DXFAttr(12, 'Point3D'),
'lens_length': DXFAttr(42, None),
'front_clipping': DXFAttr(43, None),
'back_clipping': DXFAttr(44, None),
'view_twist': DXFAttr(50, None),
'view_mode': DXFAttr(71, None),
}))
_DIMSTYLETEMPLATE = """ 0
DIMSTYLE
105
0
2
DIMSTYLENAME
70
0
3
4
5
6
7
40
1.0
41
3.0
42
2.0
43
9.0
44
5.0
45
0.0
46
0.0
47
0.0
48
0.0
140
3.0
141
2.0
142
0.0
143
25.399999999999999
144
1.0
145
0.0
146
1.0
147
2.0
71
0
72
0
73
1
74
1
75
0
76
0
77
0
78
0
170
0
171
2
172
0
173
0
174
0
175
0
176
0
177
0
178
0
"""
class DimStyle(GenericWrapper):
TEMPLATE = ClassifiedTags.from_text(_DIMSTYLETEMPLATE)
DXFATTRIBS = DXFAttributes(DefSubclass(None, {
'handle': DXFAttr(105, None),
'name': DXFAttr(2, None),
'flags': DXFAttr(70, None),
'dimpost': DXFAttr(3, None),
'dimapost': DXFAttr(4, None),
'dimblk': DXFAttr(5, None),
'dimblk1': DXFAttr(6, None),
'dimblk2': DXFAttr(7, None),
'dimscale': DXFAttr(40, None),
'dimasz': DXFAttr(41, None),
'dimexo': DXFAttr(42, None),
'dimdli': DXFAttr(43, None),
'dimexe': DXFAttr(44, None),
'dimrnd': DXFAttr(45, None),
'dimdle': DXFAttr(46, None),
'dimtp': DXFAttr(47, None),
'dimtm': DXFAttr(48, None),
'dimtxt': DXFAttr(140, None),
'dimcen': DXFAttr(141, None),
'dimtsz': DXFAttr(142, None),
'dimaltf': DXFAttr(143, None),
'dimlfac': DXFAttr(144, None),
'dimtvp': DXFAttr(145, None),
'dimtfac': DXFAttr(146, None),
'dimgap': DXFAttr(147, None),
'dimtol': DXFAttr(71, None),
'dimlim': DXFAttr(72, None),
'dimtih': DXFAttr(73, None),
'dimtoh': DXFAttr(74, None),
'dimse1': DXFAttr(75, None),
'dimse2': DXFAttr(76, None),
'dimtad': DXFAttr(77, None),
'dimzin': DXFAttr(78, None),
'dimalt': DXFAttr(170, None),
'dimaltd': DXFAttr(171, None),
'dimtofl': DXFAttr(172, None),
'dimsah': DXFAttr(173, None),
'dimtix': DXFAttr(174, None),
'dimsoxd': DXFAttr(175, None),
'dimclrd': DXFAttr(176, None),
'dimclre': DXFAttr(177, None),
'dimclrt': DXFAttr(178, None),
}))
| lautr3k/RepRap-iTopie | odmt/ezdxf/ac1009/tableentries.py | Python | gpl-3.0 | 8,896 | 0.00045 |
# sql/types_api.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Base types API.
"""
from .. import exc, util
from . import operators
from .visitors import Visitable, VisitableType
from .base import SchemaEventTarget
# these are back-assigned by sqltypes.
BOOLEANTYPE = None
INTEGERTYPE = None
NULLTYPE = None
STRINGTYPE = None
MATCHTYPE = None
INDEXABLE = None
_resolve_value_to_type = None
class TypeEngine(Visitable):
"""The ultimate base class for all SQL datatypes.
Common subclasses of :class:`.TypeEngine` include
:class:`.String`, :class:`.Integer`, and :class:`.Boolean`.
For an overview of the SQLAlchemy typing system, see
:ref:`types_toplevel`.
.. seealso::
:ref:`types_toplevel`
"""
_sqla_type = True
_isnull = False
class Comparator(operators.ColumnOperators):
"""Base class for custom comparison operations defined at the
type level. See :attr:`.TypeEngine.comparator_factory`.
"""
__slots__ = 'expr', 'type'
default_comparator = None
def __init__(self, expr):
self.expr = expr
self.type = expr.type
@util.dependencies('sqlalchemy.sql.default_comparator')
def operate(self, default_comparator, op, *other, **kwargs):
o = default_comparator.operator_lookup[op.__name__]
return o[0](self.expr, op, *(other + o[1:]), **kwargs)
@util.dependencies('sqlalchemy.sql.default_comparator')
def reverse_operate(self, default_comparator, op, other, **kwargs):
o = default_comparator.operator_lookup[op.__name__]
return o[0](self.expr, op, other,
reverse=True, *o[1:], **kwargs)
def _adapt_expression(self, op, other_comparator):
"""evaluate the return type of <self> <op> <othertype>,
and apply any adaptations to the given operator.
This method determines the type of a resulting binary expression
given two source types and an operator. For example, two
:class:`.Column` objects, both of the type :class:`.Integer`, will
produce a :class:`.BinaryExpression` that also has the type
:class:`.Integer` when compared via the addition (``+``) operator.
However, using the addition operator with an :class:`.Integer`
and a :class:`.Date` object will produce a :class:`.Date`, assuming
"days delta" behavior by the database (in reality, most databases
other than PostgreSQL don't accept this particular operation).
The method returns a tuple of the form <operator>, <type>.
The resulting operator and type will be those applied to the
resulting :class:`.BinaryExpression` as the final operator and the
right-hand side of the expression.
            Note that only a subset of operators make use of
:meth:`._adapt_expression`,
including math operators and user-defined operators, but not
boolean comparison or special SQL keywords like MATCH or BETWEEN.
"""
return op, self.type
def __reduce__(self):
return _reconstitute_comparator, (self.expr, )
hashable = True
"""Flag, if False, means values from this type aren't hashable.
Used by the ORM when uniquing result lists.
"""
comparator_factory = Comparator
"""A :class:`.TypeEngine.Comparator` class which will apply
to operations performed by owning :class:`.ColumnElement` objects.
The :attr:`.comparator_factory` attribute is a hook consulted by
the core expression system when column and SQL expression operations
are performed. When a :class:`.TypeEngine.Comparator` class is
associated with this attribute, it allows custom re-definition of
all existing operators, as well as definition of new operators.
Existing operators include those provided by Python operator overloading
such as :meth:`.operators.ColumnOperators.__add__` and
:meth:`.operators.ColumnOperators.__eq__`,
those provided as standard
attributes of :class:`.operators.ColumnOperators` such as
:meth:`.operators.ColumnOperators.like`
and :meth:`.operators.ColumnOperators.in_`.
Rudimentary usage of this hook is allowed through simple subclassing
of existing types, or alternatively by using :class:`.TypeDecorator`.
See the documentation section :ref:`types_operators` for examples.
.. versionadded:: 0.8 The expression system was enhanced to support
customization of operators on a per-type level.
"""
should_evaluate_none = False
"""If True, the Python constant ``None`` is considered to be handled
explicitly by this type.
The ORM uses this flag to indicate that a positive value of ``None``
is passed to the column in an INSERT statement, rather than omitting
the column from the INSERT statement which has the effect of firing
off column-level defaults. It also allows types which have special
behavior for Python None, such as a JSON type, to indicate that
they'd like to handle the None value explicitly.
To set this flag on an existing type, use the
:meth:`.TypeEngine.evaluates_none` method.
.. seealso::
:meth:`.TypeEngine.evaluates_none`
.. versionadded:: 1.1
"""
def evaluates_none(self):
"""Return a copy of this type which has the :attr:`.should_evaluate_none`
flag set to True.
E.g.::
Table(
'some_table', metadata,
Column(
String(50).evaluates_none(),
nullable=True,
server_default='no value')
)
The ORM uses this flag to indicate that a positive value of ``None``
is passed to the column in an INSERT statement, rather than omitting
the column from the INSERT statement which has the effect of firing
off column-level defaults. It also allows for types which have
special behavior associated with the Python None value to indicate
that the value doesn't necessarily translate into SQL NULL; a
prime example of this is a JSON type which may wish to persist the
JSON value ``'null'``.
        In all cases, the actual NULL SQL value can always be
persisted in any column by using
the :obj:`~.expression.null` SQL construct in an INSERT statement
or associated with an ORM-mapped attribute.
.. note::
The "evaulates none" flag does **not** apply to a value
of ``None`` passed to :paramref:`.Column.default` or
:paramref:`.Column.server_default`; in these cases, ``None``
still means "no default".
.. versionadded:: 1.1
.. seealso::
:ref:`session_forcing_null` - in the ORM documentation
:paramref:`.postgresql.JSON.none_as_null` - PostgreSQL JSON
interaction with this flag.
:attr:`.TypeEngine.should_evaluate_none` - class-level flag
"""
typ = self.copy()
typ.should_evaluate_none = True
return typ
def copy(self, **kw):
return self.adapt(self.__class__)
def compare_against_backend(self, dialect, conn_type):
"""Compare this type against the given backend type.
This function is currently not implemented for SQLAlchemy
types, and for all built in types will return ``None``. However,
it can be implemented by a user-defined type
where it can be consumed by schema comparison tools such as
Alembic autogenerate.
        A future release of SQLAlchemy will potentially implement this method
for builtin types as well.
The function should return True if this type is equivalent to the
given type; the type is typically reflected from the database
so should be database specific. The dialect in use is also
passed. It can also return False to assert that the type is
not equivalent.
:param dialect: a :class:`.Dialect` that is involved in the comparison.
:param conn_type: the type object reflected from the backend.
.. versionadded:: 1.0.3
"""
return None
def copy_value(self, value):
return value
def literal_processor(self, dialect):
"""Return a conversion function for processing literal values that are
to be rendered directly without using binds.
This function is used when the compiler makes use of the
"literal_binds" flag, typically used in DDL generation as well
as in certain scenarios where backends don't accept bound parameters.
.. versionadded:: 0.9.0
"""
return None
def bind_processor(self, dialect):
"""Return a conversion function for processing bind values.
Returns a callable which will receive a bind parameter value
as the sole positional argument and will return a value to
send to the DB-API.
If processing is not necessary, the method should return ``None``.
:param dialect: Dialect instance in use.
"""
return None
def result_processor(self, dialect, coltype):
"""Return a conversion function for processing result row values.
Returns a callable which will receive a result row column
value as the sole positional argument and will return a value
to return to the user.
If processing is not necessary, the method should return ``None``.
:param dialect: Dialect instance in use.
:param coltype: DBAPI coltype argument received in cursor.description.
"""
return None
def column_expression(self, colexpr):
"""Given a SELECT column expression, return a wrapping SQL expression.
This is typically a SQL function that wraps a column expression
as rendered in the columns clause of a SELECT statement.
It is used for special data types that require
columns to be wrapped in some special database function in order
to coerce the value before being sent back to the application.
It is the SQL analogue of the :meth:`.TypeEngine.result_processor`
method.
The method is evaluated at statement compile time, as opposed
to statement construction time.
See also:
:ref:`types_sql_value_processing`
"""
return None
@util.memoized_property
def _has_column_expression(self):
"""memoized boolean, check if column_expression is implemented.
Allows the method to be skipped for the vast majority of expression
types that don't use this feature.
"""
return self.__class__.column_expression.__code__ \
is not TypeEngine.column_expression.__code__
def bind_expression(self, bindvalue):
""""Given a bind value (i.e. a :class:`.BindParameter` instance),
return a SQL expression in its place.
This is typically a SQL function that wraps the existing bound
parameter within the statement. It is used for special data types
that require literals being wrapped in some special database function
in order to coerce an application-level value into a database-specific
format. It is the SQL analogue of the
:meth:`.TypeEngine.bind_processor` method.
The method is evaluated at statement compile time, as opposed
to statement construction time.
Note that this method, when implemented, should always return
the exact same structure, without any conditional logic, as it
may be used in an executemany() call against an arbitrary number
of bound parameter sets.
See also:
:ref:`types_sql_value_processing`
"""
return None
@util.memoized_property
def _has_bind_expression(self):
"""memoized boolean, check if bind_expression is implemented.
Allows the method to be skipped for the vast majority of expression
types that don't use this feature.
"""
return self.__class__.bind_expression.__code__ \
is not TypeEngine.bind_expression.__code__
def compare_values(self, x, y):
"""Compare two values for equality."""
return x == y
def get_dbapi_type(self, dbapi):
"""Return the corresponding type object from the underlying DB-API, if
any.
This can be useful for calling ``setinputsizes()``, for example.
"""
return None
@property
def python_type(self):
"""Return the Python type object expected to be returned
by instances of this type, if known.
Basically, for those types which enforce a return type,
or are known across the board to do such for all common
DBAPIs (like ``int`` for example), will return that type.
If a return type is not defined, raises
``NotImplementedError``.
Note that any type also accommodates NULL in SQL which
means you can also get back ``None`` from any type
in practice.
"""
raise NotImplementedError()
def with_variant(self, type_, dialect_name):
"""Produce a new type object that will utilize the given
type when applied to the dialect of the given name.
e.g.::
from sqlalchemy.types import String
from sqlalchemy.dialects import mysql
s = String()
s = s.with_variant(mysql.VARCHAR(collation='foo'), 'mysql')
The construction of :meth:`.TypeEngine.with_variant` is always
from the "fallback" type to that which is dialect specific.
The returned type is an instance of :class:`.Variant`, which
itself provides a :meth:`.Variant.with_variant`
that can be called repeatedly.
:param type_: a :class:`.TypeEngine` that will be selected
as a variant from the originating type, when a dialect
of the given name is in use.
:param dialect_name: base name of the dialect which uses
this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.)
.. versionadded:: 0.7.2
"""
return Variant(self, {dialect_name: to_instance(type_)})
@util.memoized_property
def _type_affinity(self):
"""Return a rudimental 'affinity' value expressing the general class
of type."""
typ = None
for t in self.__class__.__mro__:
if t in (TypeEngine, UserDefinedType):
return typ
elif issubclass(t, (TypeEngine, UserDefinedType)):
typ = t
else:
return self.__class__
def dialect_impl(self, dialect):
"""Return a dialect-specific implementation for this
:class:`.TypeEngine`.
"""
try:
return dialect._type_memos[self]['impl']
except KeyError:
return self._dialect_info(dialect)['impl']
def _cached_literal_processor(self, dialect):
"""Return a dialect-specific literal processor for this type."""
try:
return dialect._type_memos[self]['literal']
except KeyError:
d = self._dialect_info(dialect)
d['literal'] = lp = d['impl'].literal_processor(dialect)
return lp
def _cached_bind_processor(self, dialect):
"""Return a dialect-specific bind processor for this type."""
try:
return dialect._type_memos[self]['bind']
except KeyError:
d = self._dialect_info(dialect)
d['bind'] = bp = d['impl'].bind_processor(dialect)
return bp
def _cached_result_processor(self, dialect, coltype):
"""Return a dialect-specific result processor for this type."""
try:
return dialect._type_memos[self][coltype]
except KeyError:
d = self._dialect_info(dialect)
# key assumption: DBAPI type codes are
# constants. Else this dictionary would
# grow unbounded.
d[coltype] = rp = d['impl'].result_processor(dialect, coltype)
return rp
def _dialect_info(self, dialect):
"""Return a dialect-specific registry which
caches a dialect-specific implementation, bind processing
function, and one or more result processing functions."""
if self in dialect._type_memos:
return dialect._type_memos[self]
else:
impl = self._gen_dialect_impl(dialect)
if impl is self:
impl = self.adapt(type(self))
# this can't be self, else we create a cycle
assert impl is not self
dialect._type_memos[self] = d = {'impl': impl}
return d
def _gen_dialect_impl(self, dialect):
return dialect.type_descriptor(self)
def adapt(self, cls, **kw):
"""Produce an "adapted" form of this type, given an "impl" class
to work with.
This method is used internally to associate generic
types with "implementation" types that are specific to a particular
dialect.
"""
return util.constructor_copy(self, cls, **kw)
def coerce_compared_value(self, op, value):
"""Suggest a type for a 'coerced' Python value in an expression.
Given an operator and value, gives the type a chance
to return a type which the value should be coerced into.
The default behavior here is conservative; if the right-hand
side is already coerced into a SQL type based on its
Python type, it is usually left alone.
End-user functionality extension here should generally be via
:class:`.TypeDecorator`, which provides more liberal behavior in that
it defaults to coercing the other side of the expression into this
type, thus applying special Python conversions above and beyond those
        needed by the DBAPI to both sides. It also provides the public method
:meth:`.TypeDecorator.coerce_compared_value` which is intended for
end-user customization of this behavior.
"""
_coerced_type = _resolve_value_to_type(value)
if _coerced_type is NULLTYPE or _coerced_type._type_affinity \
is self._type_affinity:
return self
else:
return _coerced_type
def _compare_type_affinity(self, other):
return self._type_affinity is other._type_affinity
def compile(self, dialect=None):
"""Produce a string-compiled form of this :class:`.TypeEngine`.
When called with no arguments, uses a "default" dialect
to produce a string result.
:param dialect: a :class:`.Dialect` instance.
"""
# arg, return value is inconsistent with
# ClauseElement.compile()....this is a mistake.
if not dialect:
dialect = self._default_dialect()
return dialect.type_compiler.process(self)
@util.dependencies("sqlalchemy.engine.default")
def _default_dialect(self, default):
if self.__class__.__module__.startswith("sqlalchemy.dialects"):
tokens = self.__class__.__module__.split(".")[0:3]
mod = ".".join(tokens)
return getattr(__import__(mod).dialects, tokens[-1]).dialect()
else:
return default.DefaultDialect()
def __str__(self):
if util.py2k:
return unicode(self.compile()).\
encode('ascii', 'backslashreplace')
else:
return str(self.compile())
def __repr__(self):
return util.generic_repr(self)
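# A small illustration of the TypeEngine surface defined above; `some_dialect` and
# `SomeType` are placeholders, the rest is ordinary SQLAlchemy usage:
#   from sqlalchemy import Integer, String
#   Integer().compile()                           # "INTEGER" on the default dialect
#   String(50).compile(dialect=some_dialect)      # dialect-specific DDL string
#   String(50).with_variant(SomeType(), 'mysql')  # a Variant that picks SomeType on MySQL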
class VisitableCheckKWArg(util.EnsureKWArgType, VisitableType):
pass
class UserDefinedType(util.with_metaclass(VisitableCheckKWArg, TypeEngine)):
"""Base for user defined types.
This should be the base of new types. Note that
for most cases, :class:`.TypeDecorator` is probably
more appropriate::
import sqlalchemy.types as types
class MyType(types.UserDefinedType):
def __init__(self, precision = 8):
self.precision = precision
def get_col_spec(self, **kw):
return "MYTYPE(%s)" % self.precision
def bind_processor(self, dialect):
def process(value):
return value
return process
def result_processor(self, dialect, coltype):
def process(value):
return value
return process
Once the type is made, it's immediately usable::
table = Table('foo', meta,
Column('id', Integer, primary_key=True),
Column('data', MyType(16))
)
The ``get_col_spec()`` method will in most cases receive a keyword
argument ``type_expression`` which refers to the owning expression
of the type as being compiled, such as a :class:`.Column` or
:func:`.cast` construct. This keyword is only sent if the method
accepts keyword arguments (e.g. ``**kw``) in its argument signature;
introspection is used to check for this in order to support legacy
forms of this function.
.. versionadded:: 1.0.0 the owning expression is passed to
the ``get_col_spec()`` method via the keyword argument
``type_expression``, if it receives ``**kw`` in its signature.
"""
__visit_name__ = "user_defined"
ensure_kwarg = 'get_col_spec'
class Comparator(TypeEngine.Comparator):
__slots__ = ()
def _adapt_expression(self, op, other_comparator):
if hasattr(self.type, 'adapt_operator'):
util.warn_deprecated(
"UserDefinedType.adapt_operator is deprecated. Create "
"a UserDefinedType.Comparator subclass instead which "
"generates the desired expression constructs, given a "
"particular operator."
)
return self.type.adapt_operator(op), self.type
else:
return op, self.type
comparator_factory = Comparator
def coerce_compared_value(self, op, value):
"""Suggest a type for a 'coerced' Python value in an expression.
Default behavior for :class:`.UserDefinedType` is the
same as that of :class:`.TypeDecorator`; by default it returns
``self``, assuming the compared value should be coerced into
the same type as this one. See
:meth:`.TypeDecorator.coerce_compared_value` for more detail.
.. versionchanged:: 0.8 :meth:`.UserDefinedType.coerce_compared_value`
now returns ``self`` by default, rather than falling onto the
more fundamental behavior of
:meth:`.TypeEngine.coerce_compared_value`.
"""
return self
class TypeDecorator(SchemaEventTarget, TypeEngine):
"""Allows the creation of types which add additional functionality
to an existing type.
This method is preferred to direct subclassing of SQLAlchemy's
built-in types as it ensures that all required functionality of
the underlying type is kept in place.
Typical usage::
import sqlalchemy.types as types
class MyType(types.TypeDecorator):
'''Prefixes Unicode values with "PREFIX:" on the way in and
strips it off on the way out.
'''
impl = types.Unicode
def process_bind_param(self, value, dialect):
return "PREFIX:" + value
def process_result_value(self, value, dialect):
return value[7:]
def copy(self, **kw):
return MyType(self.impl.length)
The class-level "impl" attribute is required, and can reference any
TypeEngine class. Alternatively, the load_dialect_impl() method
can be used to provide different type classes based on the dialect
given; in this case, the "impl" variable can reference
``TypeEngine`` as a placeholder.
Types that receive a Python type that isn't similar to the ultimate type
used may want to define the :meth:`TypeDecorator.coerce_compared_value`
method. This is used to give the expression system a hint when coercing
Python objects into bind parameters within expressions. Consider this
expression::
mytable.c.somecol + datetime.date(2009, 5, 15)
Above, if "somecol" is an ``Integer`` variant, it makes sense that
we're doing date arithmetic, where above is usually interpreted
by databases as adding a number of days to the given date.
The expression system does the right thing by not attempting to
coerce the "date()" value into an integer-oriented bind parameter.
However, in the case of ``TypeDecorator``, we are usually changing an
incoming Python type to something new - ``TypeDecorator`` by default will
"coerce" the non-typed side to be the same type as itself. Such as below,
we define an "epoch" type that stores a date value as an integer::
class MyEpochType(types.TypeDecorator):
impl = types.Integer
epoch = datetime.date(1970, 1, 1)
def process_bind_param(self, value, dialect):
return (value - self.epoch).days
def process_result_value(self, value, dialect):
return self.epoch + timedelta(days=value)
Our expression of ``somecol + date`` with the above type will coerce the
"date" on the right side to also be treated as ``MyEpochType``.
This behavior can be overridden via the
:meth:`~TypeDecorator.coerce_compared_value` method, which returns a type
that should be used for the value of the expression. Below we set it such
that an integer value will be treated as an ``Integer``, and any other
value is assumed to be a date and will be treated as a ``MyEpochType``::
def coerce_compared_value(self, op, value):
if isinstance(value, int):
return Integer()
else:
return self
.. warning::
Note that the **behavior of coerce_compared_value is not inherited
by default from that of the base type**.
If the :class:`.TypeDecorator` is augmenting a
type that requires special logic for certain types of operators,
this method **must** be overridden. A key example is when decorating
the :class:`.postgresql.JSON` and :class:`.postgresql.JSONB` types;
the default rules of :meth:`.TypeEngine.coerce_compared_value` should
be used in order to deal with operators like index operations::
class MyJsonType(TypeDecorator):
impl = postgresql.JSON
def coerce_compared_value(self, op, value):
return self.impl.coerce_compared_value(op, value)
Without the above step, index operations such as ``mycol['foo']``
will cause the index value ``'foo'`` to be JSON encoded.
"""
__visit_name__ = "type_decorator"
def __init__(self, *args, **kwargs):
"""Construct a :class:`.TypeDecorator`.
Arguments sent here are passed to the constructor
of the class assigned to the ``impl`` class level attribute,
assuming the ``impl`` is a callable, and the resulting
object is assigned to the ``self.impl`` instance attribute
(thus overriding the class attribute of the same name).
If the class level ``impl`` is not a callable (the unusual case),
it will be assigned to the same instance attribute 'as-is',
ignoring those arguments passed to the constructor.
Subclasses can override this to customize the generation
of ``self.impl`` entirely.
"""
if not hasattr(self.__class__, 'impl'):
raise AssertionError("TypeDecorator implementations "
"require a class-level variable "
"'impl' which refers to the class of "
"type being decorated")
self.impl = to_instance(self.__class__.impl, *args, **kwargs)
coerce_to_is_types = (util.NoneType, )
"""Specify those Python types which should be coerced at the expression
level to "IS <constant>" when compared using ``==`` (and same for
    ``IS NOT`` in conjunction with ``!=``).
For most SQLAlchemy types, this includes ``NoneType``, as well as
``bool``.
:class:`.TypeDecorator` modifies this list to only include ``NoneType``,
as typedecorator implementations that deal with boolean types are common.
Custom :class:`.TypeDecorator` classes can override this attribute to
return an empty tuple, in which case no values will be coerced to
constants.
.. versionadded:: 0.8.2
Added :attr:`.TypeDecorator.coerce_to_is_types` to allow for easier
control of ``__eq__()`` ``__ne__()`` operations.
"""
class Comparator(TypeEngine.Comparator):
__slots__ = ()
def operate(self, op, *other, **kwargs):
kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types
return super(TypeDecorator.Comparator, self).operate(
op, *other, **kwargs)
def reverse_operate(self, op, other, **kwargs):
kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types
return super(TypeDecorator.Comparator, self).reverse_operate(
op, other, **kwargs)
@property
def comparator_factory(self):
if TypeDecorator.Comparator in self.impl.comparator_factory.__mro__:
return self.impl.comparator_factory
else:
return type("TDComparator",
(TypeDecorator.Comparator,
self.impl.comparator_factory),
{})
def _gen_dialect_impl(self, dialect):
"""
#todo
"""
adapted = dialect.type_descriptor(self)
if adapted is not self:
return adapted
# otherwise adapt the impl type, link
# to a copy of this TypeDecorator and return
# that.
typedesc = self.load_dialect_impl(dialect).dialect_impl(dialect)
tt = self.copy()
if not isinstance(tt, self.__class__):
raise AssertionError('Type object %s does not properly '
'implement the copy() method, it must '
'return an object of type %s' %
(self, self.__class__))
tt.impl = typedesc
return tt
@property
def _type_affinity(self):
"""
#todo
"""
return self.impl._type_affinity
def _set_parent(self, column):
"""Support SchemaEentTarget"""
super(TypeDecorator, self)._set_parent(column)
if isinstance(self.impl, SchemaEventTarget):
self.impl._set_parent(column)
def _set_parent_with_dispatch(self, parent):
"""Support SchemaEentTarget"""
super(TypeDecorator, self)._set_parent_with_dispatch(parent)
if isinstance(self.impl, SchemaEventTarget):
self.impl._set_parent_with_dispatch(parent)
def type_engine(self, dialect):
"""Return a dialect-specific :class:`.TypeEngine` instance
for this :class:`.TypeDecorator`.
In most cases this returns a dialect-adapted form of
the :class:`.TypeEngine` type represented by ``self.impl``.
        Makes use of :meth:`dialect_impl` but also traverses
into wrapped :class:`.TypeDecorator` instances.
Behavior can be customized here by overriding
:meth:`load_dialect_impl`.
"""
adapted = dialect.type_descriptor(self)
if not isinstance(adapted, type(self)):
return adapted
elif isinstance(self.impl, TypeDecorator):
return self.impl.type_engine(dialect)
else:
return self.load_dialect_impl(dialect)
def load_dialect_impl(self, dialect):
"""Return a :class:`.TypeEngine` object corresponding to a dialect.
This is an end-user override hook that can be used to provide
differing types depending on the given dialect. It is used
by the :class:`.TypeDecorator` implementation of :meth:`type_engine`
to help determine what type should ultimately be returned
for a given :class:`.TypeDecorator`.
By default returns ``self.impl``.
"""
return self.impl
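    # Sketch of a dialect-dependent override of load_dialect_impl(); the imports of
    # `types` and `mysql` from sqlalchemy / sqlalchemy.dialects are assumed:
    #   def load_dialect_impl(self, dialect):
    #       if dialect.name == 'mysql':
    #           return dialect.type_descriptor(mysql.LONGTEXT())
    #       return dialect.type_descriptor(types.Text())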
def __getattr__(self, key):
"""Proxy all other undefined accessors to the underlying
implementation."""
return getattr(self.impl, key)
def process_literal_param(self, value, dialect):
"""Receive a literal parameter value to be rendered inline within
a statement.
This method is used when the compiler renders a
literal value without using binds, typically within DDL
such as in the "server default" of a column or an expression
within a CHECK constraint.
The returned string will be rendered into the output string.
.. versionadded:: 0.9.0
"""
raise NotImplementedError()
def process_bind_param(self, value, dialect):
"""Receive a bound parameter value to be converted.
Subclasses override this method to return the
value that should be passed along to the underlying
:class:`.TypeEngine` object, and from there to the
DBAPI ``execute()`` method.
The operation could be anything desired to perform custom
behavior, such as transforming or serializing data.
This could also be used as a hook for validating logic.
This operation should be designed with the reverse operation
in mind, which would be the process_result_value method of
this class.
:param value: Data to operate upon, of any type expected by
this method in the subclass. Can be ``None``.
:param dialect: the :class:`.Dialect` in use.
"""
raise NotImplementedError()
def process_result_value(self, value, dialect):
"""Receive a result-row column value to be converted.
Subclasses should implement this method to operate on data
fetched from the database.
Subclasses override this method to return the
value that should be passed back to the application,
given a value that is already processed by
the underlying :class:`.TypeEngine` object, originally
from the DBAPI cursor method ``fetchone()`` or similar.
The operation could be anything desired to perform custom
behavior, such as transforming or serializing data.
This could also be used as a hook for validating logic.
:param value: Data to operate upon, of any type expected by
this method in the subclass. Can be ``None``.
:param dialect: the :class:`.Dialect` in use.
This operation should be designed to be reversible by
the "process_bind_param" method of this class.
"""
raise NotImplementedError()
@util.memoized_property
def _has_bind_processor(self):
"""memoized boolean, check if process_bind_param is implemented.
Allows the base process_bind_param to raise
NotImplementedError without needing to test an expensive
exception throw.
"""
return self.__class__.process_bind_param.__code__ \
is not TypeDecorator.process_bind_param.__code__
@util.memoized_property
def _has_literal_processor(self):
"""memoized boolean, check if process_literal_param is implemented.
"""
return self.__class__.process_literal_param.__code__ \
is not TypeDecorator.process_literal_param.__code__
def literal_processor(self, dialect):
"""Provide a literal processing function for the given
:class:`.Dialect`.
Subclasses here will typically override
:meth:`.TypeDecorator.process_literal_param` instead of this method
directly.
By default, this method makes use of
:meth:`.TypeDecorator.process_bind_param` if that method is
implemented, where :meth:`.TypeDecorator.process_literal_param` is
not. The rationale here is that :class:`.TypeDecorator` typically
deals with Python conversions of data that are above the layer of
database presentation. With the value converted by
:meth:`.TypeDecorator.process_bind_param`, the underlying type will
then handle whether it needs to be presented to the DBAPI as a bound
parameter or to the database as an inline SQL value.
.. versionadded:: 0.9.0
"""
if self._has_literal_processor:
process_param = self.process_literal_param
elif self._has_bind_processor:
# the bind processor should normally be OK
# for TypeDecorator since it isn't doing DB-level
# handling, the handling here won't be different for bound vs.
# literals.
process_param = self.process_bind_param
else:
process_param = None
if process_param:
impl_processor = self.impl.literal_processor(dialect)
if impl_processor:
def process(value):
return impl_processor(process_param(value, dialect))
else:
def process(value):
return process_param(value, dialect)
return process
else:
return self.impl.literal_processor(dialect)
def bind_processor(self, dialect):
"""Provide a bound value processing function for the
given :class:`.Dialect`.
This is the method that fulfills the :class:`.TypeEngine`
contract for bound value conversion. :class:`.TypeDecorator`
will wrap a user-defined implementation of
:meth:`process_bind_param` here.
User-defined code can override this method directly,
        though it's likely best to use :meth:`process_bind_param` so that
the processing provided by ``self.impl`` is maintained.
:param dialect: Dialect instance in use.
This method is the reverse counterpart to the
:meth:`result_processor` method of this class.
"""
if self._has_bind_processor:
process_param = self.process_bind_param
impl_processor = self.impl.bind_processor(dialect)
if impl_processor:
def process(value):
return impl_processor(process_param(value, dialect))
else:
def process(value):
return process_param(value, dialect)
return process
else:
return self.impl.bind_processor(dialect)
@util.memoized_property
def _has_result_processor(self):
"""memoized boolean, check if process_result_value is implemented.
Allows the base process_result_value to raise
NotImplementedError without needing to test an expensive
exception throw.
"""
return self.__class__.process_result_value.__code__ \
is not TypeDecorator.process_result_value.__code__
def result_processor(self, dialect, coltype):
"""Provide a result value processing function for the given
:class:`.Dialect`.
This is the method that fulfills the :class:`.TypeEngine`
contract for result value conversion. :class:`.TypeDecorator`
will wrap a user-defined implementation of
:meth:`process_result_value` here.
User-defined code can override this method directly,
        though it's likely best to use :meth:`process_result_value` so that
the processing provided by ``self.impl`` is maintained.
:param dialect: Dialect instance in use.
:param coltype: A SQLAlchemy data type
This method is the reverse counterpart to the
:meth:`bind_processor` method of this class.
"""
if self._has_result_processor:
process_value = self.process_result_value
impl_processor = self.impl.result_processor(dialect,
coltype)
if impl_processor:
def process(value):
return process_value(impl_processor(value), dialect)
else:
def process(value):
return process_value(value, dialect)
return process
else:
return self.impl.result_processor(dialect, coltype)
def coerce_compared_value(self, op, value):
"""Suggest a type for a 'coerced' Python value in an expression.
By default, returns self. This method is called by
the expression system when an object using this type is
on the left or right side of an expression against a plain Python
object which does not yet have a SQLAlchemy type assigned::
expr = table.c.somecolumn + 35
Where above, if ``somecolumn`` uses this type, this method will
be called with the value ``operator.add``
and ``35``. The return value is whatever SQLAlchemy type should
be used for ``35`` for this particular operation.
"""
return self
def copy(self, **kw):
"""Produce a copy of this :class:`.TypeDecorator` instance.
This is a shallow copy and is provided to fulfill part of
the :class:`.TypeEngine` contract. It usually does not
need to be overridden unless the user-defined :class:`.TypeDecorator`
has local state that should be deep-copied.
"""
instance = self.__class__.__new__(self.__class__)
instance.__dict__.update(self.__dict__)
return instance
def get_dbapi_type(self, dbapi):
"""Return the DBAPI type object represented by this
:class:`.TypeDecorator`.
By default this calls upon :meth:`.TypeEngine.get_dbapi_type` of the
underlying "impl".
"""
return self.impl.get_dbapi_type(dbapi)
def compare_values(self, x, y):
"""Given two values, compare them for equality.
By default this calls upon :meth:`.TypeEngine.compare_values`
of the underlying "impl", which in turn usually
uses the Python equals operator ``==``.
This function is used by the ORM to compare
an original-loaded value with an intercepted
"changed" value, to determine if a net change
has occurred.
"""
return self.impl.compare_values(x, y)
def __repr__(self):
return util.generic_repr(self, to_inspect=self.impl)
class Variant(TypeDecorator):
"""A wrapping type that selects among a variety of
implementations based on dialect in use.
The :class:`.Variant` type is typically constructed
using the :meth:`.TypeEngine.with_variant` method.
.. versionadded:: 0.7.2
.. seealso:: :meth:`.TypeEngine.with_variant` for an example of use.
"""
def __init__(self, base, mapping):
"""Construct a new :class:`.Variant`.
:param base: the base 'fallback' type
:param mapping: dictionary of string dialect names to
:class:`.TypeEngine` instances.
"""
self.impl = base
self.mapping = mapping
def load_dialect_impl(self, dialect):
if dialect.name in self.mapping:
return self.mapping[dialect.name]
else:
return self.impl
def with_variant(self, type_, dialect_name):
"""Return a new :class:`.Variant` which adds the given
type + dialect name to the mapping, in addition to the
mapping present in this :class:`.Variant`.
:param type_: a :class:`.TypeEngine` that will be selected
as a variant from the originating type, when a dialect
of the given name is in use.
:param dialect_name: base name of the dialect which uses
this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.)
"""
if dialect_name in self.mapping:
raise exc.ArgumentError(
"Dialect '%s' is already present in "
"the mapping for this Variant" % dialect_name)
mapping = self.mapping.copy()
mapping[dialect_name] = type_
return Variant(self.impl, mapping)
@property
def comparator_factory(self):
"""express comparison behavior in terms of the base type"""
return self.impl.comparator_factory
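# Chained variant mapping, sketched with the built-in dialect modules:
#   from sqlalchemy import String
#   from sqlalchemy.dialects import mysql, postgresql
#   t = String(9)
#   t = t.with_variant(mysql.VARCHAR(collation='foo'), 'mysql')
#   t = t.with_variant(postgresql.TEXT(), 'postgresql')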
def _reconstitute_comparator(expression):
return expression.comparator
def to_instance(typeobj, *arg, **kw):
if typeobj is None:
return NULLTYPE
if util.callable(typeobj):
return typeobj(*arg, **kw)
else:
return typeobj
def adapt_type(typeobj, colspecs):
if isinstance(typeobj, type):
typeobj = typeobj()
for t in typeobj.__class__.__mro__[0:-1]:
try:
impltype = colspecs[t]
break
except KeyError:
pass
else:
# couldn't adapt - so just return the type itself
# (it may be a user-defined type)
return typeobj
# if we adapted the given generic type to a database-specific type,
# but it turns out the originally given "generic" type
# is actually a subclass of our resulting type, then we were already
# given a more specific type than that required; so use that.
if (issubclass(typeobj.__class__, impltype)):
return typeobj
return typeobj.adapt(impltype)
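# Sketch of adapt_type() in use; the colspecs mapping and MyVARCHAR (assumed to
# subclass String) are hypothetical stand-ins for a dialect's lookup table:
#   colspecs = {String: MyVARCHAR}
#   adapt_type(String(30), colspecs)     # -> a MyVARCHAR built from String(30)
#   adapt_type(MyVARCHAR(30), colspecs)  # already specific enough -> returned unchanged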
| pcu4dros/pandora-core | workspace/lib/python3.5/site-packages/sqlalchemy/sql/type_api.py | Python | mit | 46,121 | 0.000022 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
QFieldSync
-------------------
begin : 2020-08-19
git sha : $Format:%H$
copyright : (C) 2020 by OPENGIS.ch
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import shutil
from enum import Enum
from pathlib import Path
from typing import Any, Dict, List, Optional
from PyQt5.QtCore import QUrl
from qgis.PyQt.QtCore import QAbstractListModel, QModelIndex, QObject, Qt, pyqtSignal
from qgis.PyQt.QtNetwork import QNetworkReply
from qfieldsync.core.cloud_api import CloudNetworkAccessManager
from qfieldsync.core.cloud_project import CloudProject, ProjectFile, ProjectFileCheckout
from qfieldsync.libqfieldsync.utils.file_utils import copy_multifile
class CloudTransferrer(QObject):
# TODO show progress of individual files
progress = pyqtSignal(float)
error = pyqtSignal(str, Exception)
abort = pyqtSignal()
finished = pyqtSignal()
upload_progress = pyqtSignal(float)
download_progress = pyqtSignal(float)
upload_finished = pyqtSignal()
download_finished = pyqtSignal()
delete_finished = pyqtSignal()
def __init__(
self, network_manager: CloudNetworkAccessManager, cloud_project: CloudProject
) -> None:
super(CloudTransferrer, self).__init__(parent=None)
assert cloud_project.local_dir
self.network_manager = network_manager
self.cloud_project = cloud_project
# NOTE these `_files_to_(upload|download|delete)` uses POSIX path as keys, so beware on M$
self._files_to_upload = {}
self._files_to_download: Dict[str, ProjectFile] = {}
self._files_to_delete = {}
self.total_upload_bytes = 0
self.total_download_bytes = 0
self.delete_files_finished = 0
self.is_aborted = False
self.is_started = False
self.is_finished = False
self.is_upload_active = False
self.is_download_active = False
self.is_delete_active = False
self.is_project_list_update_active = False
self.replies = []
self.temp_dir = Path(cloud_project.local_dir).joinpath(".qfieldsync")
self.error_message = None
self.throttled_uploader = None
self.throttled_downloader = None
self.throttled_deleter = None
self.transfers_model = None
if self.temp_dir.exists():
shutil.rmtree(self.temp_dir)
self.temp_dir.mkdir()
self.temp_dir.joinpath("backup").mkdir()
self.temp_dir.joinpath(FileTransfer.Type.UPLOAD.value).mkdir()
self.temp_dir.joinpath(FileTransfer.Type.DOWNLOAD.value).mkdir()
self.upload_finished.connect(self._on_upload_finished)
self.delete_finished.connect(self._on_delete_finished)
self.download_finished.connect(self._on_download_finished)
self.network_manager.logout_success.connect(self._on_logout_success)
def sync(
self,
files_to_upload: List[ProjectFile],
files_to_download: List[ProjectFile],
files_to_delete: List[ProjectFile],
) -> None:
assert not self.is_started
self.is_started = True
# .qgs/.qgz files should be uploaded the last, since they trigger a new job
        files_to_upload_sorted = sorted(
            files_to_upload,
            key=lambda f: f.path.suffix in (".qgs", ".qgz"),
        )
# prepare the files to be uploaded, copy them in a temporary destination
for project_file in files_to_upload_sorted:
assert project_file.local_path
project_file.flush()
temp_filename = self.temp_dir.joinpath(
FileTransfer.Type.UPLOAD.value, project_file.name
)
temp_filename.parent.mkdir(parents=True, exist_ok=True)
copy_multifile(project_file.local_path, temp_filename)
self.total_upload_bytes += project_file.local_size or 0
self._files_to_upload[str(project_file.path.as_posix())] = project_file
# prepare the files to be delete, both locally and remotely
for project_file in files_to_delete:
self._files_to_delete[str(project_file.path.as_posix())] = project_file
# prepare the files to be downloaded, download them in a temporary destination
for project_file in files_to_download:
temp_filename = self.temp_dir.joinpath(
FileTransfer.Type.DOWNLOAD.value, project_file.name
)
temp_filename.parent.mkdir(parents=True, exist_ok=True)
self.total_download_bytes += project_file.size or 0
self._files_to_download[str(project_file.path.as_posix())] = project_file
self.throttled_uploader = ThrottledFileTransferrer(
self.network_manager,
self.cloud_project,
# note the .qgs/.qgz files are sorted in the end
list(self._files_to_upload.values()),
FileTransfer.Type.UPLOAD,
)
self.throttled_deleter = ThrottledFileTransferrer(
self.network_manager,
self.cloud_project,
list(self._files_to_delete.values()),
FileTransfer.Type.DELETE,
)
self.throttled_downloader = ThrottledFileTransferrer(
self.network_manager,
self.cloud_project,
list(self._files_to_download.values()),
FileTransfer.Type.DOWNLOAD,
)
self.transfers_model = TransferFileLogsModel(
[
self.throttled_uploader,
self.throttled_deleter,
self.throttled_downloader,
]
)
self._make_backup()
self._upload()
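    # A rough sketch of how sync() is typically driven; the surrounding objects and
    # slot names are illustrative:
    #   transferrer = CloudTransferrer(network_manager, cloud_project)
    #   transferrer.upload_progress.connect(update_progress_bar)
    #   transferrer.finished.connect(on_sync_finished)
    #   transferrer.sync(files_to_upload, files_to_download, files_to_delete)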
def _upload(self) -> None:
assert not self.is_upload_active, "Upload in progress"
assert not self.is_delete_active, "Delete in progress"
assert not self.is_download_active, "Download in progress"
assert self.cloud_project.local_dir
self.is_upload_active = True
# nothing to upload
if len(self._files_to_upload) == 0:
self.upload_progress.emit(1)
self.upload_finished.emit()
# NOTE check _on_upload_finished
return
self.throttled_uploader.error.connect(self._on_throttled_upload_error)
self.throttled_uploader.progress.connect(self._on_throttled_upload_progress)
self.throttled_uploader.finished.connect(self._on_throttled_upload_finished)
self.throttled_uploader.transfer()
def _on_throttled_upload_progress(
self, filename: str, bytes_transferred: int, _bytes_total: int
) -> None:
fraction = min(bytes_transferred / max(self.total_upload_bytes, 1), 1)
self.upload_progress.emit(fraction)
def _on_throttled_upload_error(self, filename: str, error: str) -> None:
self.throttled_uploader.abort()
def _on_throttled_upload_finished(self) -> None:
self.upload_progress.emit(1)
self.upload_finished.emit()
return
def _delete(self) -> None:
assert not self.is_upload_active, "Upload in progress"
assert not self.is_delete_active, "Delete in progress"
assert not self.is_download_active, "Download in progress"
self.is_delete_active = True
# nothing to delete
if len(self._files_to_delete) == 0:
self.delete_finished.emit()
# NOTE check _on_delete_finished
return
self.throttled_deleter.error.connect(self._on_throttled_delete_error)
self.throttled_deleter.finished.connect(self._on_throttled_delete_finished)
self.throttled_deleter.transfer()
# in case all the files to delete were local only
if self.delete_files_finished == len(self._files_to_delete):
self.delete_finished.emit()
def _on_throttled_delete_error(self, filename: str, error: str) -> None:
self.throttled_deleter.abort()
def _on_throttled_delete_finished(self) -> None:
if self.delete_files_finished == len(self._files_to_delete):
self.delete_finished.emit()
def _download(self) -> None:
assert not self.is_upload_active, "Upload in progress"
assert not self.is_delete_active, "Delete in progress"
assert not self.is_download_active, "Download in progress"
self.is_download_active = True
# nothing to download
if len(self._files_to_download) == 0:
self.download_progress.emit(1)
self.download_finished.emit()
return
self.throttled_downloader.error.connect(self._on_throttled_download_error)
self.throttled_downloader.progress.connect(self._on_throttled_download_progress)
self.throttled_downloader.finished.connect(self._on_throttled_download_finished)
self.throttled_downloader.transfer()
def _on_throttled_download_progress(
self, filename: str, bytes_transferred: int, _bytes_total: int
) -> None:
fraction = min(bytes_transferred / max(self.total_download_bytes, 1), 1)
self.download_progress.emit(fraction)
def _on_throttled_download_error(self, filename: str, error: str) -> None:
self.throttled_downloader.abort()
def _on_throttled_download_finished(self) -> None:
self.download_progress.emit(1)
self.download_finished.emit()
return
def _on_upload_finished(self) -> None:
self.is_upload_active = False
self._update_project_files_list()
self._delete()
def _on_delete_finished(self) -> None:
self.is_delete_active = False
self._download()
def _on_download_finished(self) -> None:
if not self.import_qfield_project():
return
self.is_download_active = False
self.is_finished = True
if not self.is_project_list_update_active:
self.finished.emit()
def _update_project_files_list(self) -> None:
self.is_project_list_update_active = True
reply = self.network_manager.projects_cache.get_project_files(
self.cloud_project.id
)
reply.finished.connect(lambda: self._on_update_project_files_list_finished())
def _on_update_project_files_list_finished(self) -> None:
self.is_project_list_update_active = False
if not self.is_download_active:
if self.error_message:
return
self.finished.emit()
def abort_requests(self) -> None:
if self.is_aborted:
return
self.is_aborted = True
for transferrer in [
self.throttled_uploader,
self.throttled_downloader,
self.throttled_deleter,
]:
# it might be deleted
if transferrer:
transferrer.abort()
self.abort.emit()
def _make_backup(self) -> None:
        for project_file in [
            *self._files_to_upload.values(),
            *self._files_to_download.values(),
        ]:
if project_file.local_path and project_file.local_path.exists():
dest = self.temp_dir.joinpath("backup", project_file.path)
dest.parent.mkdir(parents=True, exist_ok=True)
copy_multifile(project_file.local_path, dest)
def _temp_dir2main_dir(self, subdir: str) -> None:
subdir_path = self.temp_dir.joinpath(subdir)
if not subdir_path.exists():
raise Exception(
self.tr('Directory "{}" does not exist').format(subdir_path)
)
for filename in subdir_path.glob("**/*"):
if filename.is_dir():
filename.mkdir(parents=True, exist_ok=True)
continue
source_filename = str(filename.relative_to(subdir_path).as_posix())
dest_filename = str(self._files_to_download[source_filename].local_path)
dest_path = Path(dest_filename)
if not dest_path.parent.exists():
dest_path.parent.mkdir(parents=True)
if source_filename.endswith((".gpkg-shm", ".gpkg-wal")):
for suffix in ("-shm", "-wal"):
source_path = Path(str(self.local_path) + suffix)
dest_path = Path(str(dest_filename) + suffix)
if source_path.exists():
shutil.copyfile(source_path, dest_path)
else:
dest_path.unlink()
shutil.copyfile(filename, dest_filename)
def import_qfield_project(self) -> bool:
try:
self._temp_dir2main_dir(
str(self.temp_dir.joinpath(FileTransfer.Type.DOWNLOAD.value))
)
return True
except Exception as err:
self.error_message = self.tr(
"Failed to copy temporary downloaded files to your project directory, restore the project state before the synchronization: {}. Trying to rollback changes..."
).format(str(err))
self.error.emit(
self.error_message,
err,
)
try:
self._temp_dir2main_dir(str(self.temp_dir.joinpath("backup")))
except Exception as errInner:
self.error_message = self.tr(
                    'Failed to restore the backup. Your project might be corrupted! Please check the ".qfieldsync/backup" directory and try to copy the files back manually.'
)
self.error.emit(
self.error_message,
errInner,
)
return False
def _on_logout_success(self) -> None:
self.abort_requests()
class FileTransfer(QObject):
progress = pyqtSignal(int, int)
finished = pyqtSignal()
class Type(Enum):
DOWNLOAD = "download"
UPLOAD = "upload"
DELETE = "delete"
def __init__(
self,
network_manager: CloudNetworkAccessManager,
cloud_project: CloudProject,
type: Type,
file: ProjectFile,
destination: Path,
version: str = None,
) -> None:
super(QObject, self).__init__()
self.network_manager = network_manager
self.cloud_project = cloud_project
self.replies: List[QNetworkReply] = []
self.redirects: List[QUrl] = []
self.file = file
self.filename = file.name
# filesystem filename
self.fs_filename = destination
self.fs_filename.parent.mkdir(parents=True, exist_ok=True)
self.error: Optional[Exception] = None
self.bytes_transferred = 0
self.bytes_total = 0
self.is_aborted = False
self.is_local_delete = False
self.is_local_delete_finished = False
self.type = type
self.version = version
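        # For delete transfers, files that exist locally (local-only, or both
        # local and on the cloud) are removed from disk instead of via the API.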
if self.file.checkout == ProjectFileCheckout.Local or (
self.file.checkout & ProjectFileCheckout.Cloud
and self.file.checkout & ProjectFileCheckout.Local
):
self.is_local_delete = True
def abort(self):
if not self.is_started:
return
if self.is_finished:
return
self.is_aborted = True
self.last_reply.abort()
def transfer(self) -> None:
if self.type == FileTransfer.Type.DOWNLOAD:
if self.is_redirect:
reply = self.network_manager.get(
self.last_redirect_url, str(self.fs_filename)
)
else:
params = {"version": self.version} if self.version else {}
reply = self.network_manager.cloud_get(
f"files/{self.cloud_project.id}/{self.filename}/",
local_filename=str(self.fs_filename),
params=params,
)
elif self.type == FileTransfer.Type.UPLOAD:
reply = self.network_manager.cloud_upload_files(
"files/" + self.cloud_project.id + "/" + self.filename,
filenames=[str(self.fs_filename)],
)
elif self.type == FileTransfer.Type.DELETE:
if self.is_local_delete:
try:
assert self.file.local_path
Path(self.file.local_path).unlink()
except Exception as err:
self.error = err
finally:
self.is_local_delete_finished = True
self.finished.emit()
return
reply = self.network_manager.delete_file(
self.cloud_project.id + "/" + self.filename + "/"
)
else:
raise NotImplementedError()
self.replies.append(reply)
reply.redirected.connect(lambda *args: self._on_redirected(*args))
reply.downloadProgress.connect(lambda *args: self._on_progress(*args))
reply.uploadProgress.connect(lambda *args: self._on_progress(*args))
reply.finished.connect(lambda *args: self._on_finished(*args))
def _on_progress(self, bytes_transferred: int, bytes_total: int) -> None:
        # Ignore progress reports that would move the counters backwards,
        # e.g. when a redirected request restarts reporting from zero.
if bytes_transferred < self.bytes_transferred or bytes_total < self.bytes_total:
return
self.bytes_transferred = bytes_transferred
self.bytes_total = bytes_total
self.progress.emit(bytes_transferred, bytes_total)
def _on_redirected(self, url: QUrl) -> None:
self.redirects.append(url)
self.last_reply.abort()
def _on_finished(self) -> None:
if self.is_redirect:
if self.type == FileTransfer.Type.DOWNLOAD:
self.transfer()
return
else:
raise NotImplementedError("Redirects on upload are not supported")
try:
self.network_manager.handle_response(self.last_reply, False)
if (
self.type == FileTransfer.Type.DOWNLOAD
and not Path(self.fs_filename).is_file()
):
self.error = Exception(
f'Downloaded file "{self.fs_filename}" not found!'
)
except Exception as err:
self.error = err
self.finished.emit()
@property
def last_reply(self) -> QNetworkReply:
if not self.replies:
raise ValueError("There are no replies yet!")
return self.replies[-1]
@property
def last_redirect_url(self) -> QUrl:
        if not self.redirects:
            raise ValueError("There are no redirects!")
return self.redirects[-1]
@property
def is_started(self) -> bool:
return self.is_local_delete_finished or len(self.replies) > 0
@property
def is_finished(self) -> bool:
if self.is_aborted:
return True
if self.is_local_delete_finished:
return True
if not self.replies:
return False
if self.is_redirect:
return False
return self.replies[-1].isFinished()
@property
def is_redirect(self) -> bool:
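        # One redirect URL is recorded per reply; equal counts mean the most
        # recent reply ended in a redirect that still has to be followed.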
if not self.replies:
return False
return len(self.replies) == len(self.redirects)
@property
def is_failed(self) -> bool:
if self.is_local_delete and self.error:
return True
if not self.replies:
return False
return self.last_reply.isFinished() and (
self.error is not None or self.last_reply.error() != QNetworkReply.NoError
)
class ThrottledFileTransferrer(QObject):
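    """Runs a set of file transfers, keeping at most `max_parallel_requests` active at a time."""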
error = pyqtSignal(str, str)
finished = pyqtSignal()
aborted = pyqtSignal()
file_finished = pyqtSignal(str)
progress = pyqtSignal(str, int, int)
def __init__(
self,
network_manager,
cloud_project,
files: List[ProjectFile],
transfer_type: FileTransfer.Type,
max_parallel_requests: int = 8,
) -> None:
        super().__init__()
self.transfers: Dict[str, FileTransfer] = {}
self.network_manager = network_manager
self.cloud_project = cloud_project
self.files = files
self.filenames = [f.name for f in files]
self.max_parallel_requests = max_parallel_requests
self.finished_count = 0
self.temp_dir = Path(cloud_project.local_dir).joinpath(".qfieldsync")
self.transfer_type = transfer_type
for file in self.files:
transfer = FileTransfer(
self.network_manager,
self.cloud_project,
self.transfer_type,
file,
self.temp_dir.joinpath(str(self.transfer_type.value), file.name),
)
            # Bind `transfer` via a default argument so each connection refers to
            # its own FileTransfer rather than the loop's last value.
            transfer.progress.connect(
                lambda *args, t=transfer: self._on_transfer_progress(t, *args)
            )
            transfer.finished.connect(
                lambda *args, t=transfer: self._on_transfer_finished(t, *args)
            )
assert file.name not in self.transfers
self.transfers[file.name] = transfer
def transfer(self):
transfers_count = 0
for transfer in self.transfers.values():
if transfer.is_finished:
continue
transfers_count += 1
            # skip transfers that have already started and are still waiting for a response
if not transfer.is_started:
transfer.transfer()
if transfers_count == self.max_parallel_requests:
break
def abort(self) -> None:
for transfer in self.transfers.values():
transfer.abort()
self.aborted.emit()
def _on_transfer_progress(self, transfer, bytes_received: int, bytes_total: int):
        bytes_received_sum = sum(t.bytes_transferred for t in self.transfers.values())
        bytes_total_sum = sum(t.bytes_total for t in self.transfers.values())
self.progress.emit(transfer.filename, bytes_received_sum, bytes_total_sum)
def _on_transfer_finished(self, transfer: FileTransfer) -> None:
self.transfer()
if transfer.error:
if transfer.type == FileTransfer.Type.DOWNLOAD:
msg = self.tr('Downloading file "{}" failed!').format(
transfer.fs_filename
)
elif transfer.type == FileTransfer.Type.UPLOAD:
msg = self.tr('Uploading file "{}" failed!').format(
transfer.fs_filename
)
elif transfer.type == FileTransfer.Type.DELETE:
msg = self.tr('Deleting file "{}" failed!').format(transfer.fs_filename)
else:
raise NotImplementedError()
self.error.emit(
transfer.filename,
msg,
)
self.finished_count += 1
self.file_finished.emit(transfer.filename)
if self.finished_count == len(self.transfers):
self.finished.emit()
return
class TransferFileLogsModel(QAbstractListModel):
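    """List model exposing one human-readable status line per file transfer."""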
def __init__(
self, transferrers: List[ThrottledFileTransferrer], parent: QObject = None
):
        super().__init__(parent)
self.transfers: List[FileTransfer] = []
self.filename_to_index: Dict[str, int] = {}
for transferrer in transferrers:
for filename, transfer in transferrer.transfers.items():
self.filename_to_index[filename] = len(self.transfers)
self.transfers.append(transfer)
transferrer.file_finished.connect(self._on_updated_transfer)
transferrer.error.connect(self._on_updated_transfer)
transferrer.progress.connect(self._on_updated_transfer)
def rowCount(self, parent: QModelIndex) -> int:
return len(self.transfers)
def data(self, index: QModelIndex, role: int) -> Any:
if index.row() < 0 or index.row() >= self.rowCount(QModelIndex()):
return None
if role == Qt.DisplayRole:
return self._data_string(self.transfers[index.row()])
return None
def index(self, row: int, col: int, _index: QModelIndex) -> QModelIndex:
return self.createIndex(row, col)
def _data_string(self, transfer: FileTransfer) -> str:
error_msg = ""
if transfer.is_failed:
error_msg = (
str(transfer.error)
if transfer.error
else "[{}] {}".format(
transfer.last_reply.error(), transfer.last_reply.errorString()
)
)
if transfer.type == FileTransfer.Type.DOWNLOAD:
if transfer.is_aborted:
return self.tr('Aborted "{}" download'.format(transfer.filename))
elif transfer.is_failed:
return self.tr(
'Failed to download "{}": {}'.format(transfer.filename, error_msg)
)
elif transfer.is_finished:
return self.tr('Downloaded "{}"'.format(transfer.filename))
elif transfer.is_started:
percentage = (
transfer.bytes_transferred / transfer.bytes_total
if transfer.bytes_total > 0
else 0
)
return self.tr(
'Downloading "{}" {}%'.format(
transfer.filename, round(percentage * 100)
)
)
else:
return self.tr('File to download "{}"'.format(transfer.filename))
elif transfer.type == FileTransfer.Type.UPLOAD:
if transfer.is_aborted:
return self.tr('Aborted "{}" upload'.format(transfer.filename))
elif transfer.is_failed:
return self.tr(
'Failed to upload "{}": {}'.format(transfer.filename, error_msg)
)
elif transfer.is_finished:
return self.tr('Uploaded "{}"'.format(transfer.filename))
elif transfer.is_started:
percentage = (
transfer.bytes_transferred / transfer.bytes_total
if transfer.bytes_total > 0
else 0
)
return self.tr(
'Uploading "{}" {}%'.format(
transfer.filename, round(percentage * 100)
)
)
else:
return self.tr('File to upload "{}"'.format(transfer.filename))
elif transfer.type == FileTransfer.Type.DELETE:
if transfer.file.checkout & ProjectFileCheckout.Cloud:
if transfer.is_aborted:
                    return self.tr(
                        'Aborted deleting "{}" on the cloud'.format(transfer.filename)
                    )
elif transfer.is_failed:
                    return self.tr(
                        'Failed to delete "{}" on the cloud'.format(transfer.filename)
                    )
elif transfer.is_finished:
return self.tr(
'File "{}" deleted on the cloud'.format(transfer.filename)
)
elif transfer.is_started:
return self.tr(
'Deleting "{}" on the cloud'.format(transfer.filename)
)
else:
return self.tr(
'File "{}" will be deleted on the cloud'.format(
transfer.filename
)
)
else:
if transfer.is_aborted:
                    return self.tr(
                        'Aborted deleting "{}" locally'.format(transfer.filename)
                    )
elif transfer.is_failed:
                    return self.tr(
                        'Failed to delete "{}" locally'.format(transfer.filename)
                    )
elif transfer.is_finished:
return self.tr(
'File "{}" locally deleted'.format(transfer.filename)
)
elif transfer.is_started:
return self.tr('Locally deleting "{}"'.format(transfer.filename))
else:
                    return self.tr(
                        'File "{}" will be deleted locally'.format(transfer.filename)
                    )
else:
raise NotImplementedError("Unknown transfer type")
def _on_updated_transfer(self, filename, *args) -> None:
row = self.filename_to_index[filename]
index = self.createIndex(row, 0)
self.dataChanged.emit(index, index, [Qt.DisplayRole])
| opengisch/QFieldSync | qfieldsync/core/cloud_transferrer.py | Python | lgpl-3.0 | 29,804 | 0.001409 |
# Copyright (C) 2014 Andrey Antukh <[email protected]>
# Copyright (C) 2014 Jesús Espino <[email protected]>
# Copyright (C) 2014 David Barragán <[email protected]>
# Copyright (C) 2014 Anler Hernández <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import pytest
from unittest import mock
import functools
class Object:
pass
@pytest.fixture
def object():
return Object()
class PartialMethodCaller:
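    """Proxy that forwards attribute access to `obj`, pre-binding `partial_params` to every call."""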
def __init__(self, obj, **partial_params):
self.obj = obj
self.partial_params = partial_params
def __getattr__(self, name):
return functools.partial(getattr(self.obj, name), **self.partial_params)
@pytest.fixture
def client():
from django.test.client import Client
class _Client(Client):
def login(self, user=None, backend="django.contrib.auth.backends.ModelBackend", **credentials):
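            # When a user object is given, bypass credential checks by patching
            # django.contrib.auth.authenticate to return that user directly.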
if user is None:
return super().login(**credentials)
with mock.patch('django.contrib.auth.authenticate') as authenticate:
user.backend = backend
authenticate.return_value = user
return super().login(**credentials)
@property
def json(self):
return PartialMethodCaller(obj=self, content_type='application/json;charset="utf-8"')
return _Client()
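# Illustrative usage in a test (the endpoint, payload and user object below are
# hypothetical, shown only to demonstrate the fixtures in this module):
#
#   def test_create_item(client):
#       client.login(user=some_user)
#       response = client.json.post("/api/v1/items", '{"name": "x"}')
#       assert response.status_code == 201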
@pytest.fixture
def outbox():
from django.core import mail
return mail.outbox
| CoolCloud/taiga-back | tests/fixtures.py | Python | agpl-3.0 | 2,031 | 0.001972 |
""" Python Character Mapping Codec generated from 'VENDORS/APPLE/ARABIC.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='mac-arabic',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
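# Illustrative note: when this module lives on the standard "encodings" package
# path, the codec can be looked up by name, e.g.
#   b'\xc7\xe4\xd3\xe4\xc7\xe5'.decode('mac-arabic')  # yields Arabic text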
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x0081: 0x00a0, # NO-BREAK SPACE, right-left
0x0082: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
0x0083: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0084: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
0x0085: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x0086: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x0087: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x0088: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
0x0089: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x008a: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x008b: 0x06ba, # ARABIC LETTER NOON GHUNNA
0x008c: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left
0x008d: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
0x008e: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x008f: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
0x0090: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x0091: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
0x0092: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x0093: 0x2026, # HORIZONTAL ELLIPSIS, right-left
0x0094: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x0095: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
0x0096: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
0x0097: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x0098: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left
0x0099: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x009a: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x009b: 0x00f7, # DIVISION SIGN, right-left
0x009c: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x009d: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
0x009e: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x009f: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x00a0: 0x0020, # SPACE, right-left
0x00a1: 0x0021, # EXCLAMATION MARK, right-left
0x00a2: 0x0022, # QUOTATION MARK, right-left
0x00a3: 0x0023, # NUMBER SIGN, right-left
0x00a4: 0x0024, # DOLLAR SIGN, right-left
0x00a5: 0x066a, # ARABIC PERCENT SIGN
0x00a6: 0x0026, # AMPERSAND, right-left
0x00a7: 0x0027, # APOSTROPHE, right-left
0x00a8: 0x0028, # LEFT PARENTHESIS, right-left
0x00a9: 0x0029, # RIGHT PARENTHESIS, right-left
0x00aa: 0x002a, # ASTERISK, right-left
0x00ab: 0x002b, # PLUS SIGN, right-left
0x00ac: 0x060c, # ARABIC COMMA
0x00ad: 0x002d, # HYPHEN-MINUS, right-left
0x00ae: 0x002e, # FULL STOP, right-left
0x00af: 0x002f, # SOLIDUS, right-left
0x00b0: 0x0660, # ARABIC-INDIC DIGIT ZERO, right-left (need override)
0x00b1: 0x0661, # ARABIC-INDIC DIGIT ONE, right-left (need override)
0x00b2: 0x0662, # ARABIC-INDIC DIGIT TWO, right-left (need override)
0x00b3: 0x0663, # ARABIC-INDIC DIGIT THREE, right-left (need override)
0x00b4: 0x0664, # ARABIC-INDIC DIGIT FOUR, right-left (need override)
0x00b5: 0x0665, # ARABIC-INDIC DIGIT FIVE, right-left (need override)
0x00b6: 0x0666, # ARABIC-INDIC DIGIT SIX, right-left (need override)
0x00b7: 0x0667, # ARABIC-INDIC DIGIT SEVEN, right-left (need override)
0x00b8: 0x0668, # ARABIC-INDIC DIGIT EIGHT, right-left (need override)
0x00b9: 0x0669, # ARABIC-INDIC DIGIT NINE, right-left (need override)
0x00ba: 0x003a, # COLON, right-left
0x00bb: 0x061b, # ARABIC SEMICOLON
0x00bc: 0x003c, # LESS-THAN SIGN, right-left
0x00bd: 0x003d, # EQUALS SIGN, right-left
0x00be: 0x003e, # GREATER-THAN SIGN, right-left
0x00bf: 0x061f, # ARABIC QUESTION MARK
0x00c0: 0x274a, # EIGHT TEARDROP-SPOKED PROPELLER ASTERISK, right-left
0x00c1: 0x0621, # ARABIC LETTER HAMZA
0x00c2: 0x0622, # ARABIC LETTER ALEF WITH MADDA ABOVE
0x00c3: 0x0623, # ARABIC LETTER ALEF WITH HAMZA ABOVE
0x00c4: 0x0624, # ARABIC LETTER WAW WITH HAMZA ABOVE
0x00c5: 0x0625, # ARABIC LETTER ALEF WITH HAMZA BELOW
0x00c6: 0x0626, # ARABIC LETTER YEH WITH HAMZA ABOVE
0x00c7: 0x0627, # ARABIC LETTER ALEF
0x00c8: 0x0628, # ARABIC LETTER BEH
0x00c9: 0x0629, # ARABIC LETTER TEH MARBUTA
0x00ca: 0x062a, # ARABIC LETTER TEH
0x00cb: 0x062b, # ARABIC LETTER THEH
0x00cc: 0x062c, # ARABIC LETTER JEEM
0x00cd: 0x062d, # ARABIC LETTER HAH
0x00ce: 0x062e, # ARABIC LETTER KHAH
0x00cf: 0x062f, # ARABIC LETTER DAL
0x00d0: 0x0630, # ARABIC LETTER THAL
0x00d1: 0x0631, # ARABIC LETTER REH
0x00d2: 0x0632, # ARABIC LETTER ZAIN
0x00d3: 0x0633, # ARABIC LETTER SEEN
0x00d4: 0x0634, # ARABIC LETTER SHEEN
0x00d5: 0x0635, # ARABIC LETTER SAD
0x00d6: 0x0636, # ARABIC LETTER DAD
0x00d7: 0x0637, # ARABIC LETTER TAH
0x00d8: 0x0638, # ARABIC LETTER ZAH
0x00d9: 0x0639, # ARABIC LETTER AIN
0x00da: 0x063a, # ARABIC LETTER GHAIN
0x00db: 0x005b, # LEFT SQUARE BRACKET, right-left
0x00dc: 0x005c, # REVERSE SOLIDUS, right-left
0x00dd: 0x005d, # RIGHT SQUARE BRACKET, right-left
0x00de: 0x005e, # CIRCUMFLEX ACCENT, right-left
0x00df: 0x005f, # LOW LINE, right-left
0x00e0: 0x0640, # ARABIC TATWEEL
0x00e1: 0x0641, # ARABIC LETTER FEH
0x00e2: 0x0642, # ARABIC LETTER QAF
0x00e3: 0x0643, # ARABIC LETTER KAF
0x00e4: 0x0644, # ARABIC LETTER LAM
0x00e5: 0x0645, # ARABIC LETTER MEEM
0x00e6: 0x0646, # ARABIC LETTER NOON
0x00e7: 0x0647, # ARABIC LETTER HEH
0x00e8: 0x0648, # ARABIC LETTER WAW
0x00e9: 0x0649, # ARABIC LETTER ALEF MAKSURA
0x00ea: 0x064a, # ARABIC LETTER YEH
0x00eb: 0x064b, # ARABIC FATHATAN
0x00ec: 0x064c, # ARABIC DAMMATAN
0x00ed: 0x064d, # ARABIC KASRATAN
0x00ee: 0x064e, # ARABIC FATHA
0x00ef: 0x064f, # ARABIC DAMMA
0x00f0: 0x0650, # ARABIC KASRA
0x00f1: 0x0651, # ARABIC SHADDA
0x00f2: 0x0652, # ARABIC SUKUN
0x00f3: 0x067e, # ARABIC LETTER PEH
0x00f4: 0x0679, # ARABIC LETTER TTEH
0x00f5: 0x0686, # ARABIC LETTER TCHEH
0x00f6: 0x06d5, # ARABIC LETTER AE
0x00f7: 0x06a4, # ARABIC LETTER VEH
0x00f8: 0x06af, # ARABIC LETTER GAF
0x00f9: 0x0688, # ARABIC LETTER DDAL
0x00fa: 0x0691, # ARABIC LETTER RREH
0x00fb: 0x007b, # LEFT CURLY BRACKET, right-left
0x00fc: 0x007c, # VERTICAL LINE, right-left
0x00fd: 0x007d, # RIGHT CURLY BRACKET, right-left
0x00fe: 0x0698, # ARABIC LETTER JEH
0x00ff: 0x06d2, # ARABIC LETTER YEH BARREE
})
### Decoding Table
decoding_table = (
'\x00' # 0x0000 -> CONTROL CHARACTER
'\x01' # 0x0001 -> CONTROL CHARACTER
'\x02' # 0x0002 -> CONTROL CHARACTER
'\x03' # 0x0003 -> CONTROL CHARACTER
'\x04' # 0x0004 -> CONTROL CHARACTER
'\x05' # 0x0005 -> CONTROL CHARACTER
'\x06' # 0x0006 -> CONTROL CHARACTER
'\x07' # 0x0007 -> CONTROL CHARACTER
'\x08' # 0x0008 -> CONTROL CHARACTER
'\t' # 0x0009 -> CONTROL CHARACTER
'\n' # 0x000a -> CONTROL CHARACTER
'\x0b' # 0x000b -> CONTROL CHARACTER
'\x0c' # 0x000c -> CONTROL CHARACTER
'\r' # 0x000d -> CONTROL CHARACTER
'\x0e' # 0x000e -> CONTROL CHARACTER
'\x0f' # 0x000f -> CONTROL CHARACTER
'\x10' # 0x0010 -> CONTROL CHARACTER
'\x11' # 0x0011 -> CONTROL CHARACTER
'\x12' # 0x0012 -> CONTROL CHARACTER
'\x13' # 0x0013 -> CONTROL CHARACTER
'\x14' # 0x0014 -> CONTROL CHARACTER
'\x15' # 0x0015 -> CONTROL CHARACTER
'\x16' # 0x0016 -> CONTROL CHARACTER
'\x17' # 0x0017 -> CONTROL CHARACTER
'\x18' # 0x0018 -> CONTROL CHARACTER
'\x19' # 0x0019 -> CONTROL CHARACTER
'\x1a' # 0x001a -> CONTROL CHARACTER
'\x1b' # 0x001b -> CONTROL CHARACTER
'\x1c' # 0x001c -> CONTROL CHARACTER
'\x1d' # 0x001d -> CONTROL CHARACTER
'\x1e' # 0x001e -> CONTROL CHARACTER
'\x1f' # 0x001f -> CONTROL CHARACTER
' ' # 0x0020 -> SPACE, left-right
'!' # 0x0021 -> EXCLAMATION MARK, left-right
'"' # 0x0022 -> QUOTATION MARK, left-right
'#' # 0x0023 -> NUMBER SIGN, left-right
'$' # 0x0024 -> DOLLAR SIGN, left-right
'%' # 0x0025 -> PERCENT SIGN, left-right
'&' # 0x0026 -> AMPERSAND, left-right
"'" # 0x0027 -> APOSTROPHE, left-right
'(' # 0x0028 -> LEFT PARENTHESIS, left-right
')' # 0x0029 -> RIGHT PARENTHESIS, left-right
'*' # 0x002a -> ASTERISK, left-right
'+' # 0x002b -> PLUS SIGN, left-right
',' # 0x002c -> COMMA, left-right; in Arabic-script context, displayed as 0x066C ARABIC THOUSANDS SEPARATOR
'-' # 0x002d -> HYPHEN-MINUS, left-right
'.' # 0x002e -> FULL STOP, left-right; in Arabic-script context, displayed as 0x066B ARABIC DECIMAL SEPARATOR
'/' # 0x002f -> SOLIDUS, left-right
'0' # 0x0030 -> DIGIT ZERO; in Arabic-script context, displayed as 0x0660 ARABIC-INDIC DIGIT ZERO
'1' # 0x0031 -> DIGIT ONE; in Arabic-script context, displayed as 0x0661 ARABIC-INDIC DIGIT ONE
'2' # 0x0032 -> DIGIT TWO; in Arabic-script context, displayed as 0x0662 ARABIC-INDIC DIGIT TWO
'3' # 0x0033 -> DIGIT THREE; in Arabic-script context, displayed as 0x0663 ARABIC-INDIC DIGIT THREE
'4' # 0x0034 -> DIGIT FOUR; in Arabic-script context, displayed as 0x0664 ARABIC-INDIC DIGIT FOUR
'5' # 0x0035 -> DIGIT FIVE; in Arabic-script context, displayed as 0x0665 ARABIC-INDIC DIGIT FIVE
'6' # 0x0036 -> DIGIT SIX; in Arabic-script context, displayed as 0x0666 ARABIC-INDIC DIGIT SIX
'7' # 0x0037 -> DIGIT SEVEN; in Arabic-script context, displayed as 0x0667 ARABIC-INDIC DIGIT SEVEN
'8' # 0x0038 -> DIGIT EIGHT; in Arabic-script context, displayed as 0x0668 ARABIC-INDIC DIGIT EIGHT
'9' # 0x0039 -> DIGIT NINE; in Arabic-script context, displayed as 0x0669 ARABIC-INDIC DIGIT NINE
':' # 0x003a -> COLON, left-right
';' # 0x003b -> SEMICOLON, left-right
'<' # 0x003c -> LESS-THAN SIGN, left-right
'=' # 0x003d -> EQUALS SIGN, left-right
'>' # 0x003e -> GREATER-THAN SIGN, left-right
'?' # 0x003f -> QUESTION MARK, left-right
'@' # 0x0040 -> COMMERCIAL AT
'A' # 0x0041 -> LATIN CAPITAL LETTER A
'B' # 0x0042 -> LATIN CAPITAL LETTER B
'C' # 0x0043 -> LATIN CAPITAL LETTER C
'D' # 0x0044 -> LATIN CAPITAL LETTER D
'E' # 0x0045 -> LATIN CAPITAL LETTER E
'F' # 0x0046 -> LATIN CAPITAL LETTER F
'G' # 0x0047 -> LATIN CAPITAL LETTER G
'H' # 0x0048 -> LATIN CAPITAL LETTER H
'I' # 0x0049 -> LATIN CAPITAL LETTER I
'J' # 0x004a -> LATIN CAPITAL LETTER J
'K' # 0x004b -> LATIN CAPITAL LETTER K
'L' # 0x004c -> LATIN CAPITAL LETTER L
'M' # 0x004d -> LATIN CAPITAL LETTER M
'N' # 0x004e -> LATIN CAPITAL LETTER N
'O' # 0x004f -> LATIN CAPITAL LETTER O
'P' # 0x0050 -> LATIN CAPITAL LETTER P
'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
'R' # 0x0052 -> LATIN CAPITAL LETTER R
'S' # 0x0053 -> LATIN CAPITAL LETTER S
'T' # 0x0054 -> LATIN CAPITAL LETTER T
'U' # 0x0055 -> LATIN CAPITAL LETTER U
'V' # 0x0056 -> LATIN CAPITAL LETTER V
'W' # 0x0057 -> LATIN CAPITAL LETTER W
'X' # 0x0058 -> LATIN CAPITAL LETTER X
'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
'Z' # 0x005a -> LATIN CAPITAL LETTER Z
'[' # 0x005b -> LEFT SQUARE BRACKET, left-right
'\\' # 0x005c -> REVERSE SOLIDUS, left-right
']' # 0x005d -> RIGHT SQUARE BRACKET, left-right
'^' # 0x005e -> CIRCUMFLEX ACCENT, left-right
'_' # 0x005f -> LOW LINE, left-right
'`' # 0x0060 -> GRAVE ACCENT
'a' # 0x0061 -> LATIN SMALL LETTER A
'b' # 0x0062 -> LATIN SMALL LETTER B
'c' # 0x0063 -> LATIN SMALL LETTER C
'd' # 0x0064 -> LATIN SMALL LETTER D
'e' # 0x0065 -> LATIN SMALL LETTER E
'f' # 0x0066 -> LATIN SMALL LETTER F
'g' # 0x0067 -> LATIN SMALL LETTER G
'h' # 0x0068 -> LATIN SMALL LETTER H
'i' # 0x0069 -> LATIN SMALL LETTER I
'j' # 0x006a -> LATIN SMALL LETTER J
'k' # 0x006b -> LATIN SMALL LETTER K
'l' # 0x006c -> LATIN SMALL LETTER L
'm' # 0x006d -> LATIN SMALL LETTER M
'n' # 0x006e -> LATIN SMALL LETTER N
'o' # 0x006f -> LATIN SMALL LETTER O
'p' # 0x0070 -> LATIN SMALL LETTER P
'q' # 0x0071 -> LATIN SMALL LETTER Q
'r' # 0x0072 -> LATIN SMALL LETTER R
's' # 0x0073 -> LATIN SMALL LETTER S
't' # 0x0074 -> LATIN SMALL LETTER T
'u' # 0x0075 -> LATIN SMALL LETTER U
'v' # 0x0076 -> LATIN SMALL LETTER V
'w' # 0x0077 -> LATIN SMALL LETTER W
'x' # 0x0078 -> LATIN SMALL LETTER X
'y' # 0x0079 -> LATIN SMALL LETTER Y
'z' # 0x007a -> LATIN SMALL LETTER Z
'{' # 0x007b -> LEFT CURLY BRACKET, left-right
'|' # 0x007c -> VERTICAL LINE, left-right
'}' # 0x007d -> RIGHT CURLY BRACKET, left-right
'~' # 0x007e -> TILDE
'\x7f' # 0x007f -> CONTROL CHARACTER
'\xc4' # 0x0080 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xa0' # 0x0081 -> NO-BREAK SPACE, right-left
'\xc7' # 0x0082 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xc9' # 0x0083 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xd1' # 0x0084 -> LATIN CAPITAL LETTER N WITH TILDE
'\xd6' # 0x0085 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xdc' # 0x0086 -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xe1' # 0x0087 -> LATIN SMALL LETTER A WITH ACUTE
'\xe0' # 0x0088 -> LATIN SMALL LETTER A WITH GRAVE
'\xe2' # 0x0089 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe4' # 0x008a -> LATIN SMALL LETTER A WITH DIAERESIS
'\u06ba' # 0x008b -> ARABIC LETTER NOON GHUNNA
'\xab' # 0x008c -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left
'\xe7' # 0x008d -> LATIN SMALL LETTER C WITH CEDILLA
'\xe9' # 0x008e -> LATIN SMALL LETTER E WITH ACUTE
'\xe8' # 0x008f -> LATIN SMALL LETTER E WITH GRAVE
'\xea' # 0x0090 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0x0091 -> LATIN SMALL LETTER E WITH DIAERESIS
'\xed' # 0x0092 -> LATIN SMALL LETTER I WITH ACUTE
'\u2026' # 0x0093 -> HORIZONTAL ELLIPSIS, right-left
'\xee' # 0x0094 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0x0095 -> LATIN SMALL LETTER I WITH DIAERESIS
'\xf1' # 0x0096 -> LATIN SMALL LETTER N WITH TILDE
'\xf3' # 0x0097 -> LATIN SMALL LETTER O WITH ACUTE
'\xbb' # 0x0098 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left
'\xf4' # 0x0099 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf6' # 0x009a -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf7' # 0x009b -> DIVISION SIGN, right-left
'\xfa' # 0x009c -> LATIN SMALL LETTER U WITH ACUTE
'\xf9' # 0x009d -> LATIN SMALL LETTER U WITH GRAVE
'\xfb' # 0x009e -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0x009f -> LATIN SMALL LETTER U WITH DIAERESIS
' ' # 0x00a0 -> SPACE, right-left
'!' # 0x00a1 -> EXCLAMATION MARK, right-left
'"' # 0x00a2 -> QUOTATION MARK, right-left
'#' # 0x00a3 -> NUMBER SIGN, right-left
'$' # 0x00a4 -> DOLLAR SIGN, right-left
'\u066a' # 0x00a5 -> ARABIC PERCENT SIGN
'&' # 0x00a6 -> AMPERSAND, right-left
"'" # 0x00a7 -> APOSTROPHE, right-left
'(' # 0x00a8 -> LEFT PARENTHESIS, right-left
')' # 0x00a9 -> RIGHT PARENTHESIS, right-left
'*' # 0x00aa -> ASTERISK, right-left
'+' # 0x00ab -> PLUS SIGN, right-left
'\u060c' # 0x00ac -> ARABIC COMMA
'-' # 0x00ad -> HYPHEN-MINUS, right-left
'.' # 0x00ae -> FULL STOP, right-left
'/' # 0x00af -> SOLIDUS, right-left
'\u0660' # 0x00b0 -> ARABIC-INDIC DIGIT ZERO, right-left (need override)
'\u0661' # 0x00b1 -> ARABIC-INDIC DIGIT ONE, right-left (need override)
'\u0662' # 0x00b2 -> ARABIC-INDIC DIGIT TWO, right-left (need override)
'\u0663' # 0x00b3 -> ARABIC-INDIC DIGIT THREE, right-left (need override)
'\u0664' # 0x00b4 -> ARABIC-INDIC DIGIT FOUR, right-left (need override)
'\u0665' # 0x00b5 -> ARABIC-INDIC DIGIT FIVE, right-left (need override)
'\u0666' # 0x00b6 -> ARABIC-INDIC DIGIT SIX, right-left (need override)
'\u0667' # 0x00b7 -> ARABIC-INDIC DIGIT SEVEN, right-left (need override)
'\u0668' # 0x00b8 -> ARABIC-INDIC DIGIT EIGHT, right-left (need override)
'\u0669' # 0x00b9 -> ARABIC-INDIC DIGIT NINE, right-left (need override)
':' # 0x00ba -> COLON, right-left
'\u061b' # 0x00bb -> ARABIC SEMICOLON
'<' # 0x00bc -> LESS-THAN SIGN, right-left
'=' # 0x00bd -> EQUALS SIGN, right-left
'>' # 0x00be -> GREATER-THAN SIGN, right-left
'\u061f' # 0x00bf -> ARABIC QUESTION MARK
'\u274a' # 0x00c0 -> EIGHT TEARDROP-SPOKED PROPELLER ASTERISK, right-left
'\u0621' # 0x00c1 -> ARABIC LETTER HAMZA
'\u0622' # 0x00c2 -> ARABIC LETTER ALEF WITH MADDA ABOVE
'\u0623' # 0x00c3 -> ARABIC LETTER ALEF WITH HAMZA ABOVE
'\u0624' # 0x00c4 -> ARABIC LETTER WAW WITH HAMZA ABOVE
'\u0625' # 0x00c5 -> ARABIC LETTER ALEF WITH HAMZA BELOW
'\u0626' # 0x00c6 -> ARABIC LETTER YEH WITH HAMZA ABOVE
'\u0627' # 0x00c7 -> ARABIC LETTER ALEF
'\u0628' # 0x00c8 -> ARABIC LETTER BEH
'\u0629' # 0x00c9 -> ARABIC LETTER TEH MARBUTA
'\u062a' # 0x00ca -> ARABIC LETTER TEH
'\u062b' # 0x00cb -> ARABIC LETTER THEH
'\u062c' # 0x00cc -> ARABIC LETTER JEEM
'\u062d' # 0x00cd -> ARABIC LETTER HAH
'\u062e' # 0x00ce -> ARABIC LETTER KHAH
'\u062f' # 0x00cf -> ARABIC LETTER DAL
'\u0630' # 0x00d0 -> ARABIC LETTER THAL
'\u0631' # 0x00d1 -> ARABIC LETTER REH
'\u0632' # 0x00d2 -> ARABIC LETTER ZAIN
'\u0633' # 0x00d3 -> ARABIC LETTER SEEN
'\u0634' # 0x00d4 -> ARABIC LETTER SHEEN
'\u0635' # 0x00d5 -> ARABIC LETTER SAD
'\u0636' # 0x00d6 -> ARABIC LETTER DAD
'\u0637' # 0x00d7 -> ARABIC LETTER TAH
'\u0638' # 0x00d8 -> ARABIC LETTER ZAH
'\u0639' # 0x00d9 -> ARABIC LETTER AIN
'\u063a' # 0x00da -> ARABIC LETTER GHAIN
'[' # 0x00db -> LEFT SQUARE BRACKET, right-left
'\\' # 0x00dc -> REVERSE SOLIDUS, right-left
']' # 0x00dd -> RIGHT SQUARE BRACKET, right-left
'^' # 0x00de -> CIRCUMFLEX ACCENT, right-left
'_' # 0x00df -> LOW LINE, right-left
'\u0640' # 0x00e0 -> ARABIC TATWEEL
'\u0641' # 0x00e1 -> ARABIC LETTER FEH
'\u0642' # 0x00e2 -> ARABIC LETTER QAF
'\u0643' # 0x00e3 -> ARABIC LETTER KAF
'\u0644' # 0x00e4 -> ARABIC LETTER LAM
'\u0645' # 0x00e5 -> ARABIC LETTER MEEM
'\u0646' # 0x00e6 -> ARABIC LETTER NOON
'\u0647' # 0x00e7 -> ARABIC LETTER HEH
'\u0648' # 0x00e8 -> ARABIC LETTER WAW
'\u0649' # 0x00e9 -> ARABIC LETTER ALEF MAKSURA
'\u064a' # 0x00ea -> ARABIC LETTER YEH
'\u064b' # 0x00eb -> ARABIC FATHATAN
'\u064c' # 0x00ec -> ARABIC DAMMATAN
'\u064d' # 0x00ed -> ARABIC KASRATAN
'\u064e' # 0x00ee -> ARABIC FATHA
'\u064f' # 0x00ef -> ARABIC DAMMA
'\u0650' # 0x00f0 -> ARABIC KASRA
'\u0651' # 0x00f1 -> ARABIC SHADDA
'\u0652' # 0x00f2 -> ARABIC SUKUN
'\u067e' # 0x00f3 -> ARABIC LETTER PEH
'\u0679' # 0x00f4 -> ARABIC LETTER TTEH
'\u0686' # 0x00f5 -> ARABIC LETTER TCHEH
'\u06d5' # 0x00f6 -> ARABIC LETTER AE
'\u06a4' # 0x00f7 -> ARABIC LETTER VEH
'\u06af' # 0x00f8 -> ARABIC LETTER GAF
'\u0688' # 0x00f9 -> ARABIC LETTER DDAL
'\u0691' # 0x00fa -> ARABIC LETTER RREH
'{' # 0x00fb -> LEFT CURLY BRACKET, right-left
'|' # 0x00fc -> VERTICAL LINE, right-left
'}' # 0x00fd -> RIGHT CURLY BRACKET, right-left
'\u0698' # 0x00fe -> ARABIC LETTER JEH
'\u06d2' # 0x00ff -> ARABIC LETTER YEH BARREE
)
### Encoding Map
encoding_map = {
0x0000: 0x0000, # CONTROL CHARACTER
0x0001: 0x0001, # CONTROL CHARACTER
0x0002: 0x0002, # CONTROL CHARACTER
0x0003: 0x0003, # CONTROL CHARACTER
0x0004: 0x0004, # CONTROL CHARACTER
0x0005: 0x0005, # CONTROL CHARACTER
0x0006: 0x0006, # CONTROL CHARACTER
0x0007: 0x0007, # CONTROL CHARACTER
0x0008: 0x0008, # CONTROL CHARACTER
0x0009: 0x0009, # CONTROL CHARACTER
0x000a: 0x000a, # CONTROL CHARACTER
0x000b: 0x000b, # CONTROL CHARACTER
0x000c: 0x000c, # CONTROL CHARACTER
0x000d: 0x000d, # CONTROL CHARACTER
0x000e: 0x000e, # CONTROL CHARACTER
0x000f: 0x000f, # CONTROL CHARACTER
0x0010: 0x0010, # CONTROL CHARACTER
0x0011: 0x0011, # CONTROL CHARACTER
0x0012: 0x0012, # CONTROL CHARACTER
0x0013: 0x0013, # CONTROL CHARACTER
0x0014: 0x0014, # CONTROL CHARACTER
0x0015: 0x0015, # CONTROL CHARACTER
0x0016: 0x0016, # CONTROL CHARACTER
0x0017: 0x0017, # CONTROL CHARACTER
0x0018: 0x0018, # CONTROL CHARACTER
0x0019: 0x0019, # CONTROL CHARACTER
0x001a: 0x001a, # CONTROL CHARACTER
0x001b: 0x001b, # CONTROL CHARACTER
0x001c: 0x001c, # CONTROL CHARACTER
0x001d: 0x001d, # CONTROL CHARACTER
0x001e: 0x001e, # CONTROL CHARACTER
0x001f: 0x001f, # CONTROL CHARACTER
0x0020: 0x0020, # SPACE, left-right
0x0020: 0x00a0, # SPACE, right-left
0x0021: 0x0021, # EXCLAMATION MARK, left-right
0x0021: 0x00a1, # EXCLAMATION MARK, right-left
0x0022: 0x0022, # QUOTATION MARK, left-right
0x0022: 0x00a2, # QUOTATION MARK, right-left
0x0023: 0x0023, # NUMBER SIGN, left-right
0x0023: 0x00a3, # NUMBER SIGN, right-left
0x0024: 0x0024, # DOLLAR SIGN, left-right
0x0024: 0x00a4, # DOLLAR SIGN, right-left
0x0025: 0x0025, # PERCENT SIGN, left-right
0x0026: 0x0026, # AMPERSAND, left-right
0x0026: 0x00a6, # AMPERSAND, right-left
0x0027: 0x0027, # APOSTROPHE, left-right
0x0027: 0x00a7, # APOSTROPHE, right-left
0x0028: 0x0028, # LEFT PARENTHESIS, left-right
0x0028: 0x00a8, # LEFT PARENTHESIS, right-left
0x0029: 0x0029, # RIGHT PARENTHESIS, left-right
0x0029: 0x00a9, # RIGHT PARENTHESIS, right-left
0x002a: 0x002a, # ASTERISK, left-right
0x002a: 0x00aa, # ASTERISK, right-left
0x002b: 0x002b, # PLUS SIGN, left-right
0x002b: 0x00ab, # PLUS SIGN, right-left
0x002c: 0x002c, # COMMA, left-right; in Arabic-script context, displayed as 0x066C ARABIC THOUSANDS SEPARATOR
0x002d: 0x002d, # HYPHEN-MINUS, left-right
0x002d: 0x00ad, # HYPHEN-MINUS, right-left
0x002e: 0x002e, # FULL STOP, left-right; in Arabic-script context, displayed as 0x066B ARABIC DECIMAL SEPARATOR
0x002e: 0x00ae, # FULL STOP, right-left
0x002f: 0x002f, # SOLIDUS, left-right
0x002f: 0x00af, # SOLIDUS, right-left
0x0030: 0x0030, # DIGIT ZERO; in Arabic-script context, displayed as 0x0660 ARABIC-INDIC DIGIT ZERO
0x0031: 0x0031, # DIGIT ONE; in Arabic-script context, displayed as 0x0661 ARABIC-INDIC DIGIT ONE
0x0032: 0x0032, # DIGIT TWO; in Arabic-script context, displayed as 0x0662 ARABIC-INDIC DIGIT TWO
0x0033: 0x0033, # DIGIT THREE; in Arabic-script context, displayed as 0x0663 ARABIC-INDIC DIGIT THREE
0x0034: 0x0034, # DIGIT FOUR; in Arabic-script context, displayed as 0x0664 ARABIC-INDIC DIGIT FOUR
0x0035: 0x0035, # DIGIT FIVE; in Arabic-script context, displayed as 0x0665 ARABIC-INDIC DIGIT FIVE
0x0036: 0x0036, # DIGIT SIX; in Arabic-script context, displayed as 0x0666 ARABIC-INDIC DIGIT SIX
0x0037: 0x0037, # DIGIT SEVEN; in Arabic-script context, displayed as 0x0667 ARABIC-INDIC DIGIT SEVEN
0x0038: 0x0038, # DIGIT EIGHT; in Arabic-script context, displayed as 0x0668 ARABIC-INDIC DIGIT EIGHT
0x0039: 0x0039, # DIGIT NINE; in Arabic-script context, displayed as 0x0669 ARABIC-INDIC DIGIT NINE
0x003a: 0x003a, # COLON, left-right
0x003a: 0x00ba, # COLON, right-left
0x003b: 0x003b, # SEMICOLON, left-right
0x003c: 0x003c, # LESS-THAN SIGN, left-right
0x003c: 0x00bc, # LESS-THAN SIGN, right-left
0x003d: 0x003d, # EQUALS SIGN, left-right
0x003d: 0x00bd, # EQUALS SIGN, right-left
0x003e: 0x003e, # GREATER-THAN SIGN, left-right
0x003e: 0x00be, # GREATER-THAN SIGN, right-left
0x003f: 0x003f, # QUESTION MARK, left-right
0x0040: 0x0040, # COMMERCIAL AT
0x0041: 0x0041, # LATIN CAPITAL LETTER A
0x0042: 0x0042, # LATIN CAPITAL LETTER B
0x0043: 0x0043, # LATIN CAPITAL LETTER C
0x0044: 0x0044, # LATIN CAPITAL LETTER D
0x0045: 0x0045, # LATIN CAPITAL LETTER E
0x0046: 0x0046, # LATIN CAPITAL LETTER F
0x0047: 0x0047, # LATIN CAPITAL LETTER G
0x0048: 0x0048, # LATIN CAPITAL LETTER H
0x0049: 0x0049, # LATIN CAPITAL LETTER I
0x004a: 0x004a, # LATIN CAPITAL LETTER J
0x004b: 0x004b, # LATIN CAPITAL LETTER K
0x004c: 0x004c, # LATIN CAPITAL LETTER L
0x004d: 0x004d, # LATIN CAPITAL LETTER M
0x004e: 0x004e, # LATIN CAPITAL LETTER N
0x004f: 0x004f, # LATIN CAPITAL LETTER O
0x0050: 0x0050, # LATIN CAPITAL LETTER P
0x0051: 0x0051, # LATIN CAPITAL LETTER Q
0x0052: 0x0052, # LATIN CAPITAL LETTER R
0x0053: 0x0053, # LATIN CAPITAL LETTER S
0x0054: 0x0054, # LATIN CAPITAL LETTER T
0x0055: 0x0055, # LATIN CAPITAL LETTER U
0x0056: 0x0056, # LATIN CAPITAL LETTER V
0x0057: 0x0057, # LATIN CAPITAL LETTER W
0x0058: 0x0058, # LATIN CAPITAL LETTER X
0x0059: 0x0059, # LATIN CAPITAL LETTER Y
0x005a: 0x005a, # LATIN CAPITAL LETTER Z
0x005b: 0x005b, # LEFT SQUARE BRACKET, left-right
0x005b: 0x00db, # LEFT SQUARE BRACKET, right-left
0x005c: 0x005c, # REVERSE SOLIDUS, left-right
0x005c: 0x00dc, # REVERSE SOLIDUS, right-left
0x005d: 0x005d, # RIGHT SQUARE BRACKET, left-right
0x005d: 0x00dd, # RIGHT SQUARE BRACKET, right-left
0x005e: 0x005e, # CIRCUMFLEX ACCENT, left-right
0x005e: 0x00de, # CIRCUMFLEX ACCENT, right-left
0x005f: 0x005f, # LOW LINE, left-right
0x005f: 0x00df, # LOW LINE, right-left
0x0060: 0x0060, # GRAVE ACCENT
0x0061: 0x0061, # LATIN SMALL LETTER A
0x0062: 0x0062, # LATIN SMALL LETTER B
0x0063: 0x0063, # LATIN SMALL LETTER C
0x0064: 0x0064, # LATIN SMALL LETTER D
0x0065: 0x0065, # LATIN SMALL LETTER E
0x0066: 0x0066, # LATIN SMALL LETTER F
0x0067: 0x0067, # LATIN SMALL LETTER G
0x0068: 0x0068, # LATIN SMALL LETTER H
0x0069: 0x0069, # LATIN SMALL LETTER I
0x006a: 0x006a, # LATIN SMALL LETTER J
0x006b: 0x006b, # LATIN SMALL LETTER K
0x006c: 0x006c, # LATIN SMALL LETTER L
0x006d: 0x006d, # LATIN SMALL LETTER M
0x006e: 0x006e, # LATIN SMALL LETTER N
0x006f: 0x006f, # LATIN SMALL LETTER O
0x0070: 0x0070, # LATIN SMALL LETTER P
0x0071: 0x0071, # LATIN SMALL LETTER Q
0x0072: 0x0072, # LATIN SMALL LETTER R
0x0073: 0x0073, # LATIN SMALL LETTER S
0x0074: 0x0074, # LATIN SMALL LETTER T
0x0075: 0x0075, # LATIN SMALL LETTER U
0x0076: 0x0076, # LATIN SMALL LETTER V
0x0077: 0x0077, # LATIN SMALL LETTER W
0x0078: 0x0078, # LATIN SMALL LETTER X
0x0079: 0x0079, # LATIN SMALL LETTER Y
0x007a: 0x007a, # LATIN SMALL LETTER Z
0x007b: 0x007b, # LEFT CURLY BRACKET, left-right
0x007b: 0x00fb, # LEFT CURLY BRACKET, right-left
0x007c: 0x007c, # VERTICAL LINE, left-right
0x007c: 0x00fc, # VERTICAL LINE, right-left
0x007d: 0x007d, # RIGHT CURLY BRACKET, left-right
0x007d: 0x00fd, # RIGHT CURLY BRACKET, right-left
0x007e: 0x007e, # TILDE
0x007f: 0x007f, # CONTROL CHARACTER
0x00a0: 0x0081, # NO-BREAK SPACE, right-left
0x00ab: 0x008c, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left
0x00bb: 0x0098, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left
0x00c4: 0x0080, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x00c7: 0x0082, # LATIN CAPITAL LETTER C WITH CEDILLA
0x00c9: 0x0083, # LATIN CAPITAL LETTER E WITH ACUTE
0x00d1: 0x0084, # LATIN CAPITAL LETTER N WITH TILDE
0x00d6: 0x0085, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x00dc: 0x0086, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x00e0: 0x0088, # LATIN SMALL LETTER A WITH GRAVE
0x00e1: 0x0087, # LATIN SMALL LETTER A WITH ACUTE
0x00e2: 0x0089, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x00e4: 0x008a, # LATIN SMALL LETTER A WITH DIAERESIS
0x00e7: 0x008d, # LATIN SMALL LETTER C WITH CEDILLA
0x00e8: 0x008f, # LATIN SMALL LETTER E WITH GRAVE
0x00e9: 0x008e, # LATIN SMALL LETTER E WITH ACUTE
0x00ea: 0x0090, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x00eb: 0x0091, # LATIN SMALL LETTER E WITH DIAERESIS
0x00ed: 0x0092, # LATIN SMALL LETTER I WITH ACUTE
0x00ee: 0x0094, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x00ef: 0x0095, # LATIN SMALL LETTER I WITH DIAERESIS
0x00f1: 0x0096, # LATIN SMALL LETTER N WITH TILDE
0x00f3: 0x0097, # LATIN SMALL LETTER O WITH ACUTE
0x00f4: 0x0099, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x00f6: 0x009a, # LATIN SMALL LETTER O WITH DIAERESIS
0x00f7: 0x009b, # DIVISION SIGN, right-left
0x00f9: 0x009d, # LATIN SMALL LETTER U WITH GRAVE
0x00fa: 0x009c, # LATIN SMALL LETTER U WITH ACUTE
0x00fb: 0x009e, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x00fc: 0x009f, # LATIN SMALL LETTER U WITH DIAERESIS
0x060c: 0x00ac, # ARABIC COMMA
0x061b: 0x00bb, # ARABIC SEMICOLON
0x061f: 0x00bf, # ARABIC QUESTION MARK
0x0621: 0x00c1, # ARABIC LETTER HAMZA
0x0622: 0x00c2, # ARABIC LETTER ALEF WITH MADDA ABOVE
0x0623: 0x00c3, # ARABIC LETTER ALEF WITH HAMZA ABOVE
0x0624: 0x00c4, # ARABIC LETTER WAW WITH HAMZA ABOVE
0x0625: 0x00c5, # ARABIC LETTER ALEF WITH HAMZA BELOW
0x0626: 0x00c6, # ARABIC LETTER YEH WITH HAMZA ABOVE
0x0627: 0x00c7, # ARABIC LETTER ALEF
0x0628: 0x00c8, # ARABIC LETTER BEH
0x0629: 0x00c9, # ARABIC LETTER TEH MARBUTA
0x062a: 0x00ca, # ARABIC LETTER TEH
0x062b: 0x00cb, # ARABIC LETTER THEH
0x062c: 0x00cc, # ARABIC LETTER JEEM
0x062d: 0x00cd, # ARABIC LETTER HAH
0x062e: 0x00ce, # ARABIC LETTER KHAH
0x062f: 0x00cf, # ARABIC LETTER DAL
0x0630: 0x00d0, # ARABIC LETTER THAL
0x0631: 0x00d1, # ARABIC LETTER REH
0x0632: 0x00d2, # ARABIC LETTER ZAIN
0x0633: 0x00d3, # ARABIC LETTER SEEN
0x0634: 0x00d4, # ARABIC LETTER SHEEN
0x0635: 0x00d5, # ARABIC LETTER SAD
0x0636: 0x00d6, # ARABIC LETTER DAD
0x0637: 0x00d7, # ARABIC LETTER TAH
0x0638: 0x00d8, # ARABIC LETTER ZAH
0x0639: 0x00d9, # ARABIC LETTER AIN
0x063a: 0x00da, # ARABIC LETTER GHAIN
0x0640: 0x00e0, # ARABIC TATWEEL
0x0641: 0x00e1, # ARABIC LETTER FEH
0x0642: 0x00e2, # ARABIC LETTER QAF
0x0643: 0x00e3, # ARABIC LETTER KAF
0x0644: 0x00e4, # ARABIC LETTER LAM
0x0645: 0x00e5, # ARABIC LETTER MEEM
0x0646: 0x00e6, # ARABIC LETTER NOON
0x0647: 0x00e7, # ARABIC LETTER HEH
0x0648: 0x00e8, # ARABIC LETTER WAW
0x0649: 0x00e9, # ARABIC LETTER ALEF MAKSURA
0x064a: 0x00ea, # ARABIC LETTER YEH
0x064b: 0x00eb, # ARABIC FATHATAN
0x064c: 0x00ec, # ARABIC DAMMATAN
0x064d: 0x00ed, # ARABIC KASRATAN
0x064e: 0x00ee, # ARABIC FATHA
0x064f: 0x00ef, # ARABIC DAMMA
0x0650: 0x00f0, # ARABIC KASRA
0x0651: 0x00f1, # ARABIC SHADDA
0x0652: 0x00f2, # ARABIC SUKUN
0x0660: 0x00b0, # ARABIC-INDIC DIGIT ZERO, right-left (need override)
0x0661: 0x00b1, # ARABIC-INDIC DIGIT ONE, right-left (need override)
0x0662: 0x00b2, # ARABIC-INDIC DIGIT TWO, right-left (need override)
0x0663: 0x00b3, # ARABIC-INDIC DIGIT THREE, right-left (need override)
0x0664: 0x00b4, # ARABIC-INDIC DIGIT FOUR, right-left (need override)
0x0665: 0x00b5, # ARABIC-INDIC DIGIT FIVE, right-left (need override)
0x0666: 0x00b6, # ARABIC-INDIC DIGIT SIX, right-left (need override)
0x0667: 0x00b7, # ARABIC-INDIC DIGIT SEVEN, right-left (need override)
0x0668: 0x00b8, # ARABIC-INDIC DIGIT EIGHT, right-left (need override)
0x0669: 0x00b9, # ARABIC-INDIC DIGIT NINE, right-left (need override)
0x066a: 0x00a5, # ARABIC PERCENT SIGN
0x0679: 0x00f4, # ARABIC LETTER TTEH
0x067e: 0x00f3, # ARABIC LETTER PEH
0x0686: 0x00f5, # ARABIC LETTER TCHEH
0x0688: 0x00f9, # ARABIC LETTER DDAL
0x0691: 0x00fa, # ARABIC LETTER RREH
0x0698: 0x00fe, # ARABIC LETTER JEH
0x06a4: 0x00f7, # ARABIC LETTER VEH
0x06af: 0x00f8, # ARABIC LETTER GAF
0x06ba: 0x008b, # ARABIC LETTER NOON GHUNNA
0x06d2: 0x00ff, # ARABIC LETTER YEH BARREE
0x06d5: 0x00f6, # ARABIC LETTER AE
0x2026: 0x0093, # HORIZONTAL ELLIPSIS, right-left
0x274a: 0x00c0, # EIGHT TEARDROP-SPOKED PROPELLER ASTERISK, right-left
}
| amrdraz/brython | www/src/Lib/encodings/mac_arabic.py | Python | bsd-3-clause | 37,165 | 0.018916 |
from distutils.core import setup
import py2exe
import sys
# This allows the script to be run with a simple double click.
sys.argv.append('py2exe')
py2exe_options = {
"includes": ["sip"],
"dll_excludes": ["MSVCP90.dll", ],
"compressed": 1,
"optimize": 2,
"ascii": 0,
# "bundle_files": 1,
}
setup(
name='main',
version='1.0',
windows=[{"script":"main.py"}],
zipfile=None,
options={'py2exe': py2exe_options}
)
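# Note (illustrative): since "py2exe" is appended to sys.argv above, running
# "python setup.py" directly builds the windowed executable for main.py into
# the dist/ directory.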
| xcool/webtools | setup.py | Python | gpl-3.0 | 498 | 0.008032 |
import sys
import unittest
from dynd import nd, ndt
import numpy as np
from datetime import date, datetime
from numpy.testing import *
class TestNumpyDTypeInterop(unittest.TestCase):
def setUp(self):
if sys.byteorder == 'little':
self.nonnative = '>'
else:
self.nonnative = '<'
def test_ndt_type_from_numpy_scalar_types(self):
# Tests converting numpy scalar types to dynd types
self.assertEqual(ndt.bool, ndt.type(np.bool))
self.assertEqual(ndt.bool, ndt.type(np.bool_))
self.assertEqual(ndt.int8, ndt.type(np.int8))
self.assertEqual(ndt.int16, ndt.type(np.int16))
self.assertEqual(ndt.int32, ndt.type(np.int32))
self.assertEqual(ndt.int64, ndt.type(np.int64))
self.assertEqual(ndt.uint8, ndt.type(np.uint8))
self.assertEqual(ndt.uint16, ndt.type(np.uint16))
self.assertEqual(ndt.uint32, ndt.type(np.uint32))
self.assertEqual(ndt.uint64, ndt.type(np.uint64))
self.assertEqual(ndt.float32, ndt.type(np.float32))
self.assertEqual(ndt.float64, ndt.type(np.float64))
self.assertEqual(ndt.complex_float32, ndt.type(np.complex64))
self.assertEqual(ndt.complex_float64, ndt.type(np.complex128))
def test_ndt_type_from_numpy_dtype(self):
# Tests converting numpy dtypes to dynd types
# native byte order
self.assertEqual(ndt.bool, ndt.type(np.dtype(np.bool)))
self.assertEqual(ndt.int8, ndt.type(np.dtype(np.int8)))
self.assertEqual(ndt.int16, ndt.type(np.dtype(np.int16)))
self.assertEqual(ndt.int32, ndt.type(np.dtype(np.int32)))
self.assertEqual(ndt.int64, ndt.type(np.dtype(np.int64)))
self.assertEqual(ndt.uint8, ndt.type(np.dtype(np.uint8)))
self.assertEqual(ndt.uint16, ndt.type(np.dtype(np.uint16)))
self.assertEqual(ndt.uint32, ndt.type(np.dtype(np.uint32)))
self.assertEqual(ndt.uint64, ndt.type(np.dtype(np.uint64)))
self.assertEqual(ndt.float32, ndt.type(np.dtype(np.float32)))
self.assertEqual(ndt.float64, ndt.type(np.dtype(np.float64)))
self.assertEqual(ndt.complex_float32, ndt.type(np.dtype(np.complex64)))
self.assertEqual(ndt.complex_float64, ndt.type(np.dtype(np.complex128)))
self.assertEqual(ndt.make_fixedstring(10, 'ascii'),
ndt.type(np.dtype('S10')))
self.assertEqual(ndt.make_fixedstring(10, 'utf_32'),
ndt.type(np.dtype('U10')))
# non-native byte order
nonnative = self.nonnative
self.assertEqual(ndt.make_byteswap(ndt.int16),
ndt.type(np.dtype(nonnative + 'i2')))
self.assertEqual(ndt.make_byteswap(ndt.int32),
ndt.type(np.dtype(nonnative + 'i4')))
self.assertEqual(ndt.make_byteswap(ndt.int64),
ndt.type(np.dtype(nonnative + 'i8')))
self.assertEqual(ndt.make_byteswap(ndt.uint16),
ndt.type(np.dtype(nonnative + 'u2')))
self.assertEqual(ndt.make_byteswap(ndt.uint32),
ndt.type(np.dtype(nonnative + 'u4')))
self.assertEqual(ndt.make_byteswap(ndt.uint64),
ndt.type(np.dtype(nonnative + 'u8')))
self.assertEqual(ndt.make_byteswap(ndt.float32),
ndt.type(np.dtype(nonnative + 'f4')))
self.assertEqual(ndt.make_byteswap(ndt.float64),
ndt.type(np.dtype(nonnative + 'f8')))
self.assertEqual(ndt.make_byteswap(ndt.complex_float32),
ndt.type(np.dtype(nonnative + 'c8')))
self.assertEqual(ndt.make_byteswap(ndt.complex_float64),
ndt.type(np.dtype(nonnative + 'c16')))
def test_ndt_type_from_numpy_dtype_struct(self):
# aligned struct
tp0 = ndt.type(np.dtype([('x', np.int32), ('y', np.int64)],
align=True))
tp1 = ndt.type('c{x : int32, y : int64}')
self.assertEqual(tp0, tp1)
# unaligned struct
tp0 = ndt.type(np.dtype([('x', np.int32), ('y', np.int64)]))
tp1 = ndt.make_cstruct([ndt.make_unaligned(ndt.int32),
ndt.make_unaligned(ndt.int64)],
['x', 'y'])
self.assertEqual(tp0, tp1)
def test_ndt_type_as_numpy(self):
self.assertEqual(ndt.bool.as_numpy(), np.dtype('bool'))
self.assertEqual(ndt.int8.as_numpy(), np.dtype('int8'))
self.assertEqual(ndt.int16.as_numpy(), np.dtype('int16'))
self.assertEqual(ndt.int32.as_numpy(), np.dtype('int32'))
self.assertEqual(ndt.int64.as_numpy(), np.dtype('int64'))
self.assertEqual(ndt.uint8.as_numpy(), np.dtype('uint8'))
self.assertEqual(ndt.uint16.as_numpy(), np.dtype('uint16'))
self.assertEqual(ndt.uint32.as_numpy(), np.dtype('uint32'))
self.assertEqual(ndt.uint64.as_numpy(), np.dtype('uint64'))
self.assertEqual(ndt.float32.as_numpy(), np.dtype('float32'))
self.assertEqual(ndt.float64.as_numpy(), np.dtype('float64'))
self.assertEqual(ndt.complex_float32.as_numpy(), np.dtype('complex64'))
self.assertEqual(ndt.complex_float64.as_numpy(), np.dtype('complex128'))
# nonnative byte order
nonnative = self.nonnative
self.assertEqual(ndt.make_byteswap(ndt.int16).as_numpy(),
np.dtype(nonnative + 'i2'))
self.assertEqual(ndt.make_byteswap(ndt.float64).as_numpy(),
np.dtype(nonnative + 'f8'))
# aligned struct
tp0 = ndt.type('c{x : int32, y : int64}').as_numpy()
tp1 = np.dtype([('x', np.int32), ('y', np.int64)], align=True)
self.assertEqual(tp0, tp1)
# unaligned struct
tp0 = ndt.make_cstruct([ndt.make_unaligned(ndt.int32),
ndt.make_unaligned(ndt.int64)],
['x', 'y']).as_numpy()
tp1 = np.dtype([('x', np.int32), ('y', np.int64)])
self.assertEqual(tp0, tp1)
# check some types which can't be converted
self.assertRaises(TypeError, ndt.date.as_numpy)
self.assertRaises(TypeError, ndt.datetime.as_numpy)
self.assertRaises(TypeError, ndt.bytes.as_numpy)
self.assertRaises(TypeError, ndt.string.as_numpy)
class TestNumpyViewInterop(unittest.TestCase):
def setUp(self):
if sys.byteorder == 'little':
self.nonnative = '>'
else:
self.nonnative = '<'
def test_dynd_scalar_view(self):
a = np.array(3, dtype='int64')
n = nd.view(a)
self.assertEqual(nd.type_of(n), ndt.int64)
self.assertEqual(nd.as_py(n), 3)
self.assertEqual(n.access_flags, 'readwrite')
# Ensure it's a view
n[...] = 4
self.assertEqual(a[()], 4)
def test_dynd_scalar_array(self):
a = np.array(3, dtype='int64')
n = nd.array(a)
self.assertEqual(nd.type_of(n), ndt.int64)
self.assertEqual(nd.as_py(n), 3)
self.assertEqual(n.access_flags, 'immutable')
# Ensure it's not a view
a[...] = 4
self.assertEqual(nd.as_py(n), 3)
def test_dynd_scalar_asarray(self):
a = np.array(3, dtype='int64')
n = nd.asarray(a)
self.assertEqual(nd.type_of(n), ndt.int64)
self.assertEqual(nd.as_py(n), 3)
self.assertEqual(n.access_flags, 'readwrite')
# Ensure it's a view
n[...] = 4
self.assertEqual(a[()], 4)
def test_dynd_view_of_numpy_array(self):
# Tests viewing a numpy array as a dynd.array
nonnative = self.nonnative
a = np.arange(10, dtype=np.int32)
n = nd.view(a)
self.assertEqual(nd.dtype_of(n), ndt.int32)
self.assertEqual(nd.ndim_of(n), a.ndim)
self.assertEqual(n.shape, a.shape)
self.assertEqual(n.strides, a.strides)
a = np.arange(12, dtype=(nonnative + 'i4')).reshape(3,4)
n = nd.view(a)
self.assertEqual(nd.dtype_of(n), ndt.make_byteswap(ndt.int32))
self.assertEqual(nd.ndim_of(n), a.ndim)
self.assertEqual(n.shape, a.shape)
self.assertEqual(n.strides, a.strides)
a = np.arange(49, dtype='i1')
a = a[1:].view(dtype=np.int32).reshape(4,3)
n = nd.view(a)
self.assertEqual(nd.dtype_of(n), ndt.make_unaligned(ndt.int32))
self.assertEqual(nd.ndim_of(n), a.ndim)
self.assertEqual(n.shape, a.shape)
self.assertEqual(n.strides, a.strides)
a = np.arange(49, dtype='i1')
a = a[1:].view(dtype=(nonnative + 'i4')).reshape(2,2,3)
n = nd.view(a)
self.assertEqual(nd.dtype_of(n),
ndt.make_unaligned(ndt.make_byteswap(ndt.int32)))
self.assertEqual(nd.ndim_of(n), a.ndim)
self.assertEqual(n.shape, a.shape)
self.assertEqual(n.strides, a.strides)
def test_numpy_view_of_dynd_array(self):
# Tests viewing a dynd.array as a numpy array
nonnative = self.nonnative
n = nd.range(10, dtype=ndt.int32)
a = np.asarray(n)
self.assertEqual(a.dtype, np.dtype(np.int32))
self.assertTrue(a.flags.aligned)
self.assertEqual(a.ndim, nd.ndim_of(n))
self.assertEqual(a.shape, n.shape)
self.assertEqual(a.strides, n.strides)
# Make sure it's a view
a[1] = 100
self.assertEqual(nd.as_py(n[1]), 100)
n = nd.view(np.arange(12, dtype=(nonnative + 'i4')).reshape(3,4))
a = np.asarray(n)
self.assertEqual(a.dtype, np.dtype(nonnative + 'i4'))
self.assertTrue(a.flags.aligned)
self.assertEqual(a.ndim, nd.ndim_of(n))
self.assertEqual(a.shape, n.shape)
self.assertEqual(a.strides, n.strides)
# Make sure it's a view
a[1,2] = 100
self.assertEqual(nd.as_py(n[1,2]), 100)
n = nd.view(np.arange(49, dtype='i1')[1:].view(dtype=np.int32).reshape(4,3))
a = np.asarray(n)
self.assertEqual(a.dtype, np.dtype(np.int32))
self.assertFalse(a.flags.aligned)
self.assertEqual(a.ndim, nd.ndim_of(n))
self.assertEqual(a.shape, n.shape)
self.assertEqual(a.strides, n.strides)
# Make sure it's a view
a[1,2] = 100
self.assertEqual(nd.as_py(n[1,2]), 100)
n = nd.view(np.arange(49, dtype='i1')[1:].view(
dtype=(nonnative + 'i4')).reshape(2,2,3))
a = np.asarray(n)
self.assertEqual(a.dtype, np.dtype(nonnative + 'i4'))
self.assertFalse(a.flags.aligned)
self.assertEqual(a.ndim, nd.ndim_of(n))
self.assertEqual(a.shape, n.shape)
self.assertEqual(a.strides, n.strides)
# Make sure it's a view
a[1,1,1] = 100
self.assertEqual(nd.as_py(n[1,1,1]), 100)
def test_numpy_view_of_noncontig_dynd_array(self):
n = nd.range(10)[1::3]
a = np.asarray(n)
self.assertEqual(a.dtype, np.dtype('i4'))
self.assertFalse(a.flags.c_contiguous)
self.assertEqual(a.ndim, nd.ndim_of(n))
self.assertEqual(a.shape, n.shape)
self.assertEqual(a.strides, n.strides)
# Make sure it's a view as needed
a[1] = 100
self.assertEqual(nd.as_py(n[1]), 100)
def test_numpy_dynd_fixedstring_interop(self):
# Tests converting fixed-size string arrays to/from numpy
# ASCII Numpy -> dynd
a = np.array(['abc', 'testing', 'array'])
b = nd.view(a)
if sys.version_info >= (3, 0):
self.assertEqual(ndt.make_fixedstring(7, 'utf_32'), nd.dtype_of(b))
else:
self.assertEqual(ndt.make_fixedstring(7, 'ascii'), nd.dtype_of(b))
self.assertEqual(nd.dtype_of(b), ndt.type(a.dtype))
# Make sure it's ascii
a = a.astype('S7')
b = nd.view(a)
# ASCII dynd -> Numpy
c = np.asarray(b)
self.assertEqual(a.dtype, c.dtype)
assert_array_equal(a, c)
# verify 'a' and 'c' are looking at the same data
a[1] = 'modify'
assert_array_equal(a, c)
# ASCII dynd -> UTF32 dynd
b_u = b.ucast(ndt.make_fixedstring(7, 'utf_32'))
self.assertEqual(
ndt.make_convert(
ndt.make_fixedstring(7, 'utf_32'),
ndt.make_fixedstring(7, 'ascii')),
nd.dtype_of(b_u))
# Evaluate to its value array
b_u = b_u.eval()
self.assertEqual(
ndt.make_fixedstring(7, 'utf_32'),
nd.dtype_of(b_u))
# UTF32 dynd -> Numpy
c_u = np.asarray(b_u)
self.assertEqual(nd.dtype_of(b_u), ndt.type(c_u.dtype))
assert_array_equal(a.astype('U'), c_u)
# 'a' and 'c_u' are not looking at the same data
a[1] = 'diff'
self.assertFalse(np.all(a == c_u))
def test_numpy_blockref_string(self):
        # Blockref strings don't have a corresponding NumPy construct,
        # so NumPy makes an object array scalar out of them.
a = nd.array("abcdef")
self.assertEqual(nd.dtype_of(a), ndt.string)
# Some versions of NumPy produce an error instead,
# so this assertion is removed
#self.assertEqual(np.asarray(a).dtype, np.dtype(object))
a = nd.array(u"abcdef \uc548\ub155")
self.assertEqual(nd.dtype_of(a), ndt.string)
# Some versions of NumPy produce an error instead,
# so this assertion is removed
#self.assertEqual(np.asarray(a).dtype, np.dtype(object))
def test_readwrite_access_flags(self):
        def assign_to(x, y):
x[0] = y
# Tests that read/write access control is preserved to/from numpy
a = np.arange(10.)
# Writeable
b = nd.view(a)
b[0] = 2.0
self.assertEqual(nd.as_py(b[0]), 2.0)
self.assertEqual(a[0], 2.0)
# Readonly view of writeable
b = nd.view(a, access='r')
self.assertRaises(RuntimeError, assign_to, b, 3.0)
# should still be 2.0
self.assertEqual(nd.as_py(b[0]), 2.0)
self.assertEqual(a[0], 2.0)
# Not writeable
a.flags.writeable = False
b = nd.view(a)
self.assertRaises(RuntimeError, assign_to, b, 3.0)
# should still be 2.0
self.assertEqual(nd.as_py(b[0]), 2.0)
self.assertEqual(a[0], 2.0)
# Trying to get a readwrite view raises an error
self.assertRaises(RuntimeError, nd.view, a, access='rw')
class TestAsNumpy(unittest.TestCase):
def test_struct_as_numpy(self):
# Aligned cstruct
a = nd.array([[1, 2], [3, 4]], dtype='{x : int32, y: int64}')
b = nd.as_numpy(a)
self.assertEqual(b.dtype,
np.dtype([('x', np.int32), ('y', np.int64)], align=True))
self.assertEqual(nd.as_py(a.x), b['x'].tolist())
self.assertEqual(nd.as_py(a.y), b['y'].tolist())
# Unaligned cstruct
a = nd.array([[1, 2], [3, 4]],
dtype='{x : unaligned[int32], y: unaligned[int64]}')
b = nd.as_numpy(a)
self.assertEqual(b.dtype, np.dtype([('x', np.int32), ('y', np.int64)]))
self.assertEqual(nd.as_py(a.x), b['x'].tolist())
self.assertEqual(nd.as_py(a.y), b['y'].tolist())
def test_cstruct_as_numpy(self):
# Aligned cstruct
a = nd.array([[1, 2], [3, 4]], dtype='c{x : int32, y: int64}')
b = nd.as_numpy(a)
self.assertEqual(b.dtype,
np.dtype([('x', np.int32), ('y', np.int64)], align=True))
self.assertEqual(nd.as_py(a.x), b['x'].tolist())
self.assertEqual(nd.as_py(a.y), b['y'].tolist())
# Unaligned cstruct
a = nd.array([[1, 2], [3, 4]],
dtype='c{x : unaligned[int32], y: unaligned[int64]}')
b = nd.as_numpy(a)
self.assertEqual(b.dtype, np.dtype([('x', np.int32), ('y', np.int64)]))
self.assertEqual(nd.as_py(a.x), b['x'].tolist())
self.assertEqual(nd.as_py(a.y), b['y'].tolist())
def test_cstruct_via_pep3118(self):
# Aligned cstruct
a = nd.array([[1, 2], [3, 4]], dtype='c{x : int32, y: int64}')
b = np.asarray(a)
self.assertEqual(b.dtype,
np.dtype([('x', np.int32), ('y', np.int64)], align=True))
self.assertEqual(nd.as_py(a.x), b['x'].tolist())
self.assertEqual(nd.as_py(a.y), b['y'].tolist())
# Unaligned cstruct
a = nd.array([[1, 2], [3, 4]],
dtype='c{x : unaligned[int32], y: unaligned[int64]}')
b = np.asarray(a)
self.assertEqual(b.dtype, np.dtype([('x', np.int32), ('y', np.int64)]))
self.assertEqual(nd.as_py(a.x), b['x'].tolist())
self.assertEqual(nd.as_py(a.y), b['y'].tolist())
def test_fixed_dim(self):
a = nd.array([1, 3, 5], type='3 * int32')
b = nd.as_numpy(a)
self.assertEqual(b.dtype, np.dtype('int32'))
self.assertEqual(b.tolist(), [1, 3, 5])
def test_fixed_dim_via_pep3118(self):
a = nd.array([1, 3, 5], type='3 * int32')
b = np.asarray(a)
self.assertEqual(b.dtype, np.dtype('int32'))
self.assertEqual(b.tolist(), [1, 3, 5])
class TestNumpyScalarInterop(unittest.TestCase):
def test_numpy_scalar_conversion_dtypes(self):
self.assertEqual(nd.dtype_of(nd.array(np.bool_(True))), ndt.bool)
self.assertEqual(nd.dtype_of(nd.array(np.bool(True))), ndt.bool)
self.assertEqual(nd.dtype_of(nd.array(np.int8(100))), ndt.int8)
self.assertEqual(nd.dtype_of(nd.array(np.int16(100))), ndt.int16)
self.assertEqual(nd.dtype_of(nd.array(np.int32(100))), ndt.int32)
self.assertEqual(nd.dtype_of(nd.array(np.int64(100))), ndt.int64)
self.assertEqual(nd.dtype_of(nd.array(np.uint8(100))), ndt.uint8)
self.assertEqual(nd.dtype_of(nd.array(np.uint16(100))), ndt.uint16)
self.assertEqual(nd.dtype_of(nd.array(np.uint32(100))), ndt.uint32)
self.assertEqual(nd.dtype_of(nd.array(np.uint64(100))), ndt.uint64)
self.assertEqual(nd.dtype_of(nd.array(np.float32(100.))), ndt.float32)
self.assertEqual(nd.dtype_of(nd.array(np.float64(100.))), ndt.float64)
self.assertEqual(nd.dtype_of(nd.array(np.complex64(100j))),
ndt.complex_float32)
self.assertEqual(nd.dtype_of(nd.array(np.complex128(100j))),
ndt.complex_float64)
if np.__version__ >= '1.7':
self.assertEqual(nd.dtype_of(nd.array(np.datetime64('2000-12-13'))),
ndt.date)
self.assertEqual(nd.dtype_of(nd.array(np.datetime64('2000-12-13T12:30'))),
ndt.type('datetime[tz="UTC"]'))
def test_numpy_scalar_conversion_values(self):
self.assertEqual(nd.as_py(nd.array(np.bool_(True))), True)
self.assertEqual(nd.as_py(nd.array(np.bool_(False))), False)
self.assertEqual(nd.as_py(nd.array(np.int8(100))), 100)
self.assertEqual(nd.as_py(nd.array(np.int8(-100))), -100)
self.assertEqual(nd.as_py(nd.array(np.int16(20000))), 20000)
self.assertEqual(nd.as_py(nd.array(np.int16(-20000))), -20000)
self.assertEqual(nd.as_py(nd.array(np.int32(1000000000))), 1000000000)
self.assertEqual(nd.as_py(nd.array(np.int64(-1000000000000))),
-1000000000000)
self.assertEqual(nd.as_py(nd.array(np.int64(1000000000000))),
1000000000000)
self.assertEqual(nd.as_py(nd.array(np.int32(-1000000000))),
-1000000000)
self.assertEqual(nd.as_py(nd.array(np.uint8(200))), 200)
self.assertEqual(nd.as_py(nd.array(np.uint16(50000))), 50000)
self.assertEqual(nd.as_py(nd.array(np.uint32(3000000000))), 3000000000)
self.assertEqual(nd.as_py(nd.array(np.uint64(10000000000000000000))),
10000000000000000000)
self.assertEqual(nd.as_py(nd.array(np.float32(2.5))), 2.5)
self.assertEqual(nd.as_py(nd.array(np.float64(2.5))), 2.5)
self.assertEqual(nd.as_py(nd.array(np.complex64(2.5-1j))), 2.5-1j)
self.assertEqual(nd.as_py(nd.array(np.complex128(2.5-1j))), 2.5-1j)
if np.__version__ >= '1.7':
# Various date units
self.assertEqual(nd.as_py(nd.array(np.datetime64('2000'))),
date(2000, 1, 1))
self.assertEqual(nd.as_py(nd.array(np.datetime64('2000-12'))),
date(2000, 12, 1))
self.assertEqual(nd.as_py(nd.array(np.datetime64('2000-12-13'))),
date(2000, 12, 13))
# Various datetime units
self.assertEqual(nd.as_py(nd.array(np.datetime64('2000-12-13T12Z'))),
datetime(2000, 12, 13, 12, 0))
self.assertEqual(nd.as_py(nd.array(np.datetime64('2000-12-13T12:30Z'))),
datetime(2000, 12, 13, 12, 30))
self.assertEqual(nd.as_py(nd.array(np.datetime64('1823-12-13T12:30Z'))),
datetime(1823, 12, 13, 12, 30))
self.assertEqual(nd.as_py(nd.array(np.datetime64('2000-12-13T12:30:24Z'))),
datetime(2000, 12, 13, 12, 30, 24))
self.assertEqual(nd.as_py(nd.array(np.datetime64('2000-12-13T12:30:24.123Z'))),
datetime(2000, 12, 13, 12, 30, 24, 123000))
self.assertEqual(nd.as_py(nd.array(np.datetime64('2000-12-13T12:30:24.123456Z'))),
datetime(2000, 12, 13, 12, 30, 24, 123456))
self.assertEqual(nd.as_py(nd.array(np.datetime64('2000-12-13T12:30:24.123456124Z'))),
datetime(2000, 12, 13, 12, 30, 24, 123456))
self.assertEqual(str(nd.array(np.datetime64('2000-12-13T12:30:24.123456124Z'))),
'2000-12-13T12:30:24.1234561Z')
self.assertEqual(str(nd.array(np.datetime64('1842-12-13T12:30:24.123456124Z'))),
'1842-12-13T12:30:24.1234561Z')
def test_numpy_struct_scalar(self):
# Create a NumPy struct scalar object, by indexing into
# a structured array
a = np.array([(10, 11, 12)], dtype='i4,i8,f8')[0]
aligned_tp = ndt.type('c{f0: int32, f1: int64, f2: float64}')
val = {'f0': 10, 'f1': 11, 'f2': 12}
# Construct using nd.array
b = nd.array(a)
self.assertEqual(nd.type_of(b), aligned_tp)
self.assertEqual(nd.as_py(b), val)
self.assertEqual(b.access_flags, 'immutable')
b = nd.array(a, access='rw')
self.assertEqual(nd.type_of(b), aligned_tp)
self.assertEqual(nd.as_py(b), val)
self.assertEqual(b.access_flags, 'readwrite')
# Construct using nd.asarray
b = nd.asarray(a)
self.assertEqual(nd.type_of(b), aligned_tp)
self.assertEqual(nd.as_py(b), val)
self.assertEqual(b.access_flags, 'immutable')
b = nd.asarray(a, access='rw')
self.assertEqual(nd.type_of(b), aligned_tp)
self.assertEqual(nd.as_py(b), val)
self.assertEqual(b.access_flags, 'readwrite')
# nd.view should fail
self.assertRaises(RuntimeError, nd.view, a)
def test_expr_struct_conversion(self):
a = nd.array([date(2000, 12, 13), date(1995, 5, 2)]).to_struct()
b = nd.as_numpy(a, allow_copy=True)
self.assertTrue(isinstance(b, np.ndarray))
# Use the NumPy assertions which support arrays
assert_equal(b['year'], [2000, 1995])
assert_equal(b['month'], [12, 5])
assert_equal(b['day'], [13, 2])
def test_var_dim_conversion(self):
# A simple instantiated var_dim array should be
# viewable with numpy without changes
a = nd.array([1, 2, 3, 4, 5], type='var * int32')
b = nd.as_numpy(a)
self.assertTrue(isinstance(b, np.ndarray))
self.assertEqual(b.dtype, np.dtype('int32'))
# Use the NumPy assertions which support arrays
assert_equal(b, [1, 2, 3, 4, 5])
def test_date_from_numpy(self):
a = np.array(['2000-12-13', '1995-05-02'], dtype='M8[D]')
b = nd.array(a)
self.assertEqual(nd.type_of(b), ndt.type('strided * date'))
self.assertEqual(nd.as_py(b), [date(2000, 12, 13), date(1995, 5, 2)])
def test_date_as_numpy(self):
a = nd.array([date(2000, 12, 13), date(1995, 5, 2)])
b = nd.as_numpy(a, allow_copy=True)
assert_equal(b, np.array(['2000-12-13', '1995-05-02'], dtype='M8[D]'))
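    # Note: the next method is prefixed 'tt_' rather than 'test_', so unittest's
    # default loader skips it -- most likely deliberately, given the NumPy
    # version-dependent datetime behaviour it exercises.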
def tt_datetime_from_numpy(self):
# NumPy hours unit
a = np.array(['2000-12-13T12Z', '1955-05-02T02Z'],
dtype='M8[h]')
b = nd.array(a)
self.assertEqual(nd.type_of(b), ndt.type('strided * datetime[tz="UTC"]'))
self.assertEqual(nd.as_py(b), [datetime(2000, 12, 13, 12),
datetime(1955, 5, 2, 2)])
# NumPy minutes unit
a = np.array(['2000-12-13T12:30Z', '1955-05-02T02:23Z'],
dtype='M8[m]')
b = nd.array(a)
self.assertEqual(nd.type_of(b), ndt.type('strided * datetime[tz="UTC"]'))
self.assertEqual(nd.as_py(b), [datetime(2000, 12, 13, 12, 30),
datetime(1955, 5, 2, 2, 23)])
# NumPy seconds unit
a = np.array(['2000-12-13T12:30:51Z', '1955-05-02T02:23:29Z'],
dtype='M8[s]')
b = nd.array(a)
self.assertEqual(nd.type_of(b), ndt.type('strided * datetime[tz="UTC"]'))
self.assertEqual(nd.as_py(b), [datetime(2000, 12, 13, 12, 30, 51),
datetime(1955, 5, 2, 2, 23, 29)])
# NumPy milliseconds unit
a = np.array(['2000-12-13T12:30:51.123Z', '1955-05-02T02:23:29.456Z'],
dtype='M8[ms]')
b = nd.array(a)
self.assertEqual(nd.type_of(b), ndt.type('strided * datetime[tz="UTC"]'))
self.assertEqual(nd.as_py(b), [datetime(2000, 12, 13, 12, 30, 51, 123000),
datetime(1955, 5, 2, 2, 23, 29, 456000)])
# NumPy microseconds unit
a = np.array(['2000-12-13T12:30:51.123456Z', '1955-05-02T02:23:29.456123Z'],
dtype='M8[us]')
b = nd.array(a)
self.assertEqual(nd.type_of(b), ndt.type('strided * datetime[tz="UTC"]'))
self.assertEqual(nd.as_py(b), [datetime(2000, 12, 13, 12, 30, 51, 123456),
datetime(1955, 5, 2, 2, 23, 29, 456123)])
# NumPy nanoseconds unit (truncated to 100 nanosecond ticks)
a = np.array(['2000-12-13T12:30:51.123456987Z', '1955-05-02T02:23:29.456123798Z'],
dtype='M8[ns]')
b = nd.array(a)
self.assertEqual(nd.type_of(b), ndt.type('strided * datetime[tz="UTC"]'))
self.assertEqual([str(x) for x in b], ["2000-12-13T12:30:51.1234569",
"1955-05-02T02:23:29.4561237"])
def test_datetime_as_numpy(self):
a = nd.array(['2000-12-13T12:30',
'1995-05-02T2:15:33'],
dtype='datetime[tz="UTC"]')
b = nd.as_numpy(a, allow_copy=True)
assert_equal(b, np.array(['2000-12-13T12:30Z', '1995-05-02T02:15:33Z'],
dtype='M8[us]'))
if __name__ == '__main__':
unittest.main()
| aterrel/dynd-python | dynd/tests/test_numpy_interop.py | Python | bsd-2-clause | 27,383 | 0.002922 |
"""
Copyright (C) 2015 Quinn D Granfor <[email protected]>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
version 2, as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License version 2 for more details.
You should have received a copy of the GNU General Public License
version 2 along with this program; if not, write to the Free
Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA.
"""
import sys
from common import common_config_ini
from common import common_logging_elasticsearch_httpx
from common import common_metadata_scudlee
from common import common_signal
from common import common_system
# verify this program isn't already running!
if common_system.com_process_list(
process_name='/usr/bin/python3 /mediakraken/subprogram_match_anime_id.py'):
sys.exit(0)
# start logging
common_logging_elasticsearch_httpx.com_es_httpx_post(message_type='info',
message_text='START',
index_name='subprogram_match_anime_id')
# set signal exit breaks
common_signal.com_signal_set_break()
# open the database
option_config_json, db_connection = common_config_ini.com_config_read()
# same code as in the subprogram that updates/creates collections
def store_update_record(db_connection, collection_name, guid_list):
"""
# store/update the record
"""
collection_guid = db_connection.db_collection_by_name(collection_name)
if collection_guid is None:
# insert
db_connection.db_collection_insert(collection_name, guid_list)
else:
# update
db_connection.db_collection_update(collection_guid, guid_list)
# check for new scudlee download
common_metadata_scudlee.mk_scudlee_fetch_xml()
# begin the media match on NULL matches
for row_data in common_metadata_scudlee.mk_scudlee_anime_list_parse():
common_logging_elasticsearch_httpx.com_es_httpx_post(message_type='info',
                                                         message_text={'row': str(row_data)})
if row_data is not None:
        # Skip media with no usable match: row_data[2] is the IMDb id, and if
        # row_data[1] is not an int it is not a valid TVDB id.
if type(row_data[1]) != int and row_data[2] is None:
pass
else:
# should be valid data, do the update
db_connection.db_meta_update_media_id_from_scudlee(row_data[1],
row_data[2],
row_data[0])
# begin the collections match/create/update
for row_data in common_metadata_scudlee.mk_scudlee_anime_set_parse():
# db_connection.db_meta_update_Collection_Media_ID_From_Scudlee(row_data[0],row_data[1])
if row_data[1] == "music video":
pass
else:
store_update_record(db_connection, row_data[0], row_data[1])
# commit all changes to db
db_connection.db_commit()
# close the database
db_connection.db_close()
| MediaKraken/MediaKraken_Deployment | source/subprogram_match_anime_id.py | Python | gpl-3.0 | 3,312 | 0.000906 |
import json
import argparse
import numpy
import sys
import copy
from astropy.coordinates import SkyCoord
from astropy import units
import operator
class Program(object):
def __init__(self, runid="16BP06", pi_login="gladman"):
self.config = {"runid": runid,
"pi_login": pi_login,
"program_configuration": {"mjdates": [],
"observing_blocks": [],
"observing_groups": []
}}
def add_target(self, target):
self.config["program_configuration"]["mjdates"].append(target)
def add_observing_block(self, observing_block):
self.config["program_configuration"]["observing_blocks"].append(observing_block)
def add_observing_group(self, observing_group):
self.config["program_configuration"]["observing_groups"].append(observing_group)
class Target(object):
def __init__(self, filename=None):
self.config = json.load(open(filename))
@property
def token(self):
return self.config["identifier"]["client_token"]
@property
def mag(self):
return self.config["moving_target"]["ephemeris_points"][0]["mag"]
@property
def coordinate(self):
return SkyCoord(self.config["moving_target"]["ephemeris_points"][0]["coordinate"]["ra"],
self.config["moving_target"]["ephemeris_points"][0]["coordinate"]["dec"],
unit='degree')
class ObservingBlock(object):
def __init__(self, client_token, target_token):
self.config = {"identifier": {"client_token": client_token},
"target_identifier": {"client_token": target_token},
"constraint_identifiers": [{"server_token": "C1"}],
"instrument_config_identifiers": [{"server_token": "I1"}]}
@property
def token(self):
return self.config["identifier"]["client_token"]
class ObservingGroup(object):
def __init__(self, client_token):
self.config = {"identifier": {"client_token": client_token},
"observing_block_identifiers": []}
def add_ob(self, client_token):
self.config["observing_block_identifiers"].append({"client_token": client_token})
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('ogname')
parser.add_argument('mjdates', nargs='+')
args = parser.parse_args()
    # Break the mjdates into OBs based on the maximum magnitude of the source in each pointing.
cuts = numpy.array([23.0, 23.5, 24.0, 24.5, 25.0, 25.5, 26.0, 30.0])
IC_exptimes = [50, 100, 200, 300, 400, 500, 600, 700]
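    # Worked example of the magnitude -> exposure mapping used below (with the
    # cuts above): a target of mag 24.2 gives (24.2 > cuts).sum() == 3, so its
    # OB gets instrument config "I7" (3 + 4) and IC_exptimes[3] == 300 s.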
program = Program()
ob_tokens = []
mags = {}
ob_coordinate = {}
for filename in args.mjdates:
target = Target(filename)
program.add_target(target.config)
ob_token = "OB-{}-{}".format(target.token, target.mag)
ob = ObservingBlock(ob_token, target.token)
idx = (target.mag > cuts).sum() + 4
ob.config["instrument_config_identifiers"] = [{"server_token": "I{}".format(idx)}]
program.add_observing_block(ob.config)
ob_tokens.append(ob_token)
mags[ob_token] = target.mag
ob_coordinate[ob_token] = target.coordinate
sf = lambda x, y: cmp(x.ra, y.ra)
order_tokens = sorted(ob_coordinate, cmp=sf, key=ob_coordinate.get)
total_itime = 0
ogs = {}
scheduled = {}
og_idx = 0
while len(scheduled) < len(ob_tokens):
og_idx += 1
og_token = "OG_{}_{}_{}".format(args.ogname, og_idx, 0)
sys.stdout.write("{}: ".format(og_token))
og = ObservingGroup(og_token)
og_coord = None
og_itime = 0
for ob_token in order_tokens:
if ob_token not in scheduled:
if og_coord is None:
og_coord = ob_coordinate[ob_token]
if ob_coordinate[ob_token].separation(og_coord) > 30 * units.degree:
continue
og.add_ob(ob_token)
scheduled[ob_token] = True
sys.stdout.write("{} ".format(ob_token))
sys.stdout.flush()
idx = (mags[ob_token] > cuts).sum()
print ob_token, mags[ob_token], idx + 4
og_itime += IC_exptimes[idx] + 40
if og_itime > 3000.0:
break
break
total_itime += og_itime
sys.stdout.write(" {}s \n".format(og_itime))
program.add_observing_group(og.config)
nrepeats = 0
for repeat in range(nrepeats):
total_itime += og_itime
og_token = "OG_{}_{}_{}".format(args.ogname, og_idx, repeat + 1)
og = copy.deepcopy(og)
og.config["identifier"]["client_token"] = og_token
program.add_observing_group(og.config)
print "Total I-Time: {} hrs".format(total_itime/3600.)
json.dump(program.config, open('program.json', 'w'), indent=4, sort_keys=True)
| CFIS-Octarine/octarine | planning/ph2.py | Python | gpl-3.0 | 5,099 | 0.001961 |
# -*- encoding: utf-8 -*-
"""
Tests for django.core.servers.
"""
from __future__ import unicode_literals
import os
import socket
from django.core.exceptions import ImproperlyConfigured
from django.test import LiveServerTestCase
from django.test.utils import override_settings
from django.utils.http import urlencode
from django.utils.six.moves.urllib.error import HTTPError
from django.utils.six.moves.urllib.request import urlopen
from django.utils._os import upath
from .models import Person
TEST_ROOT = os.path.dirname(upath(__file__))
TEST_SETTINGS = {
'MEDIA_URL': '/media/',
'MEDIA_ROOT': os.path.join(TEST_ROOT, 'media'),
'STATIC_URL': '/static/',
'STATIC_ROOT': os.path.join(TEST_ROOT, 'static'),
}
class LiveServerBase(LiveServerTestCase):
available_apps = [
'servers',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
]
fixtures = ['testdata.json']
urls = 'servers.urls'
@classmethod
def setUpClass(cls):
# Override settings
cls.settings_override = override_settings(**TEST_SETTINGS)
cls.settings_override.enable()
super(LiveServerBase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
# Restore original settings
cls.settings_override.disable()
super(LiveServerBase, cls).tearDownClass()
def urlopen(self, url):
return urlopen(self.live_server_url + url)
class LiveServerAddress(LiveServerBase):
"""
Ensure that the address set in the environment variable is valid.
Refs #2879.
"""
@classmethod
def setUpClass(cls):
# Backup original environment variable
address_predefined = 'DJANGO_LIVE_TEST_SERVER_ADDRESS' in os.environ
old_address = os.environ.get('DJANGO_LIVE_TEST_SERVER_ADDRESS')
# Just the host is not accepted
cls.raises_exception('localhost', ImproperlyConfigured)
# The host must be valid
cls.raises_exception('blahblahblah:8081', socket.error)
# The list of ports must be in a valid format
cls.raises_exception('localhost:8081,', ImproperlyConfigured)
cls.raises_exception('localhost:8081,blah', ImproperlyConfigured)
cls.raises_exception('localhost:8081-', ImproperlyConfigured)
cls.raises_exception('localhost:8081-blah', ImproperlyConfigured)
cls.raises_exception('localhost:8081-8082-8083', ImproperlyConfigured)
# If contrib.staticfiles isn't configured properly, the exception
# should bubble up to the main thread.
old_STATIC_URL = TEST_SETTINGS['STATIC_URL']
TEST_SETTINGS['STATIC_URL'] = None
cls.raises_exception('localhost:8081', ImproperlyConfigured)
TEST_SETTINGS['STATIC_URL'] = old_STATIC_URL
# Restore original environment variable
if address_predefined:
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = old_address
else:
del os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS']
@classmethod
def tearDownClass(cls):
# skip it, as setUpClass doesn't call its parent either
pass
@classmethod
def raises_exception(cls, address, exception):
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = address
try:
super(LiveServerAddress, cls).setUpClass()
raise Exception("The line above should have raised an exception")
except exception:
pass
finally:
super(LiveServerAddress, cls).tearDownClass()
def test_test_test(self):
# Intentionally empty method so that the test is picked up by the
        # test runner and the overridden setUpClass() method is executed.
pass
class LiveServerViews(LiveServerBase):
def test_404(self):
"""
Ensure that the LiveServerTestCase serves 404s.
Refs #2879.
"""
try:
self.urlopen('/')
except HTTPError as err:
self.assertEqual(err.code, 404, 'Expected 404 response')
else:
self.fail('Expected 404 response')
def test_view(self):
"""
Ensure that the LiveServerTestCase serves views.
Refs #2879.
"""
f = self.urlopen('/example_view/')
self.assertEqual(f.read(), b'example view')
def test_static_files(self):
"""
Ensure that the LiveServerTestCase serves static files.
Refs #2879.
"""
f = self.urlopen('/static/example_static_file.txt')
self.assertEqual(f.read().rstrip(b'\r\n'), b'example static file')
def test_media_files(self):
"""
Ensure that the LiveServerTestCase serves media files.
Refs #2879.
"""
f = self.urlopen('/media/example_media_file.txt')
self.assertEqual(f.read().rstrip(b'\r\n'), b'example media file')
def test_environ(self):
f = self.urlopen('/environ_view/?%s' % urlencode({'q': 'тест'}))
self.assertIn(b"QUERY_STRING: 'q=%D1%82%D0%B5%D1%81%D1%82'", f.read())
class LiveServerDatabase(LiveServerBase):
def test_fixtures_loaded(self):
"""
Ensure that fixtures are properly loaded and visible to the
live server thread.
Refs #2879.
"""
f = self.urlopen('/model_view/')
self.assertEqual(f.read().splitlines(), [b'jane', b'robert'])
def test_database_writes(self):
"""
Ensure that data written to the database by a view can be read.
Refs #2879.
"""
self.urlopen('/create_model_instance/')
self.assertQuerysetEqual(
Person.objects.all().order_by('pk'),
['jane', 'robert', 'emily'],
lambda b: b.name
)
| atruberg/django-custom | tests/servers/tests.py | Python | bsd-3-clause | 5,773 | 0.000173 |
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 9 11:10:56 2014
@author: MasterMac
"""
sec = int(raw_input())
hour = sec / 60**2
minute = sec / 60 % 60
second = sec % 60
print hour, minute, second
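# Worked example: an input of 5000 s prints "1 23 20" (5000/3600 = 1, 5000/60 % 60 = 23, 5000 % 60 = 20).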
| hacpai/show-me-the-code | Python/0002/main.py | Python | gpl-2.0 | 202 | 0.00495 |
from src.discrete import Environment
from src.queue import Queue
from random import expovariate, seed
from tabulate import tabulate
from math import e
class MD1Queue(Queue):
"""MD1 Queue discrete time simulator"""
def __init__(self, lamb, mu, env):
super().__init__(env)
self.lamb = lamb
self.mu = mu
@property
def arrival_rate(self):
return expovariate(self.lamb)
@property
def service_rate(self):
return 1/self.mu
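    # The constant service time (1/mu) above is what makes this an M/D/1 queue;
    # only the inter-arrival times (expovariate) are random.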
def expected_dist(self, i):
if i == 0:
return 1 - 1/self.lamb
l = 1/self.lamb
return (1 - l) * (e**(i*l) - 1)
if __name__ == "__main__":
seed(122)
lamb = 130
mu = 200
N = int(1e5)
QueueSim = Environment(verbosity=True)
Q = MD1Queue(lamb, mu, QueueSim)
Q.generate_arrival_events(N)
QueueSim.run()
avg_wait = 0
avg_service = 0
for pkt in Q.all_pkts:
avg_wait += pkt[2] - pkt[1]
avg_service += pkt[0]
avg_wait = avg_wait/N
avg_service = avg_service/N
rho = lamb/mu
expected_wait = rho/(2*mu*(1-rho))
expected_overall = 1/mu + rho/(2*mu*(1-rho))
expected_in_sys = rho + .5 * rho**2 / (1 - rho)
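    # Standard M/D/1 (Pollaczek-Khinchine) results, matching the lines above:
    # mean queueing delay   W_q = rho / (2*mu*(1 - rho))
    # mean number in system L   = rho + rho**2 / (2*(1 - rho))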
tbl = [["Average wait time:", avg_wait],
["Average service time:", avg_wait],
["Overall time:", avg_service + avg_wait],
["Predicted wait time:", expected_wait],
["Wait time error:", 0],
["Predicted overall time:", expected_overall],
["Overall time error:", (avg_service + avg_wait - expected_overall) / expected_overall]]
print(tabulate(tbl, floatfmt=".3f", tablefmt="fancy_grid"))
Q.print_distribution()
print("Num expected in sys = {}".format(expected_in_sys))
| maxwellgerber/ee122_final_proj | md1.py | Python | mit | 1,637 | 0.020159 |
import asposebarcodecloud
from asposebarcodecloud.BarcodeApi import BarcodeApi
from asposebarcodecloud.BarcodeApi import ApiException
from asposebarcodecloud.models import BarcodeReader
import asposestoragecloud
from asposestoragecloud.StorageApi import StorageApi
from asposestoragecloud.StorageApi import ResponseMessage
import ConfigParser
config = ConfigParser.ConfigParser()
config.readfp(open(r'../../data/config.properties'))
apiKey = config.get('AppConfig', 'api_key')
appSid = config.get('AppConfig', 'app_sid')
out_folder = config.get('AppConfig', 'out_folder')
data_folder = "../../data/" #resouece data folder
#ExStart:1
#Instantiate Aspose Storage API SDK
storage_apiClient = asposestoragecloud.ApiClient.ApiClient(apiKey, appSid, True)
storageApi = StorageApi(storage_apiClient)
#Instantiate Aspose Barcode API SDK
api_client = asposebarcodecloud.ApiClient.ApiClient(apiKey, appSid, True)
barcodeApi = BarcodeApi(api_client);
#Set input file name
name = "sample-barcode.jpeg"
body = BarcodeReader.BarcodeReader()
#Set if FNC symbol stripping should be performed
body.StripFNC = True
#Set mode for checksum validation during recognition
body.ChecksumValidation = "ON"
#Set special mode of barcode binarization
body.BinarizationHints = "ComplexBackground"
try:
#upload file to aspose cloud storage
storageApi.PutCreate(Path=name, file=data_folder + name)
    #invoke Aspose.BarCode Cloud SDK API to recognize a barcode from a file on the server, with parameters in body
response = barcodeApi.PutBarcodeRecognizeFromBody(name, body)
if response.Status == "OK":
barcodes = response.Barcodes
for barcode in barcodes:
print "Type :: " + barcode.BarcodeType
print "Codetext :: " + barcode.BarcodeValue
except ApiException as ex:
print "ApiException:"
print "Code:" + str(ex.code)
print "Message:" + ex.message
#ExEnd:1 | farooqsheikhpk/Aspose.BarCode-for-Cloud | Examples/Python/managing-recognition/without-cloud-storage/read-barcodes-with-checksum.py | Python | mit | 2,040 | 0.012745 |
"""Home Assistant command line scripts."""
from __future__ import annotations
import argparse
import asyncio
import importlib
import logging
import os
import sys
from typing import Sequence
from homeassistant import runner
from homeassistant.bootstrap import async_mount_local_lib_path
from homeassistant.config import get_default_config_dir
from homeassistant.requirements import pip_kwargs
from homeassistant.util.package import install_package, is_installed, is_virtual_env
# mypy: allow-untyped-defs, no-warn-return-any
def run(args: list) -> int:
"""Run a script."""
scripts = []
path = os.path.dirname(__file__)
for fil in os.listdir(path):
if fil == "__pycache__":
continue
if os.path.isdir(os.path.join(path, fil)):
scripts.append(fil)
elif fil != "__init__.py" and fil.endswith(".py"):
scripts.append(fil[:-3])
if not args:
print("Please specify a script to run.")
print("Available scripts:", ", ".join(scripts))
return 1
if args[0] not in scripts:
print("Invalid script specified.")
print("Available scripts:", ", ".join(scripts))
return 1
script = importlib.import_module(f"homeassistant.scripts.{args[0]}")
config_dir = extract_config_dir()
loop = asyncio.get_event_loop()
if not is_virtual_env():
loop.run_until_complete(async_mount_local_lib_path(config_dir))
_pip_kwargs = pip_kwargs(config_dir)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
for req in getattr(script, "REQUIREMENTS", []):
if is_installed(req):
continue
if not install_package(req, **_pip_kwargs):
print("Aborting script, could not install dependency", req)
return 1
asyncio.set_event_loop_policy(runner.HassEventLoopPolicy(False))
return script.run(args[1:]) # type: ignore
def extract_config_dir(args: Sequence[str] | None = None) -> str:
"""Extract the config dir from the arguments or get the default."""
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument("-c", "--config", default=None)
parsed_args = parser.parse_known_args(args)[0]
return (
os.path.join(os.getcwd(), parsed_args.config)
if parsed_args.config
else get_default_config_dir()
)
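# Illustrative behaviour of extract_config_dir(), derived from the code above:
# extract_config_dir(["-c", "my_config"]) joins "my_config" onto os.getcwd(),
# while extract_config_dir([]) falls back to get_default_config_dir().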
| w1ll1am23/home-assistant | homeassistant/scripts/__init__.py | Python | apache-2.0 | 2,358 | 0.000424 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-28 15:30
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('clubs', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='club',
name='time',
),
migrations.AddField(
model_name='club',
name='year',
field=models.IntegerField(default=2017),
preserve_default=False,
),
]
| waymao/SU-System | clubs/migrations/0002_auto_20170828_2330.py | Python | gpl-3.0 | 569 | 0 |
#
# DUT Connection classes
#
# Copyright (C) 2013 Curt Brune <[email protected]>
#
# SPDX-License-Identifier: GPL-2.0
'''
Defines a Connection object that the test fixture uses to communicate
with a DUT.
'''
#-------------------------------------------------------------------------------
#
# Imports
#
try:
import sys
import os
import re
import io
import logging
import pexpect
except ImportError, e:
raise ImportError (str(e) + "- required module not found")
class Connection(object):
'''
Base connection class
'''
proto = None
def __init__(self, dut):
self._dut = dut
self._child = None
def open(self, prompt=""):
'''
Open the DUT communication channel.
prompt -- default CLI prompt to synchronize to
'''
self._prompt = prompt
to = int(self._dut.get_config('timeout'))
logging.info("Opening connection: " + self.command)
self._child = pexpect.spawn(self.command, timeout=to)
logging.info("Logging console output: " + self._dut.args.console_log.name)
self._child.logfile = self._dut.args.console_log
self._login()
def _login(self):
'''
        After open(), _login() is called to perform any required login chat.
'''
pass
def close(self):
'''
Close the DUT communication channel
'''
self._child.close(force=True)
def expect(self, pattern, timeout=-1):
'''
Monitor DUT communication channel, looking for a pattern.
pattern -- pattern to look for
timeout -- how many seconds to wait for pattern to show up. -1 is connection default.
'''
try:
self._child.expect(pattern, timeout=timeout)
except pexpect.EOF, e:
logging.critical("pexpect received EOF while expecting: " + pattern)
raise
except pexpect.TIMEOUT, e:
if timeout != -1:
to = timeout
else:
to = self._child.timeout
logging.critical("pexpect received TIMEOUT (%d secs) while expecting: %s" %
(to, pattern))
raise
logging.debug("before text: %s" % (self._child.before))
logging.debug("match text: %s" % (self._child.match))
logging.debug("after text: %s" % (self._child.after))
return self._child.before
def send(self, line, timeout=-1):
'''
Send line to DUT and wait for DUT prompt.
line -- string to send to DUT. A newline is added automatically.
timeout -- how many seconds to wait for prompt. -1 is connection default.
'''
self._child.sendline(line)
try:
output = self.expect(self.prompt, timeout)
except pexpect.EOF, e:
logging.critical("pexpect received EOF while sending: " + line)
sys.exit(1)
except pexpect.TIMEOUT, e:
if timeout != -1:
to = timeout
else:
to = self._child.timeout
logging.critical("pexpect received TIMEOUT (%d secs) while sending: >%s<" %
(to, line))
sys.exit(1)
# Return the output, split into lines. Also skip the first
# line as it is just an echo of the command sent.
return output.splitlines()[1:]
def sendline(self, line):
'''
Send line to DUT and return immediately.
line -- string to send to DUT. A newline is added automatically.
'''
self._child.sendline(line)
@property
def prompt(self):
'''
Return the current prompt pattern.
'''
return self._prompt
@prompt.setter
def prompt(self, pattern):
'''
Set the prompt to wait for when issuing CLI commands.
pattern -- The pattern representing the prompt.
'''
old_prompt = self._prompt
self._prompt = pattern
class SimpleTelnetConnection(Connection):
'''
Simple telnet connection that does not require a password.
'''
proto = "telnet-simple"
def __init__(self, dut):
server = dut.get_config('telnet_server')
port = dut.get_config('telnet_port')
self.command = "/usr/bin/telnet %s %s" % (server, port)
Connection.__init__(self, dut)
class AuthTelnetConnection(SimpleTelnetConnection):
'''
Authenticated telnet connection that requires a username and
password.
'''
proto = "telnet"
def _login(self):
index = self._child.expect(["login: ", pexpect.EOF, pexpect.TIMEOUT])
if index == 1:
logging.critical("pexect received EOF during telnet login")
sys.exit(1)
elif index == 2:
to = self._child.timeout
logging.critical("received TIMEOUT (%d secs) during telnet login" %
(to))
sys.exit(1)
user = self._dut.get_config('telnet_user')
self._child.sendline(user)
index = self._child.expect(["Password: ", pexpect.EOF, pexpect.TIMEOUT])
if index == 1:
logging.critical("pexect received EOF during telnet password")
sys.exit(1)
elif index == 2:
to = self._child.timeout
logging.critical("received TIMEOUT (%d secs) during telnet password" %
(to))
sys.exit(1)
pw = self._dut.get_config('telnet_pass')
self._child.sendline(pw)
class SSHConnection(Connection):
'''
Authenticated SSH connection that requires a username and password.
'''
proto = "ssh"
def __init__(self, dut):
server = dut.get_config('ssh_server')
port = dut.get_config('ssh_port')
user = dut.get_config('ssh_user')
self.command = "/usr/bin/ssh -p %s %s@%s" % (port, user, server)
Connection.__init__(self, dut)
def _login(self):
index = self._child.expect(["Password: ", pexpect.EOF, pexpect.TIMEOUT])
if index == 1:
logging.critical("pexect received EOF during ssh login")
sys.exit(1)
elif index == 2:
to = self._child.timeout
logging.critical("pexect received TIMEOUT (%d secs) during ssh login" %
(to))
sys.exit(1)
pw = self._dut.get_config('ssh_pass')
self._child.sendline(pw)
#
# Registry of available Connection classes
#
connection_protos = (SimpleTelnetConnection,
AuthTelnetConnection,
SSHConnection,)
class NoSuchConnection(RuntimeError):
pass
def find_connection(proto):
for c in connection_protos:
if c.proto == proto:
return c
raise NoSuchConnection('Connection proto not found: %s' % (proto))
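# Example (hypothetical, for illustration only) of how a test fixture might use
# the registry; 'dut' is assumed to provide the get_config() values used above:
#
#   conn_cls = find_connection(dut.get_config('proto'))   # e.g. 'ssh'
#   conn = conn_cls(dut)
#   conn.open(prompt=r'ONIE:/ #')
#   output = conn.send('onie-sysinfo -v')
#   conn.close()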
| abaumhauer/onie | test/lib/connection.py | Python | gpl-2.0 | 6,914 | 0.003182 |
"""
This file is part of pyS5p
https://github.com/rmvanhees/pys5p.git
Performs unit-tests on S5Pplot methods: draw_signal, draw_quality,
draw_cmp_images, draw_trend1d and draw_lines (xarray version)
Copyright (c) 2020-2021 SRON - Netherlands Institute for Space Research
All Rights Reserved
License: BSD-3-Clause
"""
from datetime import datetime, timedelta
import numpy as np
import xarray as xr
from pys5p.lib.plotlib import FIGinfo
from pys5p.s5p_xarray import data_to_xr
from pys5p.s5p_plot import S5Pplot
def get_test_data(data_sel=None, xy_min=-5, xy_max=5, delta=0.01, error=0):
"""
Generate synthetic data to simulate a square-detector image
"""
if data_sel is None:
data_sel = [(), ()]
res = np.arange(xy_min, xy_max, delta)
xmesh, ymesh = np.meshgrid(res[data_sel[1]], res[data_sel[0]])
zz1 = np.exp(-xmesh ** 2 - ymesh ** 2)
zz2 = np.exp(-(xmesh - 1) ** 2 - (ymesh - 1) ** 2)
data = (zz1 - zz2) * 2
data += np.random.default_rng().normal(0., error, data.shape)
return data_to_xr(data, long_name='bogus data', units='Volt')
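# With the defaults, the synthetic frame spans arange(-5, 5, 0.01) in both axes,
# i.e. a 1000 x 1000 image; the data_sel slices used below therefore cut bands
# centred on row/column 500.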
def run_draw_signal(plot):
"""
Run unit tests on S5Pplot::draw_signal
"""
msm = get_test_data(error=.1)
# msm_ref = get_test_data(error=0.025)
nlines = 40 # 35
fig_info_in = FIGinfo('right')
for ii in range(nlines):
fig_info_in.add(f'line {ii:02d}', 5 * '0123456789')
plot.draw_signal(msm,
title='Unit test of S5Pplot [draw_signal]',
sub_title='method=data; aspect=1; fig_pos=above')
plot.draw_signal(msm, zscale='diff',
title='Unit test of S5Pplot [draw_signal]',
sub_title='method=diff; aspect=1; fig_pos=above')
plot.draw_signal(msm, zscale='ratio',
title='Unit test of S5Pplot [draw_signal]',
sub_title='method=ratio; aspect=1; fig_pos=above')
#plot.draw_signal(msm, zscale='log',
# title='Unit test of S5Pplot [draw_signal]',
# sub_title='method=error; aspect=1; fig_pos=above')
plot.draw_signal(msm, fig_info=fig_info_in.copy(),
title='Unit test of S5Pplot [draw_signal]',
sub_title='aspect=1; fig_pos=right')
msm = get_test_data(data_sel=np.s_[500-250:500+250, :], error=.1)
plot.draw_signal(msm,
title='Unit test of S5Pplot [draw_signal]',
sub_title='aspect=2; fig_pos=above')
plot.draw_signal(msm, fig_info=fig_info_in.copy(),
title='Unit test of S5Pplot [draw_signal]',
sub_title='aspect=2; fig_pos=right')
msm = get_test_data(data_sel=np.s_[500-125:500+125, 500-375:500+375],
error=.1)
plot.draw_signal(msm,
title='Unit test of S5Pplot [draw_signal]',
sub_title='aspect=3; fig_pos=above')
plot.draw_signal(msm,
fig_info=fig_info_in.copy(),
title='Unit test of S5Pplot [draw_signal]',
sub_title='aspect=3; fig_pos=right')
msm = get_test_data(data_sel=np.s_[500-128:500+128, :], error=.1)
plot.draw_signal(msm,
title='Unit test of S5Pplot [draw_signal]',
sub_title='aspect=4; fig_pos=above')
plot.draw_signal(msm, fig_info=fig_info_in.copy(),
title='Unit test of S5Pplot [draw_signal]',
sub_title='aspect=4; fig_pos=right')
def run_draw_quality(plot):
"""
Run unit tests on S5Pplot::draw_quality
"""
print('Run unit tests on S5Pplot::draw_quality')
row = np.linspace(0, 1., 1000)
ref_data = np.repeat(row[None, :], 256, axis=0)
data = ref_data.copy()
data[125:175, 30:50] = 0.4
data[125:175, 50:70] = 0.95
data[75:125, 250:300] = 0.05
data[125:175, 600:650] = 0.95
data[75:125, 900:925] = 0.05
data[75:125, 825:875] = 0.4
plot.draw_quality(data,
title='Unit test of S5Pplot [draw_quality]',
sub_title='no reference')
plot.draw_quality(data, ref_data=ref_data,
title='Unit test of S5Pplot [draw_quality]',
sub_title='with reference')
def run_draw_cmp_swir(plot):
"""
Run unit tests on S5Pplot::draw_cmp_swir
"""
print('Run unit tests on S5Pplot::draw_cmp_swir')
msm = get_test_data(data_sel=np.s_[500-128:500+128, :], error=.1)
msm_ref = get_test_data(data_sel=np.s_[500-128:500+128, :], error=.025)
plot.draw_cmp_swir(msm, msm_ref.values,
title='Unit test of S5Pplot [draw_cmp_images]',
sub_title='test image')
plot.draw_cmp_swir(msm, msm_ref.values, add_residual=False,
title='Unit test of S5Pplot [draw_cmp_images]',
sub_title='test image')
plot.draw_cmp_swir(msm, msm_ref.values, add_model=False,
title='Unit test of S5Pplot [draw_cmp_images]',
sub_title='test image')
def run_draw_trend1d(plot):
"""
Run unit tests on S5Pplot::draw_trend1d
"""
print('Run unit tests on S5Pplot::draw_trend1d')
xx = np.arange(200) / 100
hk_params = [
('detector_temp', 'SWIR detector temperature', 'K', np.float32),
('grating_temp', 'SWIR grating temperature', 'K', np.float32),
('obm_temp', 'SWIR OBM temperature', 'K', np.float32)]
hk_dtype = np.dtype(
[(parm[0], parm[3]) for parm in hk_params])
hk_min = np.zeros(200, dtype=hk_dtype)
hk_avg = np.zeros(200, dtype=hk_dtype)
hk_max = np.zeros(200, dtype=hk_dtype)
data = 140. + (100 - np.arange(200)) / 1000
hk_min['detector_temp'][:] = data - .0125
hk_avg['detector_temp'][:] = data
hk_max['detector_temp'][:] = data + .0075
data = 202.1 + (100 - np.arange(200)) / 1000
hk_min['grating_temp'][:] = data - .15
hk_avg['grating_temp'][:] = data
hk_max['grating_temp'][:] = data + .175
data = 208.2 + (100 - np.arange(200)) / 1000
hk_min['obm_temp'][:] = data - .15
hk_avg['obm_temp'][:] = data
hk_max['obm_temp'][:] = data + .175
units = [parm[2] for parm in hk_params]
long_name = [parm[1] for parm in hk_params]
msm_mean = data_to_xr(hk_avg, dims=['orbit'], name='hk_mean',
long_name=long_name, units=units)
msm_range = data_to_xr(np.stack([hk_min, hk_max], axis=1),
dims=['orbit', 'range'], name='hk_range',
long_name=long_name, units=units)
hk_ds = xr.merge([msm_mean, msm_range])
msm1 = data_to_xr(np.sin(xx * np.pi), dims=['orbit'])
msm2 = data_to_xr(np.cos(xx * np.pi), dims=['orbit'])
plot.draw_trend1d(msm1,
title='Unit test of S5Pplot [draw_trend1d]',
sub_title='one dataset, no house-keeping')
plot.draw_trend1d(msm1, msm2=msm2,
title='Unit test of S5Pplot [draw_trend1d]',
sub_title='two datasets, no house-keeping')
hk_keys = [parm[0] for parm in hk_params]
plot.draw_trend1d(msm1, msm2=msm2,
hk_data=hk_ds, hk_keys=hk_keys[0:2],
title='Unit test of S5Pplot [draw_trend1d]',
sub_title='two datasets and house-keeping')
plot.draw_trend1d(msm1, msm2=msm2,
hk_data=hk_ds, hk_keys=hk_keys,
title='Unit test of S5Pplot [draw_trend1d]',
sub_title='two datasets and house-keeping')
def run_draw_lines(plot):
"""
Run unit tests on S5Pplot::draw_lines
"""
print('Run unit tests on S5Pplot::draw_lines')
xx = np.arange(200) / 100
plot.draw_lines(xx, np.sin(xx * np.pi), color=0,
label='sinus', marker='o', linestyle='-')
plot.draw_lines(xx, np.cos(xx * np.pi), color=1,
label='cosinus', marker='o', linestyle='-')
plot.draw_lines(None, None, ylim=[-1.05, 1.05],
xlabel='x-axis [Pi]', ylabel='y-axis',
title='Unit test of S5Pplot [draw_lines]',
sub_title='draw_lines [no time_axis]')
xx = np.arange(500) / 100
plot.draw_lines(xx, np.sin(xx * np.pi), color=0,
label='sinus', marker='o', linestyle='-')
plot.draw_lines(xx, np.cos(xx * np.pi), color=1,
label='cosinus', marker='o', linestyle='-')
plot.draw_lines(None, None, ylim=[-1.05, 1.05],
xlabel='x-axis [Pi]', ylabel='y-axis',
title='Unit test of S5Pplot [draw_lines]',
sub_title='draw_lines [no time_axis]')
customdate = datetime(2016, 1, 1, 13, 0, 0)
yy = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20]
xx = [customdate + timedelta(hours=i, minutes=4*i)
for i in range(len(yy))]
plot.draw_lines(xx, yy, color=0, label='mydata',
marker='o', linestyle='-')
plot.draw_lines(None, None, title='Unit test of S5Pplot [draw_lines]',
xlabel='x-axis', ylabel='y-axis',
sub_title='draw_lines [time_axis]')
def run_draw_qhist(plot):
"""
Run unit tests on S5Pplot::draw_qhist
"""
print('Run unit tests on S5Pplot::draw_qhist')
buff0 = np.repeat(0.9 + np.random.rand(1000) / 10, 56)
buff1 = np.repeat(np.random.rand(1000) / 10, 10)
buff2 = 0.1 + np.random.rand(1000) / 10
buff3 = np.repeat(0.2 + np.random.rand(1000) / 10, 2)
buff4 = np.repeat(0.3 + np.random.rand(1000) / 10, 3)
buff5 = np.repeat(0.4 + np.random.rand(1000) / 10, 4)
buff6 = np.repeat(0.5 + np.random.rand(1000) / 10, 8)
buff7 = np.repeat(0.6 + np.random.rand(1000) / 10, 12)
buff8 = np.repeat(0.7 + np.random.rand(1000) / 10, 20)
buff9 = np.repeat(0.8 + np.random.rand(1000) / 10, 40)
buffa = np.repeat(0.9 + np.random.rand(1000) / 10, 100)
frame = np.concatenate((buff0, buff1, buff2, buff3, buff4,
buff5, buff6, buff7, buff8, buff9,
buffa)).reshape(256, 1000)
msm = xr.merge([data_to_xr(frame, name='dpqm',
long_name='pixel-quality map'),
data_to_xr(frame, name='dpqm_dark',
long_name='pixel-quality map (dark)'),
data_to_xr(frame, name='dpqm_noise',
long_name='pixel-quality map (noise average)'),
data_to_xr(frame, name='dpqm_noise_var',
long_name='pixel-quality map (noise variance)')])
plot.draw_qhist(msm, title='Unit test of S5Pplot [draw_qhist]')
# --------------------------------------------------
def main():
"""
main function
"""
plot = S5Pplot('unit_test_s5p_plot2.pdf')
check_draw_signal = True
check_draw_cmp_images = True
check_draw_quality = True
check_draw_qhist = True
check_draw_trend1d = True
check_draw_lines = True
# ---------- UNIT TEST: draw_signal ----------
if check_draw_signal:
run_draw_signal(plot)
# ---------- UNIT TEST: draw_cmp_images ----------
if check_draw_cmp_images:
run_draw_cmp_swir(plot)
# ---------- UNIT TEST: draw_quality ----------
if check_draw_quality:
run_draw_quality(plot)
# ---------- UNIT TEST: draw_qhist ----------
if check_draw_qhist:
run_draw_qhist(plot)
# ---------- UNIT TEST: draw_trend1d ----------
if check_draw_trend1d:
run_draw_trend1d(plot)
# ---------- UNIT TEST: draw_lines ----------
if check_draw_lines:
run_draw_lines(plot)
# close figure
plot.close()
# - main code --------------------------------------
if __name__ == '__main__':
main()
| rmvanhees/pys5p | examples/unit_test_s5p_plot.py | Python | bsd-3-clause | 11,888 | 0.000252 |
#! /usr/bin/env python2
#-*- coding:utf-8 -*-
from builtins import range
import unittest
import logging
from miasm.analysis.machine import Machine
import miasm.os_dep.win_api_x86_32 as winapi
from miasm.os_dep.win_api_x86_32 import get_win_str_a, get_win_str_w
from miasm.core.utils import pck32
from miasm.jitter.csts import PAGE_READ, PAGE_WRITE
from miasm.core.locationdb import LocationDB
machine = Machine("x86_32")
loc_db = LocationDB()
jit = machine.jitter(loc_db)
jit.init_stack()
heap = winapi.winobjs.heap
class TestWinAPI(unittest.TestCase):
def test_DebuggingFunctions(self):
# BOOL WINAPI IsDebuggerPresent(void);
jit.push_uint32_t(0) # @return
winapi.kernel32_IsDebuggerPresent(jit)
vBool = jit.cpu.EAX
self.assertFalse(vBool)
def test_msvcrt_sprintf(self):
def alloc_str(s):
s += b"\x00"
ptr = heap.alloc(jit, len(s))
jit.vm.set_mem(ptr, s)
return ptr
fmt = alloc_str(b"'%s' %d")
str_ = alloc_str(b"coucou")
buf = heap.alloc(jit,1024)
jit.push_uint32_t(1111)
jit.push_uint32_t(str_)
jit.push_uint32_t(fmt)
jit.push_uint32_t(buf)
jit.push_uint32_t(0) # ret_ad
winapi.msvcrt_sprintf(jit)
ret = get_win_str_a(jit, buf)
self.assertEqual(ret, "'coucou' 1111")
def test_msvcrt_swprintf(self):
def alloc_str(s):
s = s.encode("utf-16le")
s += b"\x00\x00"
ptr = heap.alloc(jit, len(s))
jit.vm.set_mem(ptr, s)
return ptr
fmt = alloc_str("'%s' %d")
str_ = alloc_str("coucou")
buf = heap.alloc(jit,1024)
jit.push_uint32_t(1111)
jit.push_uint32_t(str_)
jit.push_uint32_t(fmt)
jit.push_uint32_t(buf)
jit.push_uint32_t(0) # ret_ad
winapi.msvcrt_swprintf(jit)
ret = get_win_str_w(jit, buf)
self.assertEqual(ret, u"'coucou' 1111")
def test_msvcrt_realloc(self):
jit.push_uint32_t(10)
jit.push_uint32_t(0) # ret_ad
winapi.msvcrt_malloc(jit)
ptr = jit.cpu.EAX
jit.push_uint32_t(20)
jit.push_uint32_t(ptr)
jit.push_uint32_t(0) # ret_ad
winapi.msvcrt_realloc(jit)
ptr2 = jit.cpu.EAX
self.assertNotEqual(ptr, ptr2)
self.assertEqual(heap.get_size(jit.vm,ptr2), 20)
def test_GetCurrentDirectory(self):
# DWORD WINAPI GetCurrentDirectory(size, buf)
# Test with a buffer long enough
addr = 0x80000
size = len(winapi.winobjs.cur_dir)+1
jit.vm.add_memory_page(addr, PAGE_READ | PAGE_WRITE, b"\x00" * (size), "")
jit.push_uint32_t(addr) # buf
jit.push_uint32_t(size) # size
jit.push_uint32_t(0) # @return
winapi.kernel32_GetCurrentDirectoryA(jit)
dir_ = get_win_str_a(jit, addr)
size_ret = jit.cpu.EAX
self.assertEqual(len(dir_), size_ret)
# Test with a buffer too small
jit.vm.set_mem(addr, b"\xFF"*size)
jit.push_uint32_t(addr) # buf
jit.push_uint32_t(5) # size
jit.push_uint32_t(0) # @return
winapi.kernel32_GetCurrentDirectoryA(jit)
size_ret = jit.cpu.EAX
self.assertEqual(len(dir_)+1, size_ret)
dir_short = get_win_str_a(jit, addr)
self.assertEqual(dir_short, dir_[:4])
def test_MemoryManagementFunctions(self):
# HGLOBAL WINAPI GlobalAlloc(_In_ UINT uFlags, _In_ SIZE_T dwBytes);
jit.push_uint32_t(10) # dwBytes
jit.push_uint32_t(0) # uFlags
jit.push_uint32_t(0) # @return
winapi.kernel32_GlobalAlloc(jit)
hMem = jit.cpu.EAX
self.assertTrue(hMem)
# HGLOBAL WINAPI GlobalFree(_In_ HGLOBAL hMem);
jit.push_uint32_t(hMem) # hMem
jit.push_uint32_t(0) # @return
winapi.kernel32_GlobalFree(jit)
hMem = jit.cpu.EAX
self.assertFalse(hMem)
# LPVOID WINAPI HeapAlloc(_In_ HANDLE hHeap, _In_ DWORD dwFlags, _In_ SIZE_T dwBytes);
jit.push_uint32_t(10) # dwBytes
jit.push_uint32_t(0) # dwFlags
jit.push_uint32_t(0) # hHeap
jit.push_uint32_t(0) # @return
winapi.kernel32_HeapAlloc(jit)
lpMem = jit.cpu.EAX
self.assertTrue(lpMem)
# BOOL WINAPI HeapFree(_In_ HANDLE hHeap, _In_ DWORD dwFlags, _In_ LPVOID lpMem);
jit.push_uint32_t(lpMem) # lpMem
jit.push_uint32_t(0) # dwFlags
jit.push_uint32_t(0) # hHeap
jit.push_uint32_t(0) # @return
winapi.kernel32_HeapFree(jit)
vBool = jit.cpu.EAX
self.assertTrue(vBool)
# HLOCAL WINAPI LocalAlloc(_In_ UINT uFlags, _In_ SIZE_T uBytes);
jit.push_uint32_t(10) # uBytes
jit.push_uint32_t(0) # uFlags
jit.push_uint32_t(0) # @return
winapi.kernel32_LocalAlloc(jit)
hMem = jit.cpu.EAX
self.assertTrue(hMem)
# HLOCAL WINAPI LocalFree(_In_ HLOCAL hMem);
jit.push_uint32_t(hMem) # hMem
jit.push_uint32_t(0) # @return
winapi.kernel32_LocalFree(jit)
hMem = jit.cpu.EAX
self.assertFalse(hMem)
def test_ProcessAndThreadFunctions(self):
# HANDLE WINAPI GetCurrentProcess(void);
jit.push_uint32_t(0) # @return
winapi.kernel32_GetCurrentProcess(jit)
hProc = jit.cpu.EAX
self.assertTrue(hProc)
# DWORD WINAPI GetCurrentProcessId(void);
jit.push_uint32_t(0) # @return
winapi.kernel32_GetCurrentProcessId(jit)
dwProc = jit.cpu.EAX
self.assertTrue(dwProc)
def test_SystemInformationFunctions(self):
# DWORD WINAPI GetVersion(void);
jit.push_uint32_t(0) # @return
winapi.kernel32_GetVersion(jit)
dwVer = jit.cpu.EAX
self.assertTrue(dwVer)
# BOOL WINAPI GetVersionEx(_Inout_ LPOSVERSIONINFO lpVersionInfo);
jit.vm.set_mem(jit.stack_base, pck32(0x9c))
jit.push_uint32_t(jit.stack_base) # lpVersionInfo
jit.push_uint32_t(0) # @return
winapi.kernel32_GetVersionExA(jit)
vBool = jit.cpu.EAX
self.assertTrue(vBool)
def test_TimeFunctions(self):
# DWORD WINAPI GetTickCount(void);
jit.push_uint32_t(0) # @return
winapi.kernel32_GetTickCount(jit)
dwTime = jit.cpu.EAX
self.assertTrue(dwTime)
def test_ToolHelpFunctions(self):
# HANDLE WINAPI CreateToolhelp32Snapshot(_In_ DWORD dwFlags, _In_ DWORD th32ProcessID);
jit.push_uint32_t(0) # th32ProcessID
jit.push_uint32_t(0) # dwFlags
jit.push_uint32_t(0) # @return
winapi.kernel32_CreateToolhelp32Snapshot(jit)
hSnap = jit.cpu.EAX
self.assertTrue(hSnap)
# BOOL WINAPI Process32First(_In_ HANDLE hSnapshot, _Inout_ LPPROCESSENTRY32 lppe);
jit.push_uint32_t(jit.stack_base) # lppe
jit.push_uint32_t(hSnap) # hSnapshot
jit.push_uint32_t(0) # @return
winapi.kernel32_Process32First(jit)
vBool = jit.cpu.EAX
self.assertTrue(vBool)
# BOOL WINAPI Process32Next(_In_ HANDLE hSnapshot, _Out_ LPPROCESSENTRY32 lppe);
for i in range(3, -1, -1):
jit.push_uint32_t(jit.stack_base) # lppe
jit.push_uint32_t(hSnap) # hSnapshot
jit.push_uint32_t(0) # @return
winapi.kernel32_Process32Next(jit)
vBool = jit.cpu.EAX
if i: self.assertTrue(vBool)
else: self.assertFalse(vBool)
def test_VirtualXXFunctions(self):
def call_vprotect(jitter, addr, size, protect):
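            # VirtualProtect(lpAddress, dwSize, flNewProtect, lpflOldProtect):
            # the arguments are pushed right-to-left (stdcall), followed by a
            # dummy return address of 0.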
jitter.push_uint32_t(0x0)
jitter.push_uint32_t(protect)
jitter.push_uint32_t(size)
jitter.push_uint32_t(addr)
jitter.push_uint32_t(0)
winapi.kernel32_VirtualProtect(jitter)
jit.push_uint32_t(0x2)
jit.push_uint32_t(0x2)
jit.push_uint32_t(0x4000)
jit.push_uint32_t(0x1000)
jit.push_uint32_t(0)
winapi.kernel32_VirtualAlloc(jit)
alloc_addr = jit.cpu.EAX
self.assertEqual(jit.vm.get_all_memory()[alloc_addr]["size"], 0x4000)
self.assertEqual(jit.vm.get_all_memory()[alloc_addr]["access"],
winapi.ACCESS_DICT[0x2])
# Full area
call_vprotect(jit, alloc_addr, 0x4000, 0x1)
self.assertEqual(jit.vm.get_all_memory()[alloc_addr]["access"],
winapi.ACCESS_DICT[0x1])
# Splits area [0--1000] [1000 -- 3000] [3000 -- 4000]
call_vprotect(jit, alloc_addr+0x1000, 0x2000, 0x40)
print(jit.vm)
for (addr, size, access) in [
(alloc_addr, 0x1000, 0x1),
(alloc_addr + 0x1000, 0x2000, 0x40),
(alloc_addr + 0x3000, 0x1000, 0x1)
]:
self.assertEqual(jit.vm.get_all_memory()[addr]["size"], size)
self.assertEqual(jit.vm.get_all_memory()[addr]["access"],
winapi.ACCESS_DICT[access])
# Protect over split areas
call_vprotect(jit, alloc_addr, 0x4000, 0x4)
for (addr, size) in [
(alloc_addr, 0x1000),
(alloc_addr + 0x1000, 0x2000),
(alloc_addr + 0x3000, 0x1000)
]:
self.assertEqual(jit.vm.get_all_memory()[addr]["size"], size)
self.assertEqual(jit.vm.get_all_memory()[addr]["access"],
winapi.ACCESS_DICT[0x4])
if __name__ == '__main__':
testsuite = unittest.TestLoader().loadTestsFromTestCase(TestWinAPI)
report = unittest.TextTestRunner(verbosity=2).run(testsuite)
exit(len(report.errors + report.failures))
| serpilliere/miasm | test/os_dep/win_api_x86_32.py | Python | gpl-2.0 | 9,960 | 0.002309 |
# TODO: this must be a stub!
| vg/netsukuku | pyntk/ntk/sim/network/route.py | Python | gpl-2.0 | 29 | 0 |
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansible.modules.network.nxos import nxos_feature
from .nxos_module import TestNxosModule, load_fixture, set_module_args
class TestNxosFeatureModule(TestNxosModule):
module = nxos_feature
def setUp(self):
self.mock_run_commands = patch('ansible.modules.network.nxos.nxos_feature.run_commands')
self.run_commands = self.mock_run_commands.start()
self.mock_load_config = patch('ansible.modules.network.nxos.nxos_feature.load_config')
self.load_config = self.mock_load_config.start()
def tearDown(self):
self.mock_run_commands.stop()
self.mock_load_config.stop()
def load_fixtures(self, commands=None, device=''):
def load_from_file(*args, **kwargs):
module, commands = args
output = list()
for item in commands:
try:
obj = json.loads(item['command'])
command = obj['command']
except ValueError:
command = item['command']
filename = '%s.txt' % str(command).replace(' ', '_')
output.append(load_fixture('nxos_feature', filename))
return output
self.run_commands.side_effect = load_from_file
self.load_config.return_value = None
def test_nxos_feature_enable(self):
set_module_args(dict(feature='nve', state='enabled'))
result = self.execute_module(changed=True)
self.assertEqual(result['commands'], ['feature nv overlay'])
def test_nxos_feature_disable(self):
set_module_args(dict(feature='ospf', state='disabled'))
result = self.execute_module(changed=True)
self.assertEqual(result['commands'], ['no feature ospf'])
| qrkourier/ansible | test/units/modules/network/nxos/test_nxos_feature.py | Python | gpl-3.0 | 2,591 | 0.000772 |
# Copyright (C) 2013 Adam Stokes <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from sos.plugins import Plugin, UbuntuPlugin
class Maas(Plugin, UbuntuPlugin):
"""Ubuntu Metal-As-A-Service
"""
plugin_name = 'maas'
profiles = ('sysmgmt',)
option_list = [
('profile-name',
'The name with which you will later refer to this remote', '', False),
('url', 'The URL of the remote API', '', False),
('credentials',
'The credentials, also known as the API key', '', False)
]
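    # These options are normally supplied on the command line, e.g. (illustrative):
    #   sosreport -o maas -k maas.profile-name=<name> -k maas.url=<api-url> \
    #             -k maas.credentials=<api-key>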
def _has_login_options(self):
return self.get_option("url") and self.get_option("credentials") \
and self.get_option("profile-name")
def _remote_api_login(self):
ret = self.call_ext_prog("maas login %s %s %s" % (
self.get_option("profile-name"),
self.get_option("url"),
self.get_option("credentials")))
return ret['status'] == 0
def setup(self):
self.add_copy_spec([
"/etc/squid-deb-proxy",
"/etc/maas",
"/var/lib/maas/dhcp*",
"/var/log/apache2*",
"/var/log/maas*",
"/var/log/upstart/maas-*",
])
self.add_cmd_output([
"apt-cache policy maas-*",
"apt-cache policy python-django-*",
])
if self.is_installed("maas-region-controller"):
self.add_cmd_output([
"maas-region-admin dumpdata",
])
if self._has_login_options():
if self._remote_api_login():
self.add_cmd_output("maas %s commissioning-results list" %
self.get_option("profile-name"))
else:
self._log_error(
"Cannot login into MAAS remote API with provided creds.")
# vim: set et ts=4 sw=4 :
| pierg75/pier-sosreport | sos/plugins/maas.py | Python | gpl-2.0 | 2,550 | 0 |
#!/usr/bin/env python
__author__ = 'Mike McCann'
__copyright__ = '2013'
__license__ = 'GPL v3'
__contact__ = 'mccann at mbari.org'
__doc__ = '''
Contains class for common routines for loading all BEDS data
Mike McCann
MBARI 13 May 2013
@undocumented: __doc__ parser
@status: production
@license: GPL
'''
import os
import sys
# Insert Django App directory (parent of config) into python path
sys.path.insert(0, os.path.abspath(os.path.join(
os.path.dirname(__file__), "../../")))
os.environ['DJANGO_SETTINGS_MODULE'] = 'config.settings.local'
# django >=1.7
try:
import django
django.setup()
except AttributeError:
pass
import DAPloaders
from loaders import LoadScript
import matplotlib as mpl
mpl.use('Agg') # Force matplotlib to not use any Xwindows backend
import matplotlib.pyplot as plt
from matplotlib.colors import rgb2hex
import numpy as np
class BEDSLoader(LoadScript):
'''
Common routines for loading all BEDS data
'''
num_beds = 9
beds_names = [('bed{:02d}').format(n) for n in range(num_beds+1)]
# See http://matplotlib.org/examples/color/colormaps_reference.html
colors = {}
reds = plt.cm.Reds
for b, c in zip(beds_names, reds(np.arange(0, reds.N, reds.N/num_beds))):
colors[b] = rgb2hex(c)[1:]
# Duplicate color for Trajectory 't' version
colors[b + 't'] = rgb2hex(c)[1:]
def loadBEDS(self, stride=None, featureType='trajectory'):
'''
BEDS specific load functions; featureType can be 'trajectory' or 'timeSeries'.
Use 'trajectory' for events that we've fudged into a trajectory netCDF file
using the canyon's thalweg. Use 'timeSeries' for events for which the BED
does not significantly translate.
'''
stride = stride or self.stride
for (aName, pName, file, plotTimeSeriesDepth, fg) in zip(
[ a.split('/')[-1] + ' (stride=%d)' % stride for a in self.bed_files],
self.bed_platforms, self.bed_files, self.bed_depths, self.bed_framegrabs):
url = os.path.join(self.bed_base, file)
try:
if featureType.lower() == 'trajectory':
# To get timeSeries plotting for trajectories (in the Parameter tab of the UI)
# assign a plotTimeSeriesDepth value of the starting depth in meters.
DAPloaders.runBEDTrajectoryLoader(url, self.campaignName, self.campaignDescription,
aName, pName, self.colors[pName.lower()], 'bed',
'deployment', self.bed_parms, self.dbAlias, stride,
plotTimeSeriesDepth=plotTimeSeriesDepth,
grdTerrain=self.grdTerrain, framegrab=fg)
elif featureType.lower() == 'timeseries':
DAPloaders.runTimeSeriesLoader(url, self.campaignName, self.campaignDescription,
aName, pName, self.colors[pName.lower()], 'bed',
'deployment', self.bed_parms, self.dbAlias, stride)
self.addPlatformResources('https://stoqs.mbari.org/x3d/beds/beds_housing_with_axes_src_scene.x3d',
pName, scalefactor=10)
except (DAPloaders.OpendapError, DAPloaders.InvalidSliceRequest):
pass
if __name__ == '__main__':
'''
Test operation of this class
'''
cl = BEDSLoader('stoqs_beds2013', 'Test BEDS Load')
cl.stride = 1
cl.bed_base = 'http://odss-test.shore.mbari.org/thredds/dodsC/BEDS_2013/beds01/'
cl.bed_files = ['BED00039.nc']
cl.bed_parms = ['XA', 'YA', 'ZA', 'XR', 'YR', 'ZR', 'PRESS', 'BED_DEPTH']
cl.loadBEDS()
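    # A timeSeries-style load (for BEDs that do not significantly translate)
    # would look like the following; the file name is a placeholder:
    # cl.bed_files = ['BED00039_ts.nc']
    # cl.loadBEDS(featureType='timeSeries')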
| danellecline/stoqs | stoqs/loaders/BEDS/__init__.py | Python | gpl-3.0 | 3,934 | 0.009151 |
from flask.ext.migrate import MigrateCommand
from flask.ext.script import Manager
from flask.ext.script.commands import ShowUrls, Clean
from commands.testcase_command import TestCommand
from flask_learn import create_app
manager = Manager(create_app)
manager.add_option("-c", "--config", dest='config',
help="application config file",
required=False)
manager.add_command("test", TestCommand)
manager.add_command("showurls", ShowUrls())
manager.add_command("clean", Clean())
manager.add_command("db", MigrateCommand)
if __name__ == '__main__':
manager.run()
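# Typical invocations (illustrative only; the command names come from the
# registrations above, the project layout is assumed):
#   python manage.py showurls      # print the registered routes
#   python manage.py db upgrade    # Flask-Migrate database commands
#   python manage.py test          # run the custom TestCommand
#   python manage.py clean         # remove generated artifacts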
| KotiyaSenya/FlaskLearn | manage.py | Python | mit | 605 | 0 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/component/weapon/shared_reinforcement_core.iff"
result.attribute_template_id = -1
result.stfName("craft_weapon_ingredients_n","reinforcement_core")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | anhstudios/swganh | data/scripts/templates/object/tangible/component/weapon/shared_reinforcement_core.py | Python | mit | 484 | 0.045455 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=unused-import,g-bad-import-order
"""Classes and functions for building TensorFlow graphs.
## Core graph data structures
@@Graph
@@Operation
@@Tensor
## Tensor types
@@DType
@@as_dtype
## Utility functions
@@device
@@name_scope
@@control_dependencies
@@convert_to_tensor
@@convert_to_tensor_or_indexed_slices
@@get_default_graph
@@reset_default_graph
@@import_graph_def
@@load_file_system_library
@@load_op_library
## Graph collections
@@add_to_collection
@@get_collection
@@get_collection_ref
@@GraphKeys
## Defining new operations
@@RegisterGradient
@@NoGradient
@@RegisterShape
@@TensorShape
@@Dimension
@@op_scope
@@get_seed
## For libraries building on TensorFlow
@@register_tensor_conversion_function
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Classes used when building a Graph.
from tensorflow.python.framework.device import DeviceSpec
from tensorflow.python.framework.ops import Graph
from tensorflow.python.framework.ops import Operation
from tensorflow.python.framework.ops import Tensor
from tensorflow.python.framework.ops import SparseTensor
from tensorflow.python.framework.ops import SparseTensorValue
from tensorflow.python.framework.ops import IndexedSlices
# Utilities used when building a Graph.
from tensorflow.python.framework.ops import device
from tensorflow.python.framework.ops import name_scope
from tensorflow.python.framework.ops import op_scope
from tensorflow.python.framework.ops import control_dependencies
from tensorflow.python.framework.ops import get_default_graph
from tensorflow.python.framework.ops import reset_default_graph
from tensorflow.python.framework.ops import GraphKeys
from tensorflow.python.framework.ops import add_to_collection
from tensorflow.python.framework.ops import get_collection
from tensorflow.python.framework.ops import get_collection_ref
from tensorflow.python.framework.ops import convert_to_tensor
from tensorflow.python.framework.ops import convert_to_tensor_or_indexed_slices
from tensorflow.python.framework.random_seed import get_seed
from tensorflow.python.framework.random_seed import set_random_seed
from tensorflow.python.framework.importer import import_graph_def
# Needed when you defined a new Op in C++.
from tensorflow.python.framework.ops import RegisterGradient
from tensorflow.python.framework.ops import NoGradient
from tensorflow.python.framework.ops import RegisterShape
from tensorflow.python.framework.tensor_shape import Dimension
from tensorflow.python.framework.tensor_shape import TensorShape
# Needed when interfacing tensorflow to new array libraries
from tensorflow.python.framework.ops import register_tensor_conversion_function
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.framework.dtypes import *
# Load a TensorFlow plugin
from tensorflow.python.framework.load_library import *
# pylint: enable=wildcard-import
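if __name__ == "__main__":
  # Illustrative sketch (not part of the original module): the symbols
  # re-exported above are enough to build and inspect a tiny graph.
  g = Graph()
  with g.as_default():
    convert_to_tensor([1.0, 2.0], name="example_constant")
  print([op.name for op in g.get_operations()])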
| peterbraden/tensorflow | tensorflow/python/framework/framework_lib.py | Python | apache-2.0 | 3,634 | 0 |
from . import db
class CustomPlaceholder(db.Model):
"""email notifications model class"""
__tablename__ = 'custom_placeholder'
id = db.Column(db.Integer,
primary_key=True)
name = db.Column(db.String)
url = db.Column(db.String)
thumbnail = db.Column(db.String)
copyright = db.Column(db.String)
origin = db.Column(db.String)
def __init__(self,
name=None,
url=None,
thumbnail=None,
copyright=None,
origin=None):
self.name = name
self.url = url
self.thumbnail = thumbnail
self.copyright = copyright
self.origin = origin
def __str__(self):
return 'Name:' + unicode(self.name).encode('utf-8')
def __unicode__(self):
return unicode(self.id)
@property
def serialize(self):
"""Return object data in easily serializable format"""
return {
'id': self.id,
'name': self.name,
'url': self.url,
'thumbnail': self.thumbnail,
'copyright': self.copyright,
'origin': self.origin
}
| gaeun/open-event-orga-server | app/models/custom_placeholder.py | Python | gpl-3.0 | 1,176 | 0 |
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: profitbricks
short_description: Create, destroy, start, stop, and reboot a ProfitBricks virtual machine.
description:
     - Create, destroy, update, start, stop, and reboot a ProfitBricks virtual machine. When the virtual machine is created, the module can optionally wait for it to be 'running' before returning. This module has a dependency on profitbricks >= 1.0.0
version_added: "2.0"
options:
auto_increment:
description:
- Whether or not to increment a single number in the name for created virtual machines.
default: yes
choices: ["yes", "no"]
name:
description:
- The name of the virtual machine.
required: true
image:
description:
- The system image ID for creating the virtual machine, e.g. a3eae284-a2fe-11e4-b187-5f1f641608c8.
required: true
image_password:
description:
- Password set for the administrative user.
required: false
version_added: '2.2'
ssh_keys:
description:
- Public SSH keys allowing access to the virtual machine.
required: false
version_added: '2.2'
datacenter:
description:
- The datacenter to provision this virtual machine.
required: false
default: null
cores:
description:
- The number of CPU cores to allocate to the virtual machine.
required: false
default: 2
ram:
description:
- The amount of memory to allocate to the virtual machine.
required: false
default: 2048
cpu_family:
description:
- The CPU family type to allocate to the virtual machine.
required: false
default: AMD_OPTERON
choices: [ "AMD_OPTERON", "INTEL_XEON" ]
version_added: '2.2'
volume_size:
description:
- The size in GB of the boot volume.
required: false
default: 10
bus:
description:
- The bus type for the volume.
required: false
default: VIRTIO
choices: [ "IDE", "VIRTIO"]
instance_ids:
description:
- list of instance ids, currently only used when state='absent' to remove instances.
required: false
count:
description:
- The number of virtual machines to create.
required: false
default: 1
location:
description:
- The datacenter location. Use only if you want to create the Datacenter or else this value is ignored.
required: false
default: us/las
choices: [ "us/las", "de/fra", "de/fkb" ]
assign_public_ip:
description:
- This will assign the machine to the public LAN. If no LAN exists with public Internet access it is created.
required: false
default: false
lan:
description:
- The ID of the LAN you wish to add the servers to.
required: false
default: 1
subscription_user:
description:
      - The ProfitBricks username. Overrides the PB_SUBSCRIPTION_ID environment variable.
required: false
default: null
subscription_password:
description:
      - The ProfitBricks password. Overrides the PB_PASSWORD environment variable.
required: false
default: null
wait:
description:
- wait for the instance to be in state 'running' before returning
required: false
default: "yes"
choices: [ "yes", "no" ]
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 600
remove_boot_volume:
description:
- remove the bootVolume of the virtual machine you're destroying.
required: false
default: "yes"
choices: ["yes", "no"]
state:
description:
- create or terminate instances
required: false
default: 'present'
choices: [ "running", "stopped", "absent", "present" ]
requirements:
- "profitbricks"
- "python >= 2.6"
author: Matt Baldwin ([email protected])
'''
EXAMPLES = '''
# Note: These examples do not set authentication details; see the ProfitBricks documentation for details.
# Provisioning example. This will create three servers and enumerate their names.
- profitbricks:
datacenter: Tardis One
name: web%02d.stackpointcloud.com
cores: 4
ram: 2048
volume_size: 50
cpu_family: INTEL_XEON
image: a3eae284-a2fe-11e4-b187-5f1f641608c8
location: us/las
count: 3
assign_public_ip: true
# Removing Virtual machines
- profitbricks:
datacenter: Tardis One
instance_ids:
- 'web001.stackpointcloud.com'
- 'web002.stackpointcloud.com'
- 'web003.stackpointcloud.com'
wait_timeout: 500
state: absent
# Starting Virtual Machines.
- profitbricks:
datacenter: Tardis One
instance_ids:
- 'web001.stackpointcloud.com'
- 'web002.stackpointcloud.com'
- 'web003.stackpointcloud.com'
wait_timeout: 500
state: running
# Stopping Virtual Machines
- profitbricks:
datacenter: Tardis One
instance_ids:
- 'web001.stackpointcloud.com'
- 'web002.stackpointcloud.com'
- 'web003.stackpointcloud.com'
wait_timeout: 500
state: stopped
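# Provisioning with SSH key access (illustrative sketch, not from the original
# module docs; the names and key material below are placeholders).
- profitbricks:
    datacenter: Tardis One
    name: jump01.stackpointcloud.com
    cores: 2
    ram: 2048
    volume_size: 20
    image: a3eae284-a2fe-11e4-b187-5f1f641608c8
    ssh_keys:
      - 'ssh-rsa AAAA...placeholder... user@example'
    assign_public_ip: true
    state: present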
'''
import re
import uuid
import time
HAS_PB_SDK = True
try:
from profitbricks.client import ProfitBricksService, Volume, Server, Datacenter, NIC, LAN
except ImportError:
HAS_PB_SDK = False
LOCATIONS = ['us/las',
'de/fra',
'de/fkb']
uuid_match = re.compile(
'[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12}', re.I)
def _wait_for_completion(profitbricks, promise, wait_timeout, msg):
if not promise: return
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time():
time.sleep(5)
operation_result = profitbricks.get_request(
request_id=promise['requestId'],
status=True)
if operation_result['metadata']['status'] == "DONE":
return
elif operation_result['metadata']['status'] == "FAILED":
            raise Exception(
                'Request ' + msg + ' "' + str(
                    promise['requestId']) + '" failed to complete.')
raise Exception(
'Timed out waiting for async operation ' + msg + ' "' + str(
promise['requestId']
) + '" to complete.')
def _create_machine(module, profitbricks, datacenter, name):
cores = module.params.get('cores')
ram = module.params.get('ram')
cpu_family = module.params.get('cpu_family')
volume_size = module.params.get('volume_size')
disk_type = module.params.get('disk_type')
image_password = module.params.get('image_password')
ssh_keys = module.params.get('ssh_keys')
bus = module.params.get('bus')
lan = module.params.get('lan')
assign_public_ip = module.params.get('assign_public_ip')
subscription_user = module.params.get('subscription_user')
subscription_password = module.params.get('subscription_password')
location = module.params.get('location')
image = module.params.get('image')
assign_public_ip = module.boolean(module.params.get('assign_public_ip'))
wait = module.params.get('wait')
wait_timeout = module.params.get('wait_timeout')
if assign_public_ip:
public_found = False
lans = profitbricks.list_lans(datacenter)
for lan in lans['items']:
if lan['properties']['public']:
public_found = True
lan = lan['id']
if not public_found:
i = LAN(
name='public',
public=True)
lan_response = profitbricks.create_lan(datacenter, i)
_wait_for_completion(profitbricks, lan_response,
wait_timeout, "_create_machine")
lan = lan_response['id']
v = Volume(
name=str(uuid.uuid4()).replace('-', '')[:10],
size=volume_size,
image=image,
image_password=image_password,
ssh_keys=ssh_keys,
disk_type=disk_type,
bus=bus)
n = NIC(
lan=int(lan)
)
s = Server(
name=name,
ram=ram,
cores=cores,
cpu_family=cpu_family,
create_volumes=[v],
nics=[n],
)
try:
create_server_response = profitbricks.create_server(
datacenter_id=datacenter, server=s)
_wait_for_completion(profitbricks, create_server_response,
wait_timeout, "create_virtual_machine")
server_response = profitbricks.get_server(
datacenter_id=datacenter,
server_id=create_server_response['id'],
depth=3
)
except Exception as e:
module.fail_json(msg="failed to create the new server: %s" % str(e))
else:
return server_response
def _startstop_machine(module, profitbricks, datacenter_id, server_id):
state = module.params.get('state')
try:
if state == 'running':
profitbricks.start_server(datacenter_id, server_id)
else:
profitbricks.stop_server(datacenter_id, server_id)
return True
except Exception as e:
module.fail_json(msg="failed to start or stop the virtual machine %s: %s" % (name, str(e)))
def _create_datacenter(module, profitbricks):
datacenter = module.params.get('datacenter')
location = module.params.get('location')
wait_timeout = module.params.get('wait_timeout')
i = Datacenter(
name=datacenter,
location=location
)
try:
datacenter_response = profitbricks.create_datacenter(datacenter=i)
_wait_for_completion(profitbricks, datacenter_response,
wait_timeout, "_create_datacenter")
return datacenter_response
except Exception as e:
module.fail_json(msg="failed to create the new server(s): %s" % str(e))
def create_virtual_machine(module, profitbricks):
"""
Create new virtual machine
module : AnsibleModule object
profitbricks: authenticated profitbricks object
Returns:
True if a new virtual machine was created, false otherwise
"""
datacenter = module.params.get('datacenter')
name = module.params.get('name')
auto_increment = module.params.get('auto_increment')
count = module.params.get('count')
lan = module.params.get('lan')
wait_timeout = module.params.get('wait_timeout')
failed = True
datacenter_found = False
virtual_machines = []
virtual_machine_ids = []
# Locate UUID for datacenter if referenced by name.
datacenter_list = profitbricks.list_datacenters()
datacenter_id = _get_datacenter_id(datacenter_list, datacenter)
if datacenter_id:
datacenter_found = True
if not datacenter_found:
datacenter_response = _create_datacenter(module, profitbricks)
datacenter_id = datacenter_response['id']
_wait_for_completion(profitbricks, datacenter_response,
wait_timeout, "create_virtual_machine")
if auto_increment:
numbers = set()
count_offset = 1
try:
name % 0
        except TypeError as e:
if e.message.startswith('not all'):
name = '%s%%d' % name
else:
module.fail_json(msg=e.message)
number_range = xrange(count_offset, count_offset + count + len(numbers))
available_numbers = list(set(number_range).difference(numbers))
names = []
numbers_to_use = available_numbers[:count]
for number in numbers_to_use:
names.append(name % number)
else:
names = [name]
# Prefetch a list of servers for later comparison.
server_list = profitbricks.list_servers(datacenter_id)
for name in names:
# Skip server creation if the server already exists.
if _get_server_id(server_list, name):
continue
create_response = _create_machine(module, profitbricks, str(datacenter_id), name)
nics = profitbricks.list_nics(datacenter_id, create_response['id'])
for n in nics['items']:
if lan == n['properties']['lan']:
create_response.update({'public_ip': n['properties']['ips'][0]})
virtual_machines.append(create_response)
failed = False
results = {
'failed': failed,
'machines': virtual_machines,
'action': 'create',
'instance_ids': {
'instances': [i['id'] for i in virtual_machines],
}
}
return results
def remove_virtual_machine(module, profitbricks):
"""
Removes a virtual machine.
This will remove the virtual machine along with the bootVolume.
module : AnsibleModule object
profitbricks: authenticated profitbricks object.
Not yet supported: handle deletion of attached data disks.
Returns:
True if a new virtual server was deleted, false otherwise
"""
datacenter = module.params.get('datacenter')
instance_ids = module.params.get('instance_ids')
remove_boot_volume = module.params.get('remove_boot_volume')
changed = False
if not isinstance(module.params.get('instance_ids'), list) or len(module.params.get('instance_ids')) < 1:
module.fail_json(msg='instance_ids should be a list of virtual machine ids or names, aborting')
# Locate UUID for datacenter if referenced by name.
datacenter_list = profitbricks.list_datacenters()
datacenter_id = _get_datacenter_id(datacenter_list, datacenter)
if not datacenter_id:
module.fail_json(msg='Virtual data center \'%s\' not found.' % str(datacenter))
# Prefetch server list for later comparison.
server_list = profitbricks.list_servers(datacenter_id)
for instance in instance_ids:
# Locate UUID for server if referenced by name.
server_id = _get_server_id(server_list, instance)
if server_id:
# Remove the server's boot volume
if remove_boot_volume:
_remove_boot_volume(module, profitbricks, datacenter_id, server_id)
# Remove the server
try:
server_response = profitbricks.delete_server(datacenter_id, server_id)
except Exception as e:
module.fail_json(msg="failed to terminate the virtual server: %s" % str(e))
else:
changed = True
return changed
def _remove_boot_volume(module, profitbricks, datacenter_id, server_id):
"""
Remove the boot volume from the server
"""
try:
server = profitbricks.get_server(datacenter_id, server_id)
volume_id = server['properties']['bootVolume']['id']
volume_response = profitbricks.delete_volume(datacenter_id, volume_id)
except Exception as e:
module.fail_json(msg="failed to remove the server's boot volume: %s" % str(e))
def startstop_machine(module, profitbricks, state):
"""
Starts or Stops a virtual machine.
module : AnsibleModule object
profitbricks: authenticated profitbricks object.
Returns:
True when the servers process the action successfully, false otherwise.
"""
if not isinstance(module.params.get('instance_ids'), list) or len(module.params.get('instance_ids')) < 1:
module.fail_json(msg='instance_ids should be a list of virtual machine ids or names, aborting')
wait = module.params.get('wait')
wait_timeout = module.params.get('wait_timeout')
changed = False
datacenter = module.params.get('datacenter')
instance_ids = module.params.get('instance_ids')
# Locate UUID for datacenter if referenced by name.
datacenter_list = profitbricks.list_datacenters()
datacenter_id = _get_datacenter_id(datacenter_list, datacenter)
if not datacenter_id:
module.fail_json(msg='Virtual data center \'%s\' not found.' % str(datacenter))
# Prefetch server list for later comparison.
server_list = profitbricks.list_servers(datacenter_id)
for instance in instance_ids:
# Locate UUID of server if referenced by name.
server_id = _get_server_id(server_list, instance)
if server_id:
_startstop_machine(module, profitbricks, datacenter_id, server_id)
changed = True
if wait:
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time():
matched_instances = []
for res in profitbricks.list_servers(datacenter_id)['items']:
if state == 'running':
if res['properties']['vmState'].lower() == state:
matched_instances.append(res)
elif state == 'stopped':
if res['properties']['vmState'].lower() == 'shutoff':
matched_instances.append(res)
if len(matched_instances) < len(instance_ids):
time.sleep(5)
else:
break
if wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg="wait for virtual machine state timeout on %s" % time.asctime())
return (changed)
def _get_datacenter_id(datacenters, identity):
"""
Fetch and return datacenter UUID by datacenter name if found.
"""
for datacenter in datacenters['items']:
if identity in (datacenter['properties']['name'], datacenter['id']):
return datacenter['id']
return None
def _get_server_id(servers, identity):
"""
Fetch and return server UUID by server name if found.
"""
for server in servers['items']:
if identity in (server['properties']['name'], server['id']):
return server['id']
return None
def main():
module = AnsibleModule(
argument_spec=dict(
datacenter=dict(),
name=dict(),
image=dict(),
cores=dict(type='int', default=2),
ram=dict(type='int', default=2048),
cpu_family=dict(choices=['AMD_OPTERON', 'INTEL_XEON'],
default='AMD_OPTERON'),
volume_size=dict(type='int', default=10),
disk_type=dict(choices=['HDD', 'SSD'], default='HDD'),
image_password=dict(default=None),
ssh_keys=dict(type='list', default=[]),
bus=dict(choices=['VIRTIO', 'IDE'], default='VIRTIO'),
lan=dict(type='int', default=1),
count=dict(type='int', default=1),
auto_increment=dict(type='bool', default=True),
instance_ids=dict(type='list', default=[]),
subscription_user=dict(),
subscription_password=dict(),
location=dict(choices=LOCATIONS, default='us/las'),
assign_public_ip=dict(type='bool', default=False),
wait=dict(type='bool', default=True),
wait_timeout=dict(type='int', default=600),
remove_boot_volume=dict(type='bool', default=True),
state=dict(default='present'),
)
)
if not HAS_PB_SDK:
module.fail_json(msg='profitbricks required for this module')
subscription_user = module.params.get('subscription_user')
subscription_password = module.params.get('subscription_password')
wait = module.params.get('wait')
wait_timeout = module.params.get('wait_timeout')
profitbricks = ProfitBricksService(
username=subscription_user,
password=subscription_password)
state = module.params.get('state')
if state == 'absent':
if not module.params.get('datacenter'):
            module.fail_json(msg='datacenter parameter is required ' +
                                 'for terminating machines.')
try:
(changed) = remove_virtual_machine(module, profitbricks)
module.exit_json(changed=changed)
except Exception as e:
module.fail_json(msg='failed to set instance state: %s' % str(e))
elif state in ('running', 'stopped'):
if not module.params.get('datacenter'):
module.fail_json(msg='datacenter parameter is required for ' +
'running or stopping machines.')
try:
(changed) = startstop_machine(module, profitbricks, state)
module.exit_json(changed=changed)
except Exception as e:
module.fail_json(msg='failed to set instance state: %s' % str(e))
elif state == 'present':
if not module.params.get('name'):
module.fail_json(msg='name parameter is required for new instance')
if not module.params.get('image'):
module.fail_json(msg='image parameter is required for new instance')
if not module.params.get('subscription_user'):
module.fail_json(msg='subscription_user parameter is ' +
'required for new instance')
if not module.params.get('subscription_password'):
module.fail_json(msg='subscription_password parameter is ' +
'required for new instance')
try:
(machine_dict_array) = create_virtual_machine(module, profitbricks)
module.exit_json(**machine_dict_array)
except Exception as e:
module.fail_json(msg='failed to set instance state: %s' % str(e))
from ansible.module_utils.basic import *
main()
| haad/ansible-modules-extras | cloud/profitbricks/profitbricks.py | Python | gpl-3.0 | 21,799 | 0.001973 |
# -*- coding:utf-8 -*-
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import time
import scrapy
from Global_function import get_localtime, print_new_number, save_messages
now_time = get_localtime(time.strftime("%Y-%m-%d", time.localtime()))
# now_time = 20170420
class USTC005_Spider(scrapy.Spider):
name = 'USTC005'
start_urls = ['http://www.pmo.cas.cn/gs/gk/xsbg/']
domain = 'http://www.pmo.cas.cn/'
counts = 0
def parse(self, response):
messages = response.xpath("//div[@class='list']/ul/li")
print_new_number(self.counts, 'USTC', self.name)
for i in xrange(len(messages)):
if u'青年论坛' in messages[i].xpath(".//a/text()").extract()[0]:
report_url = messages[i].xpath(".//a/@href").extract()[0]
else:
report_url = self.domain + messages[i].xpath(".//a/@href").extract()[0][9:]
if 'Colloquium' in report_url:
continue
report_time = get_localtime('20' + messages[i].xpath(".//span/text()").extract()[0].strip('[]'))
if report_time < now_time:
return
yield scrapy.Request(report_url, callback=self.parse_pages, meta={'link': report_url, 'number': i + 1})
def parse_pages(self, response):
messages = response.xpath("//div[@class='TRS_Editor']").xpath(".//p")
sign = 0
title, speaker, time, address, content, person_introduce = '', '', '', '', '', ''
for message in messages:
text = self.get_text(message)
if u'欢迎大家' in text or u'联系人' in text or u'紫金山天文台学术委员会' in text:
continue
elif u'题目:' in text or 'Title:' in text or u'题目:' in text or 'Title:' in text:
title = self.connect_message(text, ':') if u'题目:' in text or 'Title:' in text else self.connect_message(text, ':')
elif u'报告人:' in text or 'Speaker:' in text or u'主讲人:' in text or u'报告人:' in text or 'Speaker:' in text or u'主讲人:' in text:
speaker = self.connect_message(text, ':') if u'报告人:' in text or 'Speaker:' in text or u'主讲人:' in text else self.connect_message(text, ':')
elif u'时间:' in text or 'Time:' in text or u'时间:' in text or 'Time:' in text:
time = self.connect_message(text, ':') if u'时间:' in text or 'Time:' in text else self.connect_message(text, ':')
elif u'地点:' in text or 'Address:' in text or u'地点:' in text or 'Address:' in text:
address = self.connect_message(text, ':') if u'地点:' in text or 'Address:' in text else self.connect_message(text, ':')
elif u'简介:' in text or 'Bio:' in text or u'简介:' in text or 'Bio:' in text:
sign = 1
person_introduce = self.connect_message(text, ':') if u'简介:' in text or 'Bio:' in text else self.connect_message(text, ':')
elif u'摘要:' in text or 'Abstract:' in text or u'摘要:' in text or 'Abstract:' in text:
sign = 2
content = self.connect_message(text, ':') if u'摘要:' in text or 'Abstract:' in text else self.connect_message(text, ':')
else:
if sign == 1:
person_introduce += text.strip()
elif sign == 2:
content += text.strip()
if title != '':
self.counts += 1
print_new_number(self.counts, 'USTC', self.name)
all_messages = save_messages('USTC', self.name, title, time, address, speaker, person_introduce,
content, '', response.meta['link'], response.meta['number'], u'中国科学技术大学', u'天文与空间科学学院')
return all_messages
def get_text(self, message):
text = ''
for each in message.xpath(".//text()").extract():
text += each.strip()
return text
def connect_message(self, message, sign):
text = ''
all = message.split(sign)[1:]
for i in xrange(len(all)):
if i > 0:
text += ':'
text += all[i].strip()
return text | hbtech-ai/ARPS | report_spider/report_spider/spiders/USTC005.py | Python | mit | 3,781 | 0.024005 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Unix SMB/CIFS implementation.
# Copyright (C) Kamen Mazdrashki <[email protected]> 2011
# Copyright (C) Andrew Bartlett <[email protected]> 2016
# Copyright (C) Catalyst IT Ltd. 2016
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
import sys
import time
import os
import ldb
sys.path.insert(0, "bin/python")
import samba.tests
from samba.tests.samba_tool.base import SambaToolCmdTest
from samba import dsdb
from samba.dcerpc import drsuapi, misc, drsblobs, security
from samba.ndr import ndr_unpack, ndr_pack
from samba.drs_utils import drs_DsBind
from samba import gensec
from ldb import (
SCOPE_BASE,
Message,
FLAG_MOD_REPLACE,
)
from samba.common import cmp
from samba.common import get_string
class DrsBaseTestCase(SambaToolCmdTest):
"""Base class implementation for all DRS python tests.
    It is intended to provide common initialization and
    functionality used by all DRS tests in the drs/python
test package. For instance, DC1 and DC2 are always used
to pass URLs for DCs to test against"""
def setUp(self):
super(DrsBaseTestCase, self).setUp()
creds = self.get_credentials()
creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL)
# connect to DCs
self.url_dc1 = samba.tests.env_get_var_value("DC1")
(self.ldb_dc1, self.info_dc1) = samba.tests.connect_samdb_ex(self.url_dc1,
ldap_only=True)
self.url_dc2 = samba.tests.env_get_var_value("DC2")
(self.ldb_dc2, self.info_dc2) = samba.tests.connect_samdb_ex(self.url_dc2,
ldap_only=True)
self.test_ldb_dc = self.ldb_dc1
# cache some of RootDSE props
self.schema_dn = str(self.info_dc1["schemaNamingContext"][0])
self.domain_dn = str(self.info_dc1["defaultNamingContext"][0])
self.config_dn = str(self.info_dc1["configurationNamingContext"][0])
self.forest_level = int(self.info_dc1["forestFunctionality"][0])
# we will need DCs DNS names for 'samba-tool drs' command
self.dnsname_dc1 = str(self.info_dc1["dnsHostName"][0])
self.dnsname_dc2 = str(self.info_dc2["dnsHostName"][0])
# for debugging the test code
self._debug = False
def tearDown(self):
super(DrsBaseTestCase, self).tearDown()
def set_test_ldb_dc(self, ldb_dc):
"""Sets which DC's LDB we perform operations on during the test"""
self.test_ldb_dc = ldb_dc
def _GUID_string(self, guid):
return get_string(self.test_ldb_dc.schema_format_value("objectGUID", guid))
def _ldap_schemaUpdateNow(self, sam_db):
rec = {"dn": "",
"schemaUpdateNow": "1"}
m = Message.from_dict(sam_db, rec, FLAG_MOD_REPLACE)
sam_db.modify(m)
def _deleted_objects_dn(self, sam_ldb):
wkdn = "<WKGUID=18E2EA80684F11D2B9AA00C04F79F805,%s>" % self.domain_dn
res = sam_ldb.search(base=wkdn,
scope=SCOPE_BASE,
controls=["show_deleted:1"])
self.assertEqual(len(res), 1)
return str(res[0]["dn"])
def _lost_and_found_dn(self, sam_ldb, nc):
wkdn = "<WKGUID=%s,%s>" % (dsdb.DS_GUID_LOSTANDFOUND_CONTAINER, nc)
res = sam_ldb.search(base=wkdn,
scope=SCOPE_BASE)
self.assertEqual(len(res), 1)
return str(res[0]["dn"])
def _make_obj_name(self, prefix):
return prefix + time.strftime("%s", time.gmtime())
def _samba_tool_cmd_list(self, drs_command):
# make command line credentials string
# If test runs on windows then it can provide its own auth string
if hasattr(self, 'cmdline_auth'):
cmdline_auth = self.cmdline_auth
else:
ccache_name = self.get_creds_ccache_name()
# Tunnel the command line credentials down to the
# subcommand to avoid a new kinit
cmdline_auth = "--krb5-ccache=%s" % ccache_name
# bin/samba-tool drs <drs_command> <cmdline_auth>
return ["drs", drs_command, cmdline_auth]
def _net_drs_replicate(self, DC, fromDC, nc_dn=None, forced=True,
local=False, full_sync=False, single=False):
if nc_dn is None:
nc_dn = self.domain_dn
# make base command line
samba_tool_cmdline = self._samba_tool_cmd_list("replicate")
# bin/samba-tool drs replicate <Dest_DC_NAME> <Src_DC_NAME> <Naming Context>
samba_tool_cmdline += [DC, fromDC, nc_dn]
if forced:
samba_tool_cmdline += ["--sync-forced"]
if local:
samba_tool_cmdline += ["--local"]
if full_sync:
samba_tool_cmdline += ["--full-sync"]
if single:
samba_tool_cmdline += ["--single-object"]
(result, out, err) = self.runsubcmd(*samba_tool_cmdline)
self.assertCmdSuccess(result, out, err)
self.assertEqual(err, "", "Shouldn't be any error messages")
def _enable_inbound_repl(self, DC):
# make base command line
samba_tool_cmd = self._samba_tool_cmd_list("options")
# disable replication
samba_tool_cmd += [DC, "--dsa-option=-DISABLE_INBOUND_REPL"]
(result, out, err) = self.runsubcmd(*samba_tool_cmd)
self.assertCmdSuccess(result, out, err)
self.assertEqual(err, "", "Shouldn't be any error messages")
def _disable_inbound_repl(self, DC):
# make base command line
samba_tool_cmd = self._samba_tool_cmd_list("options")
# disable replication
samba_tool_cmd += [DC, "--dsa-option=+DISABLE_INBOUND_REPL"]
(result, out, err) = self.runsubcmd(*samba_tool_cmd)
self.assertCmdSuccess(result, out, err)
self.assertEqual(err, "", "Shouldn't be any error messages")
def _enable_all_repl(self, DC):
self._enable_inbound_repl(DC)
# make base command line
samba_tool_cmd = self._samba_tool_cmd_list("options")
# enable replication
samba_tool_cmd += [DC, "--dsa-option=-DISABLE_OUTBOUND_REPL"]
(result, out, err) = self.runsubcmd(*samba_tool_cmd)
self.assertCmdSuccess(result, out, err)
self.assertEqual(err, "", "Shouldn't be any error messages")
def _disable_all_repl(self, DC):
self._disable_inbound_repl(DC)
# make base command line
samba_tool_cmd = self._samba_tool_cmd_list("options")
# disable replication
samba_tool_cmd += [DC, "--dsa-option=+DISABLE_OUTBOUND_REPL"]
(result, out, err) = self.runsubcmd(*samba_tool_cmd)
self.assertCmdSuccess(result, out, err)
self.assertEqual(err, "", "Shouldn't be any error messages")
def _get_highest_hwm_utdv(self, ldb_conn):
res = ldb_conn.search("", scope=ldb.SCOPE_BASE, attrs=["highestCommittedUSN"])
hwm = drsuapi.DsReplicaHighWaterMark()
hwm.tmp_highest_usn = int(res[0]["highestCommittedUSN"][0])
hwm.reserved_usn = 0
hwm.highest_usn = hwm.tmp_highest_usn
utdv = drsuapi.DsReplicaCursorCtrEx()
cursors = []
c1 = drsuapi.DsReplicaCursor()
c1.source_dsa_invocation_id = misc.GUID(ldb_conn.get_invocation_id())
c1.highest_usn = hwm.highest_usn
cursors.append(c1)
utdv.count = len(cursors)
utdv.cursors = cursors
return (hwm, utdv)
def _get_identifier(self, ldb_conn, dn):
res = ldb_conn.search(dn, scope=ldb.SCOPE_BASE,
attrs=["objectGUID", "objectSid"])
id = drsuapi.DsReplicaObjectIdentifier()
id.guid = ndr_unpack(misc.GUID, res[0]['objectGUID'][0])
if "objectSid" in res[0]:
id.sid = ndr_unpack(security.dom_sid, res[0]['objectSid'][0])
id.dn = str(res[0].dn)
return id
def _get_ctr6_links(self, ctr6):
"""
Unpacks the linked attributes from a DsGetNCChanges response
and returns them as a list.
"""
ctr6_links = []
for lidx in range(0, ctr6.linked_attributes_count):
l = ctr6.linked_attributes[lidx]
try:
target = ndr_unpack(drsuapi.DsReplicaObjectIdentifier3,
l.value.blob)
except:
target = ndr_unpack(drsuapi.DsReplicaObjectIdentifier3Binary,
l.value.blob)
al = AbstractLink(l.attid, l.flags,
l.identifier.guid,
target.guid, target.dn)
ctr6_links.append(al)
return ctr6_links
def _get_ctr6_object_guids(self, ctr6):
"""Returns all the object GUIDs in a GetNCChanges response"""
guid_list = []
obj = ctr6.first_object
for i in range(0, ctr6.object_count):
guid_list.append(str(obj.object.identifier.guid))
obj = obj.next_object
return guid_list
def _ctr6_debug(self, ctr6):
"""
Displays basic info contained in a DsGetNCChanges response.
Having this debug code allows us to see the difference in behaviour
between Samba and Windows easier. Turn on the self._debug flag to see it.
"""
if self._debug:
print("------------ recvd CTR6 -------------")
next_object = ctr6.first_object
for i in range(0, ctr6.object_count):
print("Obj %d: %s %s" % (i, next_object.object.identifier.dn[:25],
next_object.object.identifier.guid))
next_object = next_object.next_object
print("Linked Attributes: %d" % ctr6.linked_attributes_count)
for lidx in range(0, ctr6.linked_attributes_count):
l = ctr6.linked_attributes[lidx]
try:
target = ndr_unpack(drsuapi.DsReplicaObjectIdentifier3,
l.value.blob)
except:
target = ndr_unpack(drsuapi.DsReplicaObjectIdentifier3Binary,
l.value.blob)
print("Link Tgt %s... <-- Src %s"
% (target.dn[:25], l.identifier.guid))
state = "Del"
if l.flags & drsuapi.DRSUAPI_DS_LINKED_ATTRIBUTE_FLAG_ACTIVE:
state = "Act"
print(" v%u %s changed %u" % (l.meta_data.version, state,
l.meta_data.originating_change_time))
print("HWM: %d" % (ctr6.new_highwatermark.highest_usn))
print("Tmp HWM: %d" % (ctr6.new_highwatermark.tmp_highest_usn))
print("More data: %d" % (ctr6.more_data))
def _get_replication(self, replica_flags,
drs_error=drsuapi.DRSUAPI_EXOP_ERR_NONE, drs=None, drs_handle=None,
highwatermark=None, uptodateness_vector=None,
more_flags=0, max_objects=133, exop=0,
dest_dsa=drsuapi.DRSUAPI_DS_BIND_GUID_W2K3,
source_dsa=None, invocation_id=None, nc_dn_str=None):
"""
Builds a DsGetNCChanges request based on the information provided
and returns the response received from the DC.
"""
if source_dsa is None:
source_dsa = self.test_ldb_dc.get_ntds_GUID()
if invocation_id is None:
invocation_id = self.test_ldb_dc.get_invocation_id()
if nc_dn_str is None:
nc_dn_str = self.test_ldb_dc.domain_dn()
if highwatermark is None:
if self.default_hwm is None:
(highwatermark, _) = self._get_highest_hwm_utdv(self.test_ldb_dc)
else:
highwatermark = self.default_hwm
if drs is None:
drs = self.drs
if drs_handle is None:
drs_handle = self.drs_handle
req10 = self._getnc_req10(dest_dsa=dest_dsa,
invocation_id=invocation_id,
nc_dn_str=nc_dn_str,
exop=exop,
max_objects=max_objects,
replica_flags=replica_flags,
more_flags=more_flags)
req10.highwatermark = highwatermark
if uptodateness_vector is not None:
uptodateness_vector_v1 = drsuapi.DsReplicaCursorCtrEx()
cursors = []
for i in range(0, uptodateness_vector.count):
c = uptodateness_vector.cursors[i]
c1 = drsuapi.DsReplicaCursor()
c1.source_dsa_invocation_id = c.source_dsa_invocation_id
c1.highest_usn = c.highest_usn
cursors.append(c1)
uptodateness_vector_v1.count = len(cursors)
uptodateness_vector_v1.cursors = cursors
req10.uptodateness_vector = uptodateness_vector_v1
(level, ctr) = drs.DsGetNCChanges(drs_handle, 10, req10)
self._ctr6_debug(ctr)
self.assertEqual(level, 6, "expected level 6 response!")
self.assertEqual(ctr.source_dsa_guid, misc.GUID(source_dsa))
self.assertEqual(ctr.source_dsa_invocation_id, misc.GUID(invocation_id))
self.assertEqual(ctr.extended_ret, drs_error)
return ctr
def _check_replication(self, expected_dns, replica_flags, expected_links=[],
drs_error=drsuapi.DRSUAPI_EXOP_ERR_NONE, drs=None, drs_handle=None,
highwatermark=None, uptodateness_vector=None,
more_flags=0, more_data=False,
dn_ordered=True, links_ordered=True,
max_objects=133, exop=0,
dest_dsa=drsuapi.DRSUAPI_DS_BIND_GUID_W2K3,
source_dsa=None, invocation_id=None, nc_dn_str=None,
nc_object_count=0, nc_linked_attributes_count=0):
"""
Makes sure that replication returns the specific error given.
"""
# send a DsGetNCChanges to the DC
ctr6 = self._get_replication(replica_flags,
drs_error, drs, drs_handle,
highwatermark, uptodateness_vector,
more_flags, max_objects, exop, dest_dsa,
source_dsa, invocation_id, nc_dn_str)
# check the response is what we expect
self._check_ctr6(ctr6, expected_dns, expected_links,
nc_object_count=nc_object_count, more_data=more_data,
dn_ordered=dn_ordered)
return (ctr6.new_highwatermark, ctr6.uptodateness_vector)
def _get_ctr6_dn_list(self, ctr6):
"""
Returns the DNs contained in a DsGetNCChanges response.
"""
dn_list = []
next_object = ctr6.first_object
for i in range(0, ctr6.object_count):
dn_list.append(next_object.object.identifier.dn)
next_object = next_object.next_object
self.assertEqual(next_object, None)
return dn_list
def _check_ctr6(self, ctr6, expected_dns=[], expected_links=[],
dn_ordered=True, links_ordered=True,
more_data=False, nc_object_count=0,
nc_linked_attributes_count=0, drs_error=0):
"""
Check that a ctr6 matches the specified parameters.
"""
ctr6_raw_dns = self._get_ctr6_dn_list(ctr6)
# filter out changes to the RID Set objects, as these can happen
# intermittently and mess up the test assertions
ctr6_dns = []
for dn in ctr6_raw_dns:
if "CN=RID Set," in dn or "CN=RID Manager$," in dn:
print("Removing {0} from GetNCChanges reply".format(dn))
else:
ctr6_dns.append(dn)
self.assertEqual(len(ctr6_dns), len(expected_dns),
"Received unexpected objects (%s)" % ctr6_dns)
self.assertEqual(ctr6.object_count, len(ctr6_raw_dns))
self.assertEqual(ctr6.linked_attributes_count, len(expected_links))
self.assertEqual(ctr6.more_data, more_data)
self.assertEqual(ctr6.nc_object_count, nc_object_count)
self.assertEqual(ctr6.nc_linked_attributes_count, nc_linked_attributes_count)
self.assertEqual(ctr6.drs_error[0], drs_error)
i = 0
for dn in expected_dns:
# Expect them back in the exact same order as specified.
if dn_ordered:
self.assertNotEqual(ctr6_dns[i], None)
self.assertEqual(ctr6_dns[i], dn)
i = i + 1
# Don't care what order
else:
self.assertTrue(dn in ctr6_dns, "Couldn't find DN '%s' anywhere in ctr6 response." % dn)
# Extract the links from the response
ctr6_links = self._get_ctr6_links(ctr6)
expected_links.sort()
lidx = 0
for el in expected_links:
if links_ordered:
self.assertEqual(el, ctr6_links[lidx])
lidx += 1
else:
self.assertTrue(el in ctr6_links, "Couldn't find link '%s' anywhere in ctr6 response." % el)
def _exop_req8(self, dest_dsa, invocation_id, nc_dn_str, exop,
replica_flags=0, max_objects=0, partial_attribute_set=None,
partial_attribute_set_ex=None, mapping_ctr=None):
req8 = drsuapi.DsGetNCChangesRequest8()
req8.destination_dsa_guid = misc.GUID(dest_dsa) if dest_dsa else misc.GUID()
req8.source_dsa_invocation_id = misc.GUID(invocation_id)
req8.naming_context = drsuapi.DsReplicaObjectIdentifier()
req8.naming_context.dn = str(nc_dn_str)
req8.highwatermark = drsuapi.DsReplicaHighWaterMark()
req8.highwatermark.tmp_highest_usn = 0
req8.highwatermark.reserved_usn = 0
req8.highwatermark.highest_usn = 0
req8.uptodateness_vector = None
req8.replica_flags = replica_flags
req8.max_object_count = max_objects
req8.max_ndr_size = 402116
req8.extended_op = exop
req8.fsmo_info = 0
req8.partial_attribute_set = partial_attribute_set
req8.partial_attribute_set_ex = partial_attribute_set_ex
if mapping_ctr:
req8.mapping_ctr = mapping_ctr
else:
req8.mapping_ctr.num_mappings = 0
req8.mapping_ctr.mappings = None
return req8
def _getnc_req10(self, dest_dsa, invocation_id, nc_dn_str, exop,
replica_flags=0, max_objects=0, partial_attribute_set=None,
partial_attribute_set_ex=None, mapping_ctr=None,
more_flags=0):
req10 = drsuapi.DsGetNCChangesRequest10()
req10.destination_dsa_guid = misc.GUID(dest_dsa) if dest_dsa else misc.GUID()
req10.source_dsa_invocation_id = misc.GUID(invocation_id)
req10.naming_context = drsuapi.DsReplicaObjectIdentifier()
req10.naming_context.dn = str(nc_dn_str)
req10.highwatermark = drsuapi.DsReplicaHighWaterMark()
req10.highwatermark.tmp_highest_usn = 0
req10.highwatermark.reserved_usn = 0
req10.highwatermark.highest_usn = 0
req10.uptodateness_vector = None
req10.replica_flags = replica_flags
req10.max_object_count = max_objects
req10.max_ndr_size = 402116
req10.extended_op = exop
req10.fsmo_info = 0
req10.partial_attribute_set = partial_attribute_set
req10.partial_attribute_set_ex = partial_attribute_set_ex
if mapping_ctr:
req10.mapping_ctr = mapping_ctr
else:
req10.mapping_ctr.num_mappings = 0
req10.mapping_ctr.mappings = None
req10.more_flags = more_flags
return req10
def _ds_bind(self, server_name, creds=None):
binding_str = "ncacn_ip_tcp:%s[seal]" % server_name
if creds is None:
creds = self.get_credentials()
drs = drsuapi.drsuapi(binding_str, self.get_loadparm(), creds)
(drs_handle, supported_extensions) = drs_DsBind(drs)
return (drs, drs_handle)
def get_partial_attribute_set(self, attids=[drsuapi.DRSUAPI_ATTID_objectClass]):
partial_attribute_set = drsuapi.DsPartialAttributeSet()
partial_attribute_set.attids = attids
partial_attribute_set.num_attids = len(attids)
return partial_attribute_set
class AbstractLink:
def __init__(self, attid, flags, identifier, targetGUID,
targetDN=""):
self.attid = attid
self.flags = flags
self.identifier = str(identifier)
self.selfGUID_blob = ndr_pack(identifier)
self.targetGUID = str(targetGUID)
self.targetGUID_blob = ndr_pack(targetGUID)
self.targetDN = targetDN
def __repr__(self):
return "AbstractLink(0x%08x, 0x%08x, %s, %s)" % (
self.attid, self.flags, self.identifier, self.targetGUID)
def __internal_cmp__(self, other, verbose=False):
"""See CompareLinks() in MS-DRSR section 4.1.10.5.17"""
if not isinstance(other, AbstractLink):
if verbose:
print("AbstractLink.__internal_cmp__(%r, %r) => wrong type" % (self, other))
return NotImplemented
c = cmp(self.selfGUID_blob, other.selfGUID_blob)
if c != 0:
if verbose:
print("AbstractLink.__internal_cmp__(%r, %r) => %d different identifier" % (self, other, c))
return c
c = other.attid - self.attid
if c != 0:
if verbose:
print("AbstractLink.__internal_cmp__(%r, %r) => %d different attid" % (self, other, c))
return c
self_active = self.flags & drsuapi.DRSUAPI_DS_LINKED_ATTRIBUTE_FLAG_ACTIVE
other_active = other.flags & drsuapi.DRSUAPI_DS_LINKED_ATTRIBUTE_FLAG_ACTIVE
c = self_active - other_active
if c != 0:
if verbose:
print("AbstractLink.__internal_cmp__(%r, %r) => %d different FLAG_ACTIVE" % (self, other, c))
return c
c = cmp(self.targetGUID_blob, other.targetGUID_blob)
if c != 0:
if verbose:
print("AbstractLink.__internal_cmp__(%r, %r) => %d different target" % (self, other, c))
return c
c = self.flags - other.flags
if c != 0:
if verbose:
print("AbstractLink.__internal_cmp__(%r, %r) => %d different flags" % (self, other, c))
return c
return 0
def __lt__(self, other):
c = self.__internal_cmp__(other)
if c == NotImplemented:
return NotImplemented
if c < 0:
return True
return False
def __le__(self, other):
c = self.__internal_cmp__(other)
if c == NotImplemented:
return NotImplemented
if c <= 0:
return True
return False
def __eq__(self, other):
c = self.__internal_cmp__(other, verbose=True)
if c == NotImplemented:
return NotImplemented
if c == 0:
return True
return False
def __ne__(self, other):
c = self.__internal_cmp__(other)
if c == NotImplemented:
return NotImplemented
if c != 0:
return True
return False
def __gt__(self, other):
c = self.__internal_cmp__(other)
if c == NotImplemented:
return NotImplemented
if c > 0:
return True
return False
def __ge__(self, other):
c = self.__internal_cmp__(other)
if c == NotImplemented:
return NotImplemented
if c >= 0:
return True
return False
def __hash__(self):
return hash((self.attid, self.flags, self.identifier, self.targetGUID))
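if __name__ == "__main__":
    # Illustrative sketch only (not part of the Samba test suite): links with
    # the same source GUID, attid, flags and target GUID compare equal under
    # the MS-DRSR CompareLinks() ordering implemented above.
    src_guid = misc.GUID()
    tgt_guid = misc.GUID()
    link_a = AbstractLink(drsuapi.DRSUAPI_ATTID_member, 0, src_guid, tgt_guid)
    link_b = AbstractLink(drsuapi.DRSUAPI_ATTID_member, 0, src_guid, tgt_guid)
    print(link_a == link_b)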
| kernevil/samba | source4/torture/drs/python/drs_base.py | Python | gpl-3.0 | 24,898 | 0.001848 |
import codecs
from setuptools import setup
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with codecs.open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='zenbu',
version='1.0.5',
description='Jinja2 + YAML based config templater.',
long_description=long_description,
url='https://github.com/metakirby5/zenbu',
author='Ethan Chan',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
keywords='zenbu config templater jinja2 yaml',
py_modules=['zenbu'],
install_requires=[
'argcomplete',
'colorlog',
'Jinja2',
'PyYAML',
'termcolor',
'watchdog',
],
entry_points={
'console_scripts': [
'zenbu=zenbu:main',
],
},
)
| metakirby5/zenbu | setup.py | Python | mit | 1,288 | 0 |
'''
Copyright (C) 2016 Bastille Networks
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
def i_code (code3):
return code3[0]
def o_code (code3):
if len (code3) >= 2:
return code3[1]
else:
return code3[0]
def tap_code (code3):
if len (code3) >= 3:
return code3[2]
else:
return code3[0]
def i_type (code3):
return char_to_type[i_code (code3)]
def o_type (code3):
return char_to_type[o_code (code3)]
def tap_type (code3):
return char_to_type[tap_code (code3)]
char_to_type = {}
char_to_type['s'] = 'short'
char_to_type['i'] = 'int'
char_to_type['f'] = 'float'
char_to_type['c'] = 'gr_complex'
char_to_type['b'] = 'unsigned char'
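if __name__ == '__main__':
    # Minimal usage sketch (assumed caller, not part of the original utility):
    # a 2-character code such as 'fc' means float input, gr_complex output,
    # with the tap type falling back to the input type.
    print("i_type('fc')   -> %s" % i_type('fc'))      # float
    print("o_type('fc')   -> %s" % o_type('fc'))      # gr_complex
    print("tap_type('fc') -> %s" % tap_type('fc'))    # float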
| BastilleResearch/gr-nordic | python/build_utils_codes.py | Python | gpl-3.0 | 1,289 | 0.013189 |
"""
HDF5 in memory object
Britton Smith <[email protected]>
"""
import h5py
import numpy as np
import sys
class H5InMemory(object):
def __init__(self, fh):
self.attrs = {}
if fh is None:
self.data = {}
return
if isinstance(fh, str):
fh = h5py.File(fh, "r")
if hasattr(fh, "attrs"):
self.attrs = dict([(attr, fh.attrs[attr]) \
for attr in fh.attrs])
if isinstance(fh, h5py.Dataset):
self.value = fh.value
elif isinstance(fh, np.ndarray):
self.value = fh
else:
self.data = dict([(field, H5InMemory(fh[field])) \
for field in fh.keys()])
if isinstance(fh, h5py.File):
fh.close()
def create_group(self, key):
self.data[key] = H5InMemory(None)
return self.data[key]
def create_dataset(self, key, data=None):
self.data[key] = H5InMemory(data)
return self.data[key]
def __getitem__(self, key):
return self.data[key]
def __setitem__(self, key, value):
self.data[key] = value
def __delitem__(self, key):
del self.data[key]
def __iter__(self):
for key in self.data:
yield key
def iterkeys(self):
for key in self.data:
yield key
def __contains__(self, key):
return key in self.data
def __repr__(self):
if hasattr(self, "value"):
return "<H5InMemory Data object %s>" % \
str(self.value.shape)
else:
return "<H5InMemory Group object (%d items)>" % \
len(self.keys())
def __str__(self):
return self.__repr__()
def keys(self):
if hasattr(self, "data"):
return self.data.keys()
return None
def save(self, fh):
top = False
if isinstance(fh, str):
top = True
fh = h5py.File(fh, "w")
for attr in self.attrs:
fh.attrs[attr] = self.attrs[attr]
if hasattr(self, "data"):
for field in self:
if hasattr(self.data[field], "data"):
self.data[field].save(fh.create_group(field))
else:
dfh = fh.create_dataset(field,
data=self.data[field].value)
self.data[field].save(dfh)
if top:
fh.close()
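if __name__ == "__main__":
    # Minimal usage sketch (assumed workflow, not part of the original module):
    # build a group/dataset hierarchy in memory, then write it out with save().
    container = H5InMemory(None)
    profiles = container.create_group("profiles")
    profiles.create_dataset("density", data=np.linspace(0.0, 1.0, 8))
    container.attrs["description"] = "example container"
    container.save("h5_in_memory_example.h5")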
| aemerick/galaxy_analysis | physics_data/UVB/grackle_tables/h5_in_memory.py | Python | mit | 2,620 | 0.002672 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable=W0403
# pylint: disable=R0915
# pylint: disable=W0613
# Copyright (c) 2014-2015, Human Brain Project
# Cyrille Favreau <[email protected]>
# Daniel Nachbaur <[email protected]>
#
# This file is part of RenderingResourceManager
# <https://github.com/BlueBrain/RenderingResourceManager>
#
# This library is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License version 3.0 as published
# by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# All rights reserved. Do not distribute without further notice.
"""
The Slurm job manager is in charge of managing slurm jobs.
"""
import subprocess
import requests
import traceback
from threading import Lock
import json
import re
import rendering_resource_manager_service.session.management.session_manager_settings as settings
from rendering_resource_manager_service.config.management import \
rendering_resource_settings_manager as manager
import rendering_resource_manager_service.utils.custom_logging as log
from rendering_resource_manager_service.session.models import \
SESSION_STATUS_STARTING, SESSION_STATUS_RUNNING, \
SESSION_STATUS_SCHEDULING, SESSION_STATUS_SCHEDULED, SESSION_STATUS_FAILED
import rendering_resource_manager_service.service.settings as global_settings
SLURM_SSH_COMMAND = '/usr/bin/ssh -i ' + \
global_settings.SLURM_SSH_KEY + ' ' + \
global_settings.SLURM_USERNAME + '@'
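# For illustration only (the key path and user name are placeholders coming from
# the service settings): the prefix composed above expands to something like
#   /usr/bin/ssh -i /path/to/slurm_key slurm_user@<cluster node>
# and the methods below append the actual SLURM command, e.g. 'scancel <job id>'.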
class SlurmJobManager(object):
"""
The job manager class provides methods for managing slurm jobs
"""
def __init__(self):
"""
Setup job manager
"""
self._mutex = Lock()
def schedule(self, session, job_information, auth_token=None):
"""
Allocates a job and starts the rendering resource process. If successful, the session
job_id is populated and the session status is set to SESSION_STATUS_STARTING
:param session: Current user session
:param job_information: Information about the job
:param auth_token: Currently not used by Slurm
        :return: A JSON response containing an ok status or a description of the error
"""
status = self.allocate(session, job_information)
if status[0] == 200:
session.http_host = self.hostname(session)
status = self.start(session, job_information)
return status
def allocate(self, session, job_information):
"""
Allocates a job according to rendering resource configuration. If the allocation is
successful, the session job_id is populated and the session status is set to
SESSION_STATUS_SCHEDULED
:param session: Current user session
:param job_information: Information about the job
        :return: A JSON response containing an ok status or a description of the error
"""
status = None
for cluster_node in global_settings.SLURM_HOSTS:
try:
self._mutex.acquire()
session.status = SESSION_STATUS_SCHEDULING
session.cluster_node = cluster_node
session.save()
log.info(1, 'Scheduling job for session ' + session.id)
job_information.cluster_node = cluster_node
command_line = self._build_allocation_command(session, job_information)
process = subprocess.Popen(
[command_line],
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
error = process.communicate()[1]
if len(re.findall('Granted', error)) != 0:
session.job_id = re.findall('\\d+', error)[0]
log.info(1, 'Allocated job ' + str(session.job_id) +
' on cluster node ' + cluster_node)
session.status = SESSION_STATUS_SCHEDULED
session.save()
response = json.dumps({'message': 'Job scheduled', 'jobId': session.job_id})
status = [200, response]
break
else:
session.status = SESSION_STATUS_FAILED
session.save()
log.error(error)
response = json.dumps({'contents': error})
status = [400, response]
process.stdin.close()
except OSError as e:
log.error(str(e))
response = json.dumps({'contents': str(e)})
status = [400, response]
finally:
if self._mutex.locked():
self._mutex.release()
return status
def start(self, session, job_information):
"""
Start the rendering resource using the job allocated by the schedule method. If successful,
the session status is set to SESSION_STATUS_STARTING
:param session: Current user session
:param job_information: Information about the job
        :return: A JSON response containing an ok status or a description of the error
"""
try:
self._mutex.acquire()
session.status = SESSION_STATUS_STARTING
session.save()
rr_settings = \
manager.RenderingResourceSettingsManager.get_by_id(session.configuration_id.lower())
# Modules
full_command = '"source /etc/profile && module purge && '
if rr_settings.modules is not None:
values = rr_settings.modules.split()
for module in values:
full_command += 'module load ' + module.strip() + ' && '
# Environment variables
if rr_settings.environment_variables is not None:
values = rr_settings.environment_variables.split()
values += job_information.environment.split()
for variable in values:
full_command += variable + ' '
# Command lines parameters
rest_parameters = manager.RenderingResourceSettingsManager.format_rest_parameters(
str(rr_settings.scheduler_rest_parameters_format),
str(session.http_host),
str(session.http_port),
'rest' + str(rr_settings.id + session.id),
str(session.job_id))
full_command += rr_settings.command_line
values = rest_parameters.split()
values += job_information.params.split()
for parameter in values:
full_command += ' ' + parameter
# Output redirection
full_command += ' > ' + self._file_name(session, settings.SLURM_OUT_FILE)
full_command += ' 2> ' + self._file_name(session, settings.SLURM_ERR_FILE)
full_command += '"'
command_line = '/usr/bin/ssh -f -o StrictHostKeyChecking=no -i ' + \
global_settings.SLURM_SSH_KEY + ' ' + \
global_settings.SLURM_USERNAME + '@' + \
session.http_host
ssh_command = command_line + ' ' + full_command
subprocess.Popen([ssh_command], shell=True, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
log.info(1, 'Connect to cluster machine and execute command: ' + ssh_command)
if rr_settings.wait_until_running:
session.status = SESSION_STATUS_STARTING
else:
session.status = SESSION_STATUS_RUNNING
session.save()
response = json.dumps({'message': session.configuration_id + ' successfully started'})
return [200, response]
except OSError as e:
log.error(str(e))
response = json.dumps({'contents': str(e)})
return [400, response]
finally:
if self._mutex.locked():
self._mutex.release()
def stop(self, session):
"""
Gently stops a given job, waits for 2 seconds and checks for its disappearance
:param session: Current user session
        :return: A Json response containing an ok status or a description of the error
"""
result = [500, 'Unexpected error']
try:
self._mutex.acquire()
# pylint: disable=E1101
setting = \
manager.RenderingResourceSettings.objects.get(
id=session.configuration_id)
if setting.graceful_exit:
log.info(1, 'Gracefully exiting rendering resource')
try:
url = 'http://' + session.http_host + \
':' + str(session.http_port) + '/' + \
settings.RR_SPECIFIC_COMMAND_EXIT
log.info(1, url)
r = requests.put(
url=url,
timeout=global_settings.REQUEST_TIMEOUT)
r.close()
# pylint: disable=W0702
except requests.exceptions.RequestException as e:
log.error(traceback.format_exc(e))
result = self.kill(session)
except OSError as e:
msg = str(e)
log.error(msg)
response = json.dumps({'contents': msg})
result = [400, response]
finally:
if self._mutex.locked():
self._mutex.release()
return result
@staticmethod
def kill(session):
"""
Kills the given job. This method should only be used if the stop method failed.
:param session: Current user session
        :return: A Json response containing an ok status or a description of the error
"""
result = [500, 'Unexpected error']
if session.job_id is not None:
try:
command_line = SLURM_SSH_COMMAND + session.cluster_node + \
' scancel ' + session.job_id
log.info(1, 'Stopping job ' + session.job_id)
process = subprocess.Popen(
[command_line],
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
output = process.communicate()[0]
log.info(1, output)
msg = 'Job successfully cancelled'
log.info(1, msg)
response = json.dumps({'contents': msg})
result = [200, response]
except OSError as e:
msg = str(e)
log.error(msg)
response = json.dumps({'contents': msg})
result = [400, response]
return result
def hostname(self, session):
"""
        Retrieve the hostname of the host on which the given job is allocated.
Note: this uses ssh and scontrol on the SLURM_HOST
Note: Due to DNS migration of CSCS compute nodes to bbp.epfl.ch domain
it uses hardcoded value based on the front-end dns name (which was not migrated)
:param session: Current user session
:return: The hostname of the host if the job is running, empty otherwise
"""
hostname = self._query(session, 'BatchHost')
if hostname != '':
domain = self._get_domain(session)
if domain == 'cscs.ch':
return hostname + '.bbp.epfl.ch'
elif domain == 'epfl.ch':
hostname = hostname + '.' + domain
return hostname
def job_information(self, session):
"""
Returns information about the job
:param session: Current user session
:return: A string containing the status of the job
"""
return self._query(session)
def rendering_resource_out_log(self, session):
"""
Returns the contents of the rendering resource output file
:param session: Current user session
:return: A string containing the output log
"""
return self._rendering_resource_log(session, settings.SLURM_OUT_FILE)
def rendering_resource_err_log(self, session):
"""
Returns the contents of the rendering resource error file
:param session: Current user session
:return: A string containing the error log
"""
return self._rendering_resource_log(session, settings.SLURM_ERR_FILE)
@staticmethod
def _query(session, attribute=None):
"""
Queries Slurm for information
:param session: Current user session
:param attribute: Attribute to be queried
:return: A Json response containing an ok status or a description of the error
"""
value = ''
if session.job_id is not None:
try:
command_line = SLURM_SSH_COMMAND + session.cluster_node + \
' scontrol show job ' + str(session.job_id)
process = subprocess.Popen(
[command_line],
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output = process.communicate()[0]
if attribute is None:
return output
status = re.search(r'JobState=(\w+)', output).group(1)
if status != 'CANCELLED':
value = re.search(attribute + r'=(\w+)', output).group(1)
log.info(1, attribute + ' = ' + value)
log.info(1, 'Job status: ' + status + ' hostname: ' + value)
return value
except OSError as e:
log.error(str(e))
return value
def _file_name(self, session, extension):
"""
        Returns the name of the log file with the specified extension
        :param session: Current user session
        :param extension: file extension (typically err or out)
        :return: A string containing the log file name
"""
domain = self._get_domain(session)
if domain == 'epfl.ch':
return settings.SLURM_OUTPUT_PREFIX_NFS + '_' + str(session.job_id) + \
'_' + session.configuration_id + '_' + extension
return settings.SLURM_OUTPUT_PREFIX + '_' + str(session.job_id) + \
'_' + session.configuration_id + '_' + extension
def _rendering_resource_log(self, session, extension):
"""
Returns the contents of the specified file
:param session: Current user session
:param extension: File extension (typically err or out)
:return: A string containing the log
"""
try:
result = 'Not currently available'
if session.status in [SESSION_STATUS_STARTING, SESSION_STATUS_RUNNING]:
filename = self._file_name(session, extension)
command_line = SLURM_SSH_COMMAND + session.cluster_node + \
' cat ' + filename
log.info(1, 'Querying log: ' + command_line)
process = subprocess.Popen(
[command_line],
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
output = process.communicate()[0]
result = output
return result
except OSError as e:
return str(e)
except IOError as e:
return str(e)
@staticmethod
def _get_domain(session):
"""
Return the domain name of node used in the current session
:param session: Current user session
:return: A string containing the DNS domain
"""
return session.cluster_node.partition('.')[2]
@staticmethod
def _build_allocation_command(session, job_information):
"""
Builds the SLURM allocation command line
:param session: Current user session
:param job_information: Information about the job
:return: A string containing the SLURM command
"""
rr_settings = \
manager.RenderingResourceSettingsManager.get_by_id(session.configuration_id.lower())
options = ''
if job_information.exclusive_allocation or rr_settings.exclusive:
options += ' --exclusive'
value = rr_settings.nb_nodes
if job_information.nb_nodes != 0:
value = job_information.nb_nodes
if value != 0:
options += ' -N ' + str(value)
value = rr_settings.nb_cpus
if job_information.nb_cpus != 0:
value = job_information.nb_cpus
options += ' -c ' + str(value)
value = rr_settings.nb_gpus
if job_information.nb_gpus != 0:
value = job_information.nb_gpus
options += ' --gres=gpu:' + str(value)
value = rr_settings.memory
if job_information.memory != 0:
value = job_information.memory
options += ' --mem=' + str(value)
if job_information.reservation != '' and job_information.reservation is not None:
options += ' --reservation=' + job_information.reservation
allocation_time = global_settings.SLURM_DEFAULT_TIME
if job_information.allocation_time != '':
allocation_time = job_information.allocation_time
log.info(1, 'Scheduling job for session ' + session.id)
job_name = session.owner + '_' + rr_settings.id
command_line = SLURM_SSH_COMMAND + session.cluster_node + \
' salloc --no-shell' + \
' --immediate=' + str(settings.SLURM_ALLOCATION_TIMEOUT) + \
' -p ' + rr_settings.queue + \
' --account=' + rr_settings.project + \
' --job-name=' + job_name + \
' --time=' + allocation_time + \
options
log.info(1, command_line)
return command_line
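# Minimal usage sketch (assumption: `session` is a populated Session model instance and
# `job_information` provides the attributes referenced above; both names are hypothetical):
# job_manager = SlurmJobManager()
# status = job_manager.schedule(session, job_information)
# if status[0] == 200:
#     log.info(1, 'Rendering resource scheduled: ' + status[1])
# else:
#     log.error('Scheduling failed: ' + status[1])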
| favreau/RenderingResourceManager | rendering_resource_manager_service/session/management/slurm_job_manager.py | Python | lgpl-3.0 | 18,783 | 0.001757 |
#! /usr/bin/python
# -*- coding: utf8 -*-
import tensorflow as tf
import tensorlayer as tl
import numpy as np
import time
import numbers
import random
import os
import re
import sys
import threading
# import Queue # <-- does not work for py3
is_py2 = sys.version[0] == '2'
if is_py2:
import Queue as queue
else:
import queue as queue
from six.moves import range
import scipy
from scipy import linalg
import scipy.ndimage as ndi
from skimage import transform
from skimage import exposure
from skimage import measure
import skimage
# linalg https://docs.scipy.org/doc/scipy/reference/linalg.html
# ndimage https://docs.scipy.org/doc/scipy/reference/ndimage.html
## Threading
def threading_data(data=None, fn=None, **kwargs):
"""Return a batch of result by given data.
    Usually used for data augmentation.
Parameters
-----------
data : numpy array or zip of numpy array, see Examples below.
fn : the function for data processing.
more args : the args for fn, see Examples below.
Examples
--------
- Single array
>>> X --> [batch_size, row, col, 1] greyscale
>>> results = threading_data(X, zoom, zoom_range=[0.5, 1], is_random=True)
... results --> [batch_size, row, col, channel]
>>> tl.visualize.images2d(images=np.asarray(results), second=0.01, saveable=True, name='after', dtype=None)
>>> tl.visualize.images2d(images=np.asarray(X), second=0.01, saveable=True, name='before', dtype=None)
- List of array (e.g. functions with ``multi``)
>>> X, Y --> [batch_size, row, col, 1] greyscale
>>> data = threading_data([_ for _ in zip(X, Y)], zoom_multi, zoom_range=[0.5, 1], is_random=True)
... data --> [batch_size, 2, row, col, 1]
>>> X_, Y_ = data.transpose((1,0,2,3,4))
... X_, Y_ --> [batch_size, row, col, 1]
>>> tl.visualize.images2d(images=np.asarray(X_), second=0.01, saveable=True, name='after', dtype=None)
>>> tl.visualize.images2d(images=np.asarray(Y_), second=0.01, saveable=True, name='before', dtype=None)
- Customized function for image segmentation
>>> def distort_img(data):
... x, y = data
... x, y = flip_axis_multi([x, y], axis=0, is_random=True)
... x, y = flip_axis_multi([x, y], axis=1, is_random=True)
... x, y = crop_multi([x, y], 100, 100, is_random=True)
... return x, y
>>> X, Y --> [batch_size, row, col, channel]
>>> data = threading_data([_ for _ in zip(X, Y)], distort_img)
>>> X_, Y_ = data.transpose((1,0,2,3,4))
References
----------
- `python queue <https://pymotw.com/2/Queue/index.html#module-Queue>`_
- `run with limited queue <http://effbot.org/librarybook/queue.htm>`_
"""
## plot function info
# for name, value in kwargs.items():
# print('{0} = {1}'.format(name, value))
# exit()
# define function for threading
def apply_fn(results, i, data, kwargs):
results[i] = fn(data, **kwargs)
## start multi-threaded reading.
results = [None] * len(data) ## preallocate result list
threads = []
for i in range(len(data)):
t = threading.Thread(
name='threading_and_return',
target=apply_fn,
args=(results, i, data[i], kwargs)
)
t.start()
threads.append(t)
## <Milo> wait for all threads to complete
for t in threads:
t.join()
return np.asarray(results)
## old implementation
# define function for threading
# def function(q, i, data, kwargs):
# result = fn(data, **kwargs)
# q.put([i, result])
# ## start threading
# q = queue.Queue()
# threads = []
# for i in range(len(data)):
# t = threading.Thread(
# name='threading_and_return',
# target=function,
# args=(q, i, data[i], kwargs)
# )
# t.start()
# threads.append(t)
#
# ## <Milo> wait for all threads to complete
# for t in threads:
# t.join()
#
# ## get results
# results = []
# for i in range(len(data)):
# result = q.get()
# results.append(result)
# results = sorted(results)
# for i in range(len(results)):
# results[i] = results[i][1]
# return np.asarray(results)
## Image
def rotation(x, rg=20, is_random=False, row_index=0, col_index=1, channel_index=2,
fill_mode='nearest', cval=0.):
"""Rotate an image randomly or non-randomly.
Parameters
-----------
x : numpy array
An image with dimension of [row, col, channel] (default).
rg : int or float
Degree to rotate, usually 0 ~ 180.
is_random : boolean, default False
If True, randomly rotate.
row_index, col_index, channel_index : int
Index of row, col and channel, default (0, 1, 2), for theano (1, 2, 0).
fill_mode : string
Method to fill missing pixel, default ‘nearest’, more options ‘constant’, ‘reflect’ or ‘wrap’
- `scipy ndimage affine_transform <https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.ndimage.interpolation.affine_transform.html>`_
cval : scalar, optional
Value used for points outside the boundaries of the input if mode='constant'. Default is 0.0
- `scipy ndimage affine_transform <https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.ndimage.interpolation.affine_transform.html>`_
Examples
---------
>>> x --> [row, col, 1] greyscale
>>> x = rotation(x, rg=40, is_random=False)
>>> tl.visualize.frame(x[:,:,0], second=0.01, saveable=True, name='temp',cmap='gray')
"""
if is_random:
theta = np.pi / 180 * np.random.uniform(-rg, rg)
else:
theta = np.pi /180 * rg
rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
[np.sin(theta), np.cos(theta), 0],
[0, 0, 1]])
h, w = x.shape[row_index], x.shape[col_index]
transform_matrix = transform_matrix_offset_center(rotation_matrix, h, w)
x = apply_transform(x, transform_matrix, channel_index, fill_mode, cval)
return x
def rotation_multi(x, rg=20, is_random=False, row_index=0, col_index=1, channel_index=2,
fill_mode='nearest', cval=0.):
"""Rotate multiple images with the same arguments, randomly or non-randomly.
    Usually used for image segmentation, where x=[X, Y] and X and Y should be matched.
Parameters
-----------
x : list of numpy array
List of images with dimension of [n_images, row, col, channel] (default).
others : see ``rotation``.
Examples
--------
>>> x, y --> [row, col, 1] greyscale
>>> x, y = rotation_multi([x, y], rg=90, is_random=False)
>>> tl.visualize.frame(x[:,:,0], second=0.01, saveable=True, name='x',cmap='gray')
>>> tl.visualize.frame(y[:,:,0], second=0.01, saveable=True, name='y',cmap='gray')
"""
if is_random:
theta = np.pi / 180 * np.random.uniform(-rg, rg)
else:
theta = np.pi /180 * rg
rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
[np.sin(theta), np.cos(theta), 0],
[0, 0, 1]])
h, w = x[0].shape[row_index], x[0].shape[col_index]
transform_matrix = transform_matrix_offset_center(rotation_matrix, h, w)
results = []
for data in x:
results.append( apply_transform(data, transform_matrix, channel_index, fill_mode, cval))
return np.asarray(results)
# crop
def crop(x, wrg, hrg, is_random=False, row_index=0, col_index=1, channel_index=2):
"""Randomly or centrally crop an image.
Parameters
----------
x : numpy array
An image with dimension of [row, col, channel] (default).
wrg : float
Size of weight.
hrg : float
Size of height.
is_random : boolean, default False
If True, randomly crop, else central crop.
row_index, col_index, channel_index : int
Index of row, col and channel, default (0, 1, 2), for theano (1, 2, 0).
"""
h, w = x.shape[row_index], x.shape[col_index]
    assert (h > hrg) and (w > wrg), "The size of cropping should be smaller than the original image"
if is_random:
h_offset = int(np.random.uniform(0, h-hrg) -1)
w_offset = int(np.random.uniform(0, w-wrg) -1)
# print(h_offset, w_offset, x[h_offset: hrg+h_offset ,w_offset: wrg+w_offset].shape)
return x[h_offset: hrg+h_offset ,w_offset: wrg+w_offset]
else: # central crop
h_offset = int(np.floor((h - hrg)/2.))
w_offset = int(np.floor((w - wrg)/2.))
h_end = h_offset + hrg
w_end = w_offset + wrg
return x[h_offset: h_end, w_offset: w_end]
# old implementation
# h_offset = (h - hrg)/2
# w_offset = (w - wrg)/2
# # print(x[h_offset: h-h_offset ,w_offset: w-w_offset].shape)
# return x[h_offset: h-h_offset ,w_offset: w-w_offset]
# central crop
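# Usage sketch for ``crop`` (assumption: X is a [row, col, channel] image larger than 64x64):
# >>> x = crop(X, wrg=64, hrg=64, is_random=True)   # random 64x64 patch
# >>> x = crop(X, wrg=64, hrg=64)                   # central 64x64 patch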
def crop_multi(x, wrg, hrg, is_random=False, row_index=0, col_index=1, channel_index=2):
"""Randomly or centrally crop multiple images.
Parameters
----------
x : list of numpy array
List of images with dimension of [n_images, row, col, channel] (default).
others : see ``crop``.
"""
h, w = x[0].shape[row_index], x[0].shape[col_index]
    assert (h > hrg) and (w > wrg), "The size of cropping should be smaller than the original image"
if is_random:
h_offset = int(np.random.uniform(0, h-hrg) -1)
w_offset = int(np.random.uniform(0, w-wrg) -1)
results = []
for data in x:
results.append( data[h_offset: hrg+h_offset ,w_offset: wrg+w_offset])
return np.asarray(results)
else:
# central crop
        h_offset = int((h - hrg)/2)
        w_offset = int((w - wrg)/2)
results = []
for data in x:
results.append( data[h_offset: h-h_offset ,w_offset: w-w_offset] )
return np.asarray(results)
# flip
def flip_axis(x, axis, is_random=False):
"""Flip the axis of an image, such as flip left and right, up and down, randomly or non-randomly,
Parameters
----------
x : numpy array
An image with dimension of [row, col, channel] (default).
axis : int
- 0, flip up and down
- 1, flip left and right
- 2, flip channel
is_random : boolean, default False
If True, randomly flip.
"""
if is_random:
factor = np.random.uniform(-1, 1)
if factor > 0:
x = np.asarray(x).swapaxes(axis, 0)
x = x[::-1, ...]
x = x.swapaxes(0, axis)
return x
else:
return x
else:
x = np.asarray(x).swapaxes(axis, 0)
x = x[::-1, ...]
x = x.swapaxes(0, axis)
return x
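# Usage sketch for ``flip_axis``:
# >>> x = flip_axis(x, axis=1, is_random=True)   # maybe flip left/right
# >>> x = flip_axis(x, axis=0)                   # always flip up/down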
def flip_axis_multi(x, axis, is_random=False):
"""Flip the axises of multiple images together, such as flip left and right, up and down, randomly or non-randomly,
Parameters
-----------
x : list of numpy array
List of images with dimension of [n_images, row, col, channel] (default).
others : see ``flip_axis``.
"""
if is_random:
factor = np.random.uniform(-1, 1)
if factor > 0:
# x = np.asarray(x).swapaxes(axis, 0)
# x = x[::-1, ...]
# x = x.swapaxes(0, axis)
# return x
results = []
for data in x:
data = np.asarray(data).swapaxes(axis, 0)
data = data[::-1, ...]
data = data.swapaxes(0, axis)
results.append( data )
return np.asarray(results)
else:
return np.asarray(x)
else:
# x = np.asarray(x).swapaxes(axis, 0)
# x = x[::-1, ...]
# x = x.swapaxes(0, axis)
# return x
results = []
for data in x:
data = np.asarray(data).swapaxes(axis, 0)
data = data[::-1, ...]
data = data.swapaxes(0, axis)
results.append( data )
return np.asarray(results)
# shift
def shift(x, wrg=0.1, hrg=0.1, is_random=False, row_index=0, col_index=1, channel_index=2,
fill_mode='nearest', cval=0.):
"""Shift an image randomly or non-randomly.
Parameters
-----------
x : numpy array
An image with dimension of [row, col, channel] (default).
wrg : float
Percentage of shift in axis x, usually -0.25 ~ 0.25.
hrg : float
Percentage of shift in axis y, usually -0.25 ~ 0.25.
is_random : boolean, default False
If True, randomly shift.
row_index, col_index, channel_index : int
Index of row, col and channel, default (0, 1, 2), for theano (1, 2, 0).
fill_mode : string
Method to fill missing pixel, default ‘nearest’, more options ‘constant’, ‘reflect’ or ‘wrap’.
- `scipy ndimage affine_transform <https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.ndimage.interpolation.affine_transform.html>`_
cval : scalar, optional
Value used for points outside the boundaries of the input if mode='constant'. Default is 0.0.
- `scipy ndimage affine_transform <https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.ndimage.interpolation.affine_transform.html>`_
"""
h, w = x.shape[row_index], x.shape[col_index]
if is_random:
tx = np.random.uniform(-hrg, hrg) * h
ty = np.random.uniform(-wrg, wrg) * w
else:
tx, ty = hrg * h, wrg * w
translation_matrix = np.array([[1, 0, tx],
[0, 1, ty],
[0, 0, 1]])
transform_matrix = translation_matrix # no need to do offset
x = apply_transform(x, transform_matrix, channel_index, fill_mode, cval)
return x
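# Usage sketch for ``shift`` (shift by up to 10% of the image size in each axis):
# >>> x = shift(x, wrg=0.1, hrg=0.1, is_random=True)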
def shift_multi(x, wrg=0.1, hrg=0.1, is_random=False, row_index=0, col_index=1, channel_index=2,
fill_mode='nearest', cval=0.):
"""Shift images with the same arguments, randomly or non-randomly.
    Usually used for image segmentation, where x=[X, Y] and X and Y should be matched.
Parameters
-----------
x : list of numpy array
List of images with dimension of [n_images, row, col, channel] (default).
others : see ``shift``.
"""
h, w = x[0].shape[row_index], x[0].shape[col_index]
if is_random:
tx = np.random.uniform(-hrg, hrg) * h
ty = np.random.uniform(-wrg, wrg) * w
else:
tx, ty = hrg * h, wrg * w
translation_matrix = np.array([[1, 0, tx],
[0, 1, ty],
[0, 0, 1]])
transform_matrix = translation_matrix # no need to do offset
results = []
for data in x:
results.append( apply_transform(data, transform_matrix, channel_index, fill_mode, cval))
return np.asarray(results)
# shear
def shear(x, intensity=0.1, is_random=False, row_index=0, col_index=1, channel_index=2,
fill_mode='nearest', cval=0.):
"""Shear an image randomly or non-randomly.
Parameters
-----------
x : numpy array
An image with dimension of [row, col, channel] (default).
intensity : float
Percentage of shear, usually -0.5 ~ 0.5 (is_random==True), 0 ~ 0.5 (is_random==False),
you can have a quick try by shear(X, 1).
is_random : boolean, default False
If True, randomly shear.
row_index, col_index, channel_index : int
Index of row, col and channel, default (0, 1, 2), for theano (1, 2, 0).
fill_mode : string
Method to fill missing pixel, default ‘nearest’, more options ‘constant’, ‘reflect’ or ‘wrap’.
- `scipy ndimage affine_transform <https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.ndimage.interpolation.affine_transform.html>`_
cval : scalar, optional
Value used for points outside the boundaries of the input if mode='constant'. Default is 0.0.
- `scipy ndimage affine_transform <https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.ndimage.interpolation.affine_transform.html>`_
"""
if is_random:
shear = np.random.uniform(-intensity, intensity)
else:
shear = intensity
shear_matrix = np.array([[1, -np.sin(shear), 0],
[0, np.cos(shear), 0],
[0, 0, 1]])
h, w = x.shape[row_index], x.shape[col_index]
transform_matrix = transform_matrix_offset_center(shear_matrix, h, w)
x = apply_transform(x, transform_matrix, channel_index, fill_mode, cval)
return x
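# Usage sketch for ``shear`` (random shear within +/- 0.2):
# >>> x = shear(x, intensity=0.2, is_random=True)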
def shear_multi(x, intensity=0.1, is_random=False, row_index=0, col_index=1, channel_index=2,
fill_mode='nearest', cval=0.):
"""Shear images with the same arguments, randomly or non-randomly.
    Usually used for image segmentation, where x=[X, Y] and X and Y should be matched.
Parameters
-----------
x : list of numpy array
List of images with dimension of [n_images, row, col, channel] (default).
others : see ``shear``.
"""
if is_random:
shear = np.random.uniform(-intensity, intensity)
else:
shear = intensity
shear_matrix = np.array([[1, -np.sin(shear), 0],
[0, np.cos(shear), 0],
[0, 0, 1]])
h, w = x[0].shape[row_index], x[0].shape[col_index]
transform_matrix = transform_matrix_offset_center(shear_matrix, h, w)
results = []
for data in x:
results.append( apply_transform(data, transform_matrix, channel_index, fill_mode, cval))
return np.asarray(results)
# swirl
def swirl(x, center=None, strength=1, radius=100, rotation=0, output_shape=None, order=1, mode='constant', cval=0, clip=True, preserve_range=False, is_random=False):
"""Swirl an image randomly or non-randomly, see `scikit-image swirl API <http://scikit-image.org/docs/dev/api/skimage.transform.html#skimage.transform.swirl>`_
and `example <http://scikit-image.org/docs/dev/auto_examples/plot_swirl.html>`_.
Parameters
-----------
x : numpy array
An image with dimension of [row, col, channel] (default).
center : (row, column) tuple or (2,) ndarray, optional
Center coordinate of transformation.
strength : float, optional
The amount of swirling applied.
radius : float, optional
The extent of the swirl in pixels. The effect dies out rapidly beyond radius.
rotation : float, (degree) optional
Additional rotation applied to the image, usually [0, 360], relates to center.
output_shape : tuple (rows, cols), optional
Shape of the output image generated. By default the shape of the input image is preserved.
order : int, optional
The order of the spline interpolation, default is 1. The order has to be in the range 0-5. See skimage.transform.warp for detail.
mode : {‘constant’, ‘edge’, ‘symmetric’, ‘reflect’, ‘wrap’}, optional
Points outside the boundaries of the input are filled according to the given mode, with ‘constant’ used as the default. Modes match the behaviour of numpy.pad.
cval : float, optional
Used in conjunction with mode ‘constant’, the value outside the image boundaries.
clip : bool, optional
Whether to clip the output to the range of values of the input image. This is enabled by default, since higher order interpolation may produce values outside the given input range.
preserve_range : bool, optional
Whether to keep the original range of values. Otherwise, the input image is converted according to the conventions of img_as_float.
is_random : boolean, default False
If True, random swirl.
- random center = [(0 ~ x.shape[0]), (0 ~ x.shape[1])]
- random strength = [0, strength]
- random radius = [1e-10, radius]
- random rotation = [-rotation, rotation]
Examples
---------
>>> x --> [row, col, 1] greyscale
>>> x = swirl(x, strength=4, radius=100)
"""
assert radius != 0, Exception("Invalid radius value")
rotation = np.pi / 180 * rotation
if is_random:
center_h = int(np.random.uniform(0, x.shape[0]))
center_w = int(np.random.uniform(0, x.shape[1]))
center = (center_h, center_w)
strength = np.random.uniform(0, strength)
radius = np.random.uniform(1e-10, radius)
rotation = np.random.uniform(-rotation, rotation)
max_v = np.max(x)
if max_v > 1: # Note: the input of this fn should be [-1, 1], rescale is required.
x = x / max_v
swirled = skimage.transform.swirl(x, center=center, strength=strength, radius=radius, rotation=rotation,
output_shape=output_shape, order=order, mode=mode, cval=cval, clip=clip, preserve_range=preserve_range)
if max_v > 1:
swirled = swirled * max_v
return swirled
def swirl_multi(x, center=None, strength=1, radius=100, rotation=0, output_shape=None, order=1, mode='constant', cval=0, clip=True, preserve_range=False, is_random=False):
"""Swirl multiple images with the same arguments, randomly or non-randomly.
    Usually used for image segmentation, where x=[X, Y] and X and Y should be matched.
Parameters
-----------
x : list of numpy array
List of images with dimension of [n_images, row, col, channel] (default).
others : see ``swirl``.
"""
assert radius != 0, Exception("Invalid radius value")
rotation = np.pi / 180 * rotation
if is_random:
center_h = int(np.random.uniform(0, x[0].shape[0]))
center_w = int(np.random.uniform(0, x[0].shape[1]))
center = (center_h, center_w)
strength = np.random.uniform(0, strength)
radius = np.random.uniform(1e-10, radius)
rotation = np.random.uniform(-rotation, rotation)
results = []
for data in x:
max_v = np.max(data)
if max_v > 1: # Note: the input of this fn should be [-1, 1], rescale is required.
data = data / max_v
swirled = skimage.transform.swirl(data, center=center, strength=strength, radius=radius, rotation=rotation,
output_shape=output_shape, order=order, mode=mode, cval=cval, clip=clip, preserve_range=preserve_range)
if max_v > 1:
swirled = swirled * max_v
results.append( swirled )
return np.asarray(results)
# elastic_transform
from scipy.ndimage.interpolation import map_coordinates
from scipy.ndimage.filters import gaussian_filter
def elastic_transform(x, alpha, sigma, mode="constant", cval=0, is_random=False):
"""Elastic deformation of images as described in `[Simard2003] <http://deeplearning.cs.cmu.edu/pdfs/Simard.pdf>`_ .
Parameters
-----------
x : numpy array, a greyscale image.
alpha : scalar factor.
sigma : scalar or sequence of scalars, the smaller the sigma, the more transformation.
Standard deviation for Gaussian kernel. The standard deviations of the Gaussian filter are given for each axis as a sequence, or as a single number, in which case it is equal for all axes.
mode : default constant, see `scipy.ndimage.filters.gaussian_filter <https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.ndimage.filters.gaussian_filter.html>`_.
cval : float, optional. Used in conjunction with mode ‘constant’, the value outside the image boundaries.
is_random : boolean, default False
Examples
---------
>>> x = elastic_transform(x, alpha = x.shape[1] * 3, sigma = x.shape[1] * 0.07)
References
------------
- `Github <https://gist.github.com/chsasank/4d8f68caf01f041a6453e67fb30f8f5a>`_.
- `Kaggle <https://www.kaggle.com/pscion/ultrasound-nerve-segmentation/elastic-transform-for-data-augmentation-0878921a>`_
"""
if is_random is False:
random_state = np.random.RandomState(None)
else:
random_state = np.random.RandomState(int(time.time()))
#
is_3d = False
if len(x.shape) == 3 and x.shape[-1] == 1:
x = x[:,:,0]
is_3d = True
elif len(x.shape) == 3 and x.shape[-1] != 1:
raise Exception("Only support greyscale image")
assert len(x.shape)==2
shape = x.shape
dx = gaussian_filter((random_state.rand(*shape) * 2 - 1), sigma, mode=mode, cval=cval) * alpha
dy = gaussian_filter((random_state.rand(*shape) * 2 - 1), sigma, mode=mode, cval=cval) * alpha
x_, y_ = np.meshgrid(np.arange(shape[0]), np.arange(shape[1]), indexing='ij')
indices = np.reshape(x_ + dx, (-1, 1)), np.reshape(y_ + dy, (-1, 1))
if is_3d:
return map_coordinates(x, indices, order=1).reshape((shape[0], shape[1], 1))
else:
return map_coordinates(x, indices, order=1).reshape(shape)
def elastic_transform_multi(x, alpha, sigma, mode="constant", cval=0, is_random=False):
"""Elastic deformation of images as described in `[Simard2003] <http://deeplearning.cs.cmu.edu/pdfs/Simard.pdf>`_.
Parameters
-----------
x : list of numpy array
others : see ``elastic_transform``.
"""
if is_random is False:
random_state = np.random.RandomState(None)
else:
random_state = np.random.RandomState(int(time.time()))
shape = x[0].shape
if len(shape) == 3:
shape = (shape[0], shape[1])
new_shape = random_state.rand(*shape)
results = []
for data in x:
is_3d = False
if len(data.shape) == 3 and data.shape[-1] == 1:
data = data[:,:,0]
is_3d = True
elif len(data.shape) == 3 and data.shape[-1] != 1:
raise Exception("Only support greyscale image")
assert len(data.shape)==2
dx = gaussian_filter((new_shape * 2 - 1), sigma, mode=mode, cval=cval) * alpha
dy = gaussian_filter((new_shape * 2 - 1), sigma, mode=mode, cval=cval) * alpha
x_, y_ = np.meshgrid(np.arange(shape[0]), np.arange(shape[1]), indexing='ij')
indices = np.reshape(x_ + dx, (-1, 1)), np.reshape(y_ + dy, (-1, 1))
# print(data.shape)
if is_3d:
results.append( map_coordinates(data, indices, order=1).reshape((shape[0], shape[1], 1)))
else:
results.append( map_coordinates(data, indices, order=1).reshape(shape) )
return np.asarray(results)
# zoom
def zoom(x, zoom_range=(0.9, 1.1), is_random=False, row_index=0, col_index=1, channel_index=2,
fill_mode='nearest', cval=0.):
"""Zoom in and out of a single image, randomly or non-randomly.
Parameters
-----------
x : numpy array
An image with dimension of [row, col, channel] (default).
zoom_range : list or tuple
        - If is_random=False, (h, w) are the fixed zoom factors for the row and column axes; a factor smaller than one zooms in.
        - If is_random=True, (min, max) range from which different random zoom in/out factors are drawn for x and y,
            e.g. (0.5, 1) zooms in 1~2 times.
is_random : boolean, default False
If True, randomly zoom.
row_index, col_index, channel_index : int
Index of row, col and channel, default (0, 1, 2), for theano (1, 2, 0).
fill_mode : string
Method to fill missing pixel, default ‘nearest’, more options ‘constant’, ‘reflect’ or ‘wrap’.
- `scipy ndimage affine_transform <https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.ndimage.interpolation.affine_transform.html>`_
cval : scalar, optional
Value used for points outside the boundaries of the input if mode='constant'. Default is 0.0.
- `scipy ndimage affine_transform <https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.ndimage.interpolation.affine_transform.html>`_
"""
if len(zoom_range) != 2:
raise Exception('zoom_range should be a tuple or list of two floats. '
'Received arg: ', zoom_range)
if is_random:
if zoom_range[0] == 1 and zoom_range[1] == 1:
zx, zy = 1, 1
print(" random_zoom : not zoom in/out")
else:
zx, zy = np.random.uniform(zoom_range[0], zoom_range[1], 2)
else:
zx, zy = zoom_range
# print(zx, zy)
zoom_matrix = np.array([[zx, 0, 0],
[0, zy, 0],
[0, 0, 1]])
h, w = x.shape[row_index], x.shape[col_index]
transform_matrix = transform_matrix_offset_center(zoom_matrix, h, w)
x = apply_transform(x, transform_matrix, channel_index, fill_mode, cval)
return x
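# Usage sketch for ``zoom``:
# >>> x = zoom(x, zoom_range=(0.8, 1.2), is_random=True)   # random zoom in/out
# >>> x = zoom(x, zoom_range=(0.5, 0.5))                    # fixed zoom in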
def zoom_multi(x, zoom_range=(0.9, 1.1), is_random=False,
row_index=0, col_index=1, channel_index=2, fill_mode='nearest', cval=0.):
"""Zoom in and out of images with the same arguments, randomly or non-randomly.
    Usually used for image segmentation, where x=[X, Y] and X and Y should be matched.
Parameters
-----------
x : list of numpy array
List of images with dimension of [n_images, row, col, channel] (default).
others : see ``zoom``.
"""
if len(zoom_range) != 2:
raise Exception('zoom_range should be a tuple or list of two floats. '
'Received arg: ', zoom_range)
if is_random:
if zoom_range[0] == 1 and zoom_range[1] == 1:
zx, zy = 1, 1
print(" random_zoom : not zoom in/out")
else:
zx, zy = np.random.uniform(zoom_range[0], zoom_range[1], 2)
else:
zx, zy = zoom_range
zoom_matrix = np.array([[zx, 0, 0],
[0, zy, 0],
[0, 0, 1]])
h, w = x[0].shape[row_index], x[0].shape[col_index]
transform_matrix = transform_matrix_offset_center(zoom_matrix, h, w)
# x = apply_transform(x, transform_matrix, channel_index, fill_mode, cval)
# return x
results = []
for data in x:
results.append( apply_transform(data, transform_matrix, channel_index, fill_mode, cval))
return np.asarray(results)
# image = tf.image.random_brightness(image, max_delta=32. / 255.)
# image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
# image = tf.image.random_hue(image, max_delta=0.032)
# image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
# brightness
def brightness(x, gamma=1, gain=1, is_random=False):
"""Change the brightness of a single image, randomly or non-randomly.
Parameters
-----------
x : numpy array
An image with dimension of [row, col, channel] (default).
    gamma : float, smaller than 1 means brighter.
Non negative real number. Default value is 1.
- If is_random is True, gamma in a range of (1-gamma, 1+gamma).
gain : float
The constant multiplier. Default value is 1.
is_random : boolean, default False
- If True, randomly change brightness.
References
-----------
- `skimage.exposure.adjust_gamma <http://scikit-image.org/docs/dev/api/skimage.exposure.html>`_
- `chinese blog <http://www.cnblogs.com/denny402/p/5124402.html>`_
"""
if is_random:
gamma = np.random.uniform(1-gamma, 1+gamma)
x = exposure.adjust_gamma(x, gamma, gain)
return x
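# Usage sketch for ``brightness``:
# >>> x = brightness(x, gamma=0.5)                  # fixed gamma < 1, brighter
# >>> x = brightness(x, gamma=0.5, is_random=True)  # gamma drawn from (0.5, 1.5)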
def brightness_multi(x, gamma=1, gain=1, is_random=False):
"""Change the brightness of multiply images, randomly or non-randomly.
Usually be used for image segmentation which x=[X, Y], X and Y should be matched.
Parameters
-----------
x : list of numpy array
List of images with dimension of [n_images, row, col, channel] (default).
others : see ``brightness``.
"""
if is_random:
gamma = np.random.uniform(1-gamma, 1+gamma)
results = []
for data in x:
results.append( exposure.adjust_gamma(data, gamma, gain) )
return np.asarray(results)
# contrast
def constant(x, cutoff=0.5, gain=10, inv=False, is_random=False):
# TODO
x = exposure.adjust_sigmoid(x, cutoff=cutoff, gain=gain, inv=inv)
return x
def constant_multi():
#TODO
pass
# resize
def imresize(x, size=[100, 100], interp='bilinear', mode=None):
"""Resize an image by given output size and method. Warning, this function
will rescale the value to [0, 255].
Parameters
-----------
x : numpy array
An image with dimension of [row, col, channel] (default).
size : int, float or tuple (h, w)
- int, Percentage of current size.
- float, Fraction of current size.
- tuple, Size of the output image.
interp : str, optional
Interpolation to use for re-sizing (‘nearest’, ‘lanczos’, ‘bilinear’, ‘bicubic’ or ‘cubic’).
mode : str, optional
The PIL image mode (‘P’, ‘L’, etc.) to convert arr before resizing.
Returns
--------
imresize : ndarray
The resized array of image.
References
------------
- `scipy.misc.imresize <https://docs.scipy.org/doc/scipy/reference/generated/scipy.misc.imresize.html>`_
"""
if x.shape[-1] == 1:
# greyscale
x = scipy.misc.imresize(x[:,:,0], size, interp=interp, mode=mode)
return x[:, :, np.newaxis]
elif x.shape[-1] == 3:
# rgb, bgr ..
return scipy.misc.imresize(x, size, interp=interp, mode=mode)
else:
raise Exception("Unsupported channel %d" % x.shape[-1])
# normalization
def samplewise_norm(x, rescale=None, samplewise_center=False, samplewise_std_normalization=False,
channel_index=2, epsilon=1e-7):
"""Normalize an image by rescale, samplewise centering and samplewise centering in order.
Parameters
-----------
x : numpy array
An image with dimension of [row, col, channel] (default).
rescale : rescaling factor.
If None or 0, no rescaling is applied, otherwise we multiply the data by the value provided (before applying any other transformation)
samplewise_center : set each sample mean to 0.
samplewise_std_normalization : divide each input by its std.
    epsilon : small positive value for dividing standard deviation.
Examples
--------
>>> x = samplewise_norm(x, samplewise_center=True, samplewise_std_normalization=True)
>>> print(x.shape, np.mean(x), np.std(x))
... (160, 176, 1), 0.0, 1.0
Notes
------
When samplewise_center and samplewise_std_normalization are True.
    - For a greyscale image, every pixel is centered and divided by the mean and std of the whole image.
    - For an RGB image, every pixel is centered and divided by the mean and std of that pixel, i.e. the mean and std of a pixel are 0 and 1.
"""
if rescale:
x *= rescale
if x.shape[channel_index] == 1:
# greyscale
if samplewise_center:
x = x - np.mean(x)
if samplewise_std_normalization:
x = x / np.std(x)
return x
elif x.shape[channel_index] == 3:
# rgb
if samplewise_center:
x = x - np.mean(x, axis=channel_index, keepdims=True)
if samplewise_std_normalization:
x = x / (np.std(x, axis=channel_index, keepdims=True) + epsilon)
return x
else:
raise Exception("Unsupported channels %d" % x.shape[channel_index])
def featurewise_norm(x, mean=None, std=None, epsilon=1e-7):
"""Normalize every pixels by the same given mean and std, which are usually
compute from all examples.
Parameters
-----------
x : numpy array
An image with dimension of [row, col, channel] (default).
mean : value for subtraction.
std : value for division.
    epsilon : small positive value for dividing standard deviation.
"""
if mean:
x = x - mean
if std:
x = x / (std + epsilon)
return x
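# Usage sketch for ``featurewise_norm`` (assumption: X_train is the whole training set,
# so mean/std are computed once and reused for every image):
# >>> mean, std = np.mean(X_train), np.std(X_train)
# >>> x = featurewise_norm(x, mean=mean, std=std)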
# whitening
def get_zca_whitening_principal_components_img(X):
"""Return the ZCA whitening principal components matrix.
Parameters
-----------
x : numpy array
Batch of image with dimension of [n_example, row, col, channel] (default).
"""
flatX = np.reshape(X, (X.shape[0], X.shape[1] * X.shape[2] * X.shape[3]))
print("zca : computing sigma ..")
sigma = np.dot(flatX.T, flatX) / flatX.shape[0]
print("zca : computing U, S and V ..")
U, S, V = linalg.svd(sigma)
print("zca : computing principal components ..")
principal_components = np.dot(np.dot(U, np.diag(1. / np.sqrt(S + 10e-7))), U.T)
return principal_components
def zca_whitening(x, principal_components):
"""Apply ZCA whitening on an image by given principal components matrix.
Parameters
-----------
x : numpy array
An image with dimension of [row, col, channel] (default).
principal_components : matrix from ``get_zca_whitening_principal_components_img``.
"""
    flatx = np.reshape(x, (x.size,))
    # print(principal_components.shape, x.shape)  # e.g. ((28160, 28160), (160, 176, 1))
    # print(flatx.shape)  # e.g. (28160,)
whitex = np.dot(flatx, principal_components)
x = np.reshape(whitex, (x.shape[0], x.shape[1], x.shape[2]))
return x
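# Usage sketch for ZCA whitening (assumption: X_train has shape [n_example, row, col, channel]):
# >>> pc = get_zca_whitening_principal_components_img(X_train)
# >>> x = zca_whitening(X_train[0], pc)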
# developing
# def barrel_transform(x, intensity):
# # https://github.com/fchollet/keras/blob/master/keras/preprocessing/image.py
# # TODO
# pass
#
# def barrel_transform_multi(x, intensity):
# # https://github.com/fchollet/keras/blob/master/keras/preprocessing/image.py
# # TODO
# pass
# channel shift
def channel_shift(x, intensity, is_random=False, channel_index=2):
"""Shift the channels of an image, randomly or non-randomly, see `numpy.rollaxis <https://docs.scipy.org/doc/numpy/reference/generated/numpy.rollaxis.html>`_.
Parameters
-----------
x : numpy array
An image with dimension of [row, col, channel] (default).
intensity : float
Intensity of shifting.
is_random : boolean, default False
If True, randomly shift.
channel_index : int
Index of channel, default 2.
"""
if is_random:
factor = np.random.uniform(-intensity, intensity)
else:
factor = intensity
x = np.rollaxis(x, channel_index, 0)
min_x, max_x = np.min(x), np.max(x)
channel_images = [np.clip(x_channel + factor, min_x, max_x)
for x_channel in x]
x = np.stack(channel_images, axis=0)
x = np.rollaxis(x, 0, channel_index+1)
return x
# x = np.rollaxis(x, channel_index, 0)
# min_x, max_x = np.min(x), np.max(x)
# channel_images = [np.clip(x_channel + np.random.uniform(-intensity, intensity), min_x, max_x)
# for x_channel in x]
# x = np.stack(channel_images, axis=0)
# x = np.rollaxis(x, 0, channel_index+1)
# return x
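# Usage sketch for ``channel_shift`` (randomly shift all channels by up to +/- 0.05):
# >>> x = channel_shift(x, intensity=0.05, is_random=True)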
def channel_shift_multi(x, intensity, is_random=False, channel_index=2):
"""Shift the channels of images with the same arguments, randomly or non-randomly, see `numpy.rollaxis <https://docs.scipy.org/doc/numpy/reference/generated/numpy.rollaxis.html>`_ .
    Usually used for image segmentation, where x=[X, Y] and X and Y should be matched.
Parameters
-----------
x : list of numpy array
List of images with dimension of [n_images, row, col, channel] (default).
others : see ``channel_shift``.
"""
if is_random:
factor = np.random.uniform(-intensity, intensity)
else:
factor = intensity
results = []
for data in x:
data = np.rollaxis(data, channel_index, 0)
min_x, max_x = np.min(data), np.max(data)
        channel_images = [np.clip(x_channel + factor, min_x, max_x)
                          for x_channel in data]
        data = np.stack(channel_images, axis=0)
        data = np.rollaxis(data, 0, channel_index+1)
results.append( data )
return np.asarray(results)
# noise
def drop(x, keep=0.5):
"""Randomly set some pixels to zero by a given keeping probability.
Parameters
-----------
x : numpy array
An image with dimension of [row, col, channel] or [row, col].
keep : float (0, 1)
The keeping probability, the lower more values will be set to zero.
"""
if len(x.shape) == 3:
if x.shape[-1]==3: # color
img_size = x.shape
mask = np.random.binomial(n=1, p=keep, size=x.shape[:-1])
for i in range(3):
x[:,:,i] = np.multiply(x[:,:,i] , mask)
elif x.shape[-1]==1: # greyscale image
img_size = x.shape
x = np.multiply(x , np.random.binomial(n=1, p=keep, size=img_size))
else:
raise Exception("Unsupported shape {}".format(x.shape))
    elif len(x.shape) == 2 or len(x.shape) == 1: # greyscale matrix (image) or vector
img_size = x.shape
x = np.multiply(x , np.random.binomial(n=1, p=keep, size=img_size))
else:
raise Exception("Unsupported shape {}".format(x.shape))
return x
# x = np.asarray([[1,2,3,4,5,6,7,8,9,10],[1,2,3,4,5,6,7,8,9,10]])
# x = np.asarray([x,x,x,x,x,x])
# x.shape = 10, 4, 3
# # print(x)
# # exit()
# print(x.shape)
# # exit()
# print(drop(x, keep=1.))
# exit()
# manual transform
def transform_matrix_offset_center(matrix, x, y):
"""Return transform matrix offset center.
Parameters
----------
matrix : numpy array
Transform matrix
x, y : int
Size of image.
Examples
--------
- See ``rotation``, ``shear``, ``zoom``.
"""
o_x = float(x) / 2 + 0.5
o_y = float(y) / 2 + 0.5
offset_matrix = np.array([[1, 0, o_x], [0, 1, o_y], [0, 0, 1]])
reset_matrix = np.array([[1, 0, -o_x], [0, 1, -o_y], [0, 0, 1]])
transform_matrix = np.dot(np.dot(offset_matrix, matrix), reset_matrix)
return transform_matrix
def apply_transform(x, transform_matrix, channel_index=2, fill_mode='nearest', cval=0.):
"""Return transformed images by given transform_matrix from ``transform_matrix_offset_center``.
Parameters
----------
x : numpy array
Batch of images with dimension of 3, [batch_size, row, col, channel].
transform_matrix : numpy array
Transform matrix (offset center), can be generated by ``transform_matrix_offset_center``
channel_index : int
Index of channel, default 2.
fill_mode : string
Method to fill missing pixel, default ‘nearest’, more options ‘constant’, ‘reflect’ or ‘wrap’
- `scipy ndimage affine_transform <https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.ndimage.interpolation.affine_transform.html>`_
cval : scalar, optional
Value used for points outside the boundaries of the input if mode='constant'. Default is 0.0
- `scipy ndimage affine_transform <https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.ndimage.interpolation.affine_transform.html>`_
Examples
--------
- See ``rotation``, ``shift``, ``shear``, ``zoom``.
"""
x = np.rollaxis(x, channel_index, 0)
final_affine_matrix = transform_matrix[:2, :2]
final_offset = transform_matrix[:2, 2]
channel_images = [ndi.interpolation.affine_transform(x_channel, final_affine_matrix,
final_offset, order=0, mode=fill_mode, cval=cval) for x_channel in x]
x = np.stack(channel_images, axis=0)
x = np.rollaxis(x, 0, channel_index+1)
return x
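# Usage sketch combining the manual transform helpers (a fixed 30 degree rotation,
# equivalent to what ``rotation`` does internally):
# >>> theta = np.pi / 180 * 30
# >>> m = np.array([[np.cos(theta), -np.sin(theta), 0],
# ...               [np.sin(theta),  np.cos(theta), 0],
# ...               [0, 0, 1]])
# >>> m = transform_matrix_offset_center(m, x.shape[0], x.shape[1])
# >>> x = apply_transform(x, m)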
def projective_transform_by_points(x, src, dst, map_args={}, output_shape=None, order=1, mode='constant', cval=0.0, clip=True, preserve_range=False):
"""Projective transform by given coordinates, usually 4 coordinates. see `scikit-image <http://scikit-image.org/docs/dev/auto_examples/applications/plot_geometric.html>`_.
Parameters
-----------
x : numpy array
An image with dimension of [row, col, channel] (default).
src : list or numpy
The original coordinates, usually 4 coordinates of (x, y).
dst : list or numpy
The coordinates after transformation, the number of coordinates is the same with src.
map_args : dict, optional
Keyword arguments passed to inverse_map.
output_shape : tuple (rows, cols), optional
Shape of the output image generated. By default the shape of the input image is preserved. Note that, even for multi-band images, only rows and columns need to be specified.
order : int, optional
The order of interpolation. The order has to be in the range 0-5:
- 0 Nearest-neighbor
- 1 Bi-linear (default)
- 2 Bi-quadratic
- 3 Bi-cubic
- 4 Bi-quartic
- 5 Bi-quintic
mode : {‘constant’, ‘edge’, ‘symmetric’, ‘reflect’, ‘wrap’}, optional
Points outside the boundaries of the input are filled according to the given mode. Modes match the behaviour of numpy.pad.
cval : float, optional
Used in conjunction with mode ‘constant’, the value outside the image boundaries.
clip : bool, optional
Whether to clip the output to the range of values of the input image. This is enabled by default, since higher order interpolation may produce values outside the given input range.
preserve_range : bool, optional
Whether to keep the original range of values. Otherwise, the input image is converted according to the conventions of img_as_float.
Examples
--------
>>> Assume X is an image from CIFAR 10, i.e. shape == (32, 32, 3)
>>> src = [[0,0],[0,32],[32,0],[32,32]]
>>> dst = [[10,10],[0,32],[32,0],[32,32]]
>>> x = projective_transform_by_points(X, src, dst)
References
-----------
- `scikit-image : geometric transformations <http://scikit-image.org/docs/dev/auto_examples/applications/plot_geometric.html>`_
- `scikit-image : examples <http://scikit-image.org/docs/dev/auto_examples/index.html>`_
"""
if type(src) is list: # convert to numpy
src = np.array(src)
if type(dst) is list:
dst = np.array(dst)
if np.max(x)>1: # convert to [0, 1]
x = x/255
m = transform.ProjectiveTransform()
m.estimate(dst, src)
warped = transform.warp(x, m, map_args=map_args, output_shape=output_shape, order=order, mode=mode, cval=cval, clip=clip, preserve_range=preserve_range)
return warped
# Numpy and PIL
def array_to_img(x, dim_ordering=(0,1,2), scale=True):
"""Converts a numpy array to PIL image object (uint8 format).
Parameters
----------
x : numpy array
        An image with dimension of 3 and channels of 1 or 3.
dim_ordering : list or tuple of 3 int
Index of row, col and channel, default (0, 1, 2), for theano (1, 2, 0).
scale : boolean, default is True
If True, converts image to [0, 255] from any range of value like [-1, 2].
References
-----------
- `PIL Image.fromarray <http://pillow.readthedocs.io/en/3.1.x/reference/Image.html?highlight=fromarray>`_
"""
from PIL import Image
# if dim_ordering == 'default':
# dim_ordering = K.image_dim_ordering()
# if dim_ordering == 'th': # theano
# x = x.transpose(1, 2, 0)
x = x.transpose(dim_ordering)
if scale:
x += max(-np.min(x), 0)
x_max = np.max(x)
if x_max != 0:
# print(x_max)
# x /= x_max
x = x / x_max
x *= 255
if x.shape[2] == 3:
# RGB
return Image.fromarray(x.astype('uint8'), 'RGB')
elif x.shape[2] == 1:
# grayscale
return Image.fromarray(x[:, :, 0].astype('uint8'), 'L')
else:
raise Exception('Unsupported channel number: ', x.shape[2])
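# Usage sketch for ``array_to_img`` (assumption: x is [row, col, 1] or [row, col, 3]):
# >>> img = array_to_img(x, scale=True)
# >>> img.save('_temp.png')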
def find_contours(x, level=0.8, fully_connected='low', positive_orientation='low'):
""" Find iso-valued contours in a 2D array for a given level value, returns list of (n, 2)-ndarrays
see `skimage.measure.find_contours <http://scikit-image.org/docs/dev/api/skimage.measure.html#skimage.measure.find_contours>`_ .
Parameters
------------
x : 2D ndarray of double. Input data in which to find contours.
level : float. Value along which to find contours in the array.
fully_connected : str, {‘low’, ‘high’}. Indicates whether array elements below the given level value are to be considered fully-connected (and hence elements above the value will only be face connected), or vice-versa. (See notes below for details.)
positive_orientation : either ‘low’ or ‘high’. Indicates whether the output contours will produce positively-oriented polygons around islands of low- or high-valued elements. If ‘low’ then contours will wind counter-clockwise around elements below the iso-value. Alternately, this means that low-valued elements are always on the left of the contour.
"""
    return skimage.measure.find_contours(x, level, fully_connected=fully_connected, positive_orientation=positive_orientation)
def pt2map(list_points=[], size=(100, 100), val=1):
""" Inputs a list of points, return a 2D image.
Parameters
--------------
list_points : list of [x, y].
size : tuple of (w, h) for output size.
val : float or int for the contour value.
"""
i_m = np.zeros(size)
if list_points == []:
return i_m
for xx in list_points:
for x in xx:
# print(x)
i_m[int(np.round(x[0]))][int(np.round(x[1]))] = val
return i_m
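# Usage sketch: find the iso-valued contours of a greyscale image and rasterise
# them back into a map of the same height and width:
# >>> contours = find_contours(x[:, :, 0], level=0.8)
# >>> m = pt2map(contours, size=x.shape[0:2], val=1)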
def binary_dilation(x, radius=3):
""" Return fast binary morphological dilation of an image.
see `skimage.morphology.binary_dilation <http://scikit-image.org/docs/dev/api/skimage.morphology.html#skimage.morphology.binary_dilation>`_.
Parameters
-----------
x : 2D array image.
radius : int for the radius of mask.
"""
from skimage.morphology import disk, binary_dilation
mask = disk(radius)
    x = binary_dilation(x, selem=mask)
return x
def dilation(x, radius=3):
""" Return greyscale morphological dilation of an image,
see `skimage.morphology.dilation <http://scikit-image.org/docs/dev/api/skimage.morphology.html#skimage.morphology.dilation>`_.
Parameters
-----------
x : 2D array image.
radius : int for the radius of mask.
"""
from skimage.morphology import disk, dilation
mask = disk(radius)
x = dilation(x, selem=mask)
return x
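# Usage sketch for the morphology helpers (assumption: x is a 2D image or binary mask):
# >>> x = dilation(x, radius=3)
# >>> mask = binary_dilation(x, radius=3)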
## Sequence
def pad_sequences(sequences, maxlen=None, dtype='int32', padding='post', truncating='pre', value=0.):
"""Pads each sequence to the same length:
the length of the longest sequence.
If maxlen is provided, any sequence longer
than maxlen is truncated to maxlen.
Truncation happens off either the beginning (default) or
the end of the sequence.
Supports post-padding and pre-padding (default).
Parameters
----------
sequences : list of lists where each element is a sequence
maxlen : int, maximum length
dtype : type to cast the resulting sequence.
padding : 'pre' or 'post', pad either before or after each sequence.
truncating : 'pre' or 'post', remove values from sequences larger than
maxlen either in the beginning or in the end of the sequence
value : float, value to pad the sequences to the desired value.
Returns
----------
x : numpy array with dimensions (number_of_sequences, maxlen)
Examples
----------
>>> sequences = [[1,1,1,1,1],[2,2,2],[3,3]]
>>> sequences = pad_sequences(sequences, maxlen=None, dtype='int32',
... padding='post', truncating='pre', value=0.)
... [[1 1 1 1 1]
... [2 2 2 0 0]
... [3 3 0 0 0]]
"""
lengths = [len(s) for s in sequences]
nb_samples = len(sequences)
if maxlen is None:
maxlen = np.max(lengths)
# take the sample shape from the first non empty sequence
# checking for consistency in the main loop below.
sample_shape = tuple()
for s in sequences:
if len(s) > 0:
sample_shape = np.asarray(s).shape[1:]
break
x = (np.ones((nb_samples, maxlen) + sample_shape) * value).astype(dtype)
for idx, s in enumerate(sequences):
if len(s) == 0:
continue # empty list was found
if truncating == 'pre':
trunc = s[-maxlen:]
elif truncating == 'post':
trunc = s[:maxlen]
else:
raise ValueError('Truncating type "%s" not understood' % truncating)
# check `trunc` has expected shape
trunc = np.asarray(trunc, dtype=dtype)
if trunc.shape[1:] != sample_shape:
raise ValueError('Shape of sample %s of sequence at position %s is different from expected shape %s' %
(trunc.shape[1:], idx, sample_shape))
if padding == 'post':
x[idx, :len(trunc)] = trunc
elif padding == 'pre':
x[idx, -len(trunc):] = trunc
else:
raise ValueError('Padding type "%s" not understood' % padding)
return x
def process_sequences(sequences, end_id=0, pad_val=0, is_shorten=True, remain_end_id=False):
"""Set all tokens(ids) after END token to the padding value, and then shorten (option) it to the maximum sequence length in this batch.
Parameters
-----------
sequences : numpy array or list of list with token IDs.
e.g. [[4,3,5,3,2,2,2,2], [5,3,9,4,9,2,2,3]]
end_id : int, the special token for END.
pad_val : int, replace the end_id and the ids after end_id to this value.
is_shorten : boolean, default True.
Shorten the sequences.
remain_end_id : boolean, default False.
Keep an end_id in the end.
Examples
---------
>>> sentences_ids = [[4, 3, 5, 3, 2, 2, 2, 2], <-- end_id is 2
... [5, 3, 9, 4, 9, 2, 2, 3]] <-- end_id is 2
    >>> sentences_ids = process_sequences(sentences_ids, end_id=vocab.end_id, pad_val=0, is_shorten=True)
... [[4, 3, 5, 3, 0], [5, 3, 9, 4, 9]]
"""
max_length = 0
for i_s, seq in enumerate(sequences):
is_end = False
for i_w, n in enumerate(seq):
if n == end_id and is_end == False: # 1st time to see end_id
is_end = True
if max_length < i_w:
max_length = i_w
if remain_end_id is False:
seq[i_w] = pad_val # set end_id to pad_val
elif is_end == True:
seq[i_w] = pad_val
if remain_end_id is True:
max_length += 1
if is_shorten:
for i, seq in enumerate(sequences):
sequences[i] = seq[:max_length]
return sequences
def sequences_add_start_id(sequences, start_id=0, remove_last=False):
"""Add special start token(id) in the beginning of each sequence.
Examples
---------
>>> sentences_ids = [[4,3,5,3,2,2,2,2], [5,3,9,4,9,2,2,3]]
>>> sentences_ids = sequences_add_start_id(sentences_ids, start_id=2)
... [[2, 4, 3, 5, 3, 2, 2, 2, 2], [2, 5, 3, 9, 4, 9, 2, 2, 3]]
>>> sentences_ids = sequences_add_start_id(sentences_ids, start_id=2, remove_last=True)
... [[2, 4, 3, 5, 3, 2, 2, 2], [2, 5, 3, 9, 4, 9, 2, 2]]
- For Seq2seq
>>> input = [a, b, c]
>>> target = [x, y, z]
>>> decode_seq = [start_id, a, b] <-- sequences_add_start_id(input, start_id, True)
"""
sequences_out = [[] for _ in range(len(sequences))]#[[]] * len(sequences)
for i in range(len(sequences)):
if remove_last:
sequences_out[i] = [start_id] + sequences[i][:-1]
else:
sequences_out[i] = [start_id] + sequences[i]
return sequences_out
def sequences_get_mask(sequences, pad_val=0):
"""Return mask for sequences.
Examples
---------
>>> sentences_ids = [[4, 0, 5, 3, 0, 0],
... [5, 3, 9, 4, 9, 0]]
>>> mask = sequences_get_mask(sentences_ids, pad_val=0)
... [[1 1 1 1 0 0]
... [1 1 1 1 1 0]]
"""
mask = np.ones_like(sequences)
for i, seq in enumerate(sequences):
for i_w in reversed(range(len(seq))):
if seq[i_w] == pad_val:
mask[i, i_w] = 0
else:
                break # <-- exit the for loop, process next sequence
return mask
## Text
# see tensorlayer.nlp
## Tensor Opt
def distorted_images(images=None, height=24, width=24):
"""Distort images for generating more training data.
Features
---------
They are cropped to height * width pixels randomly.
They are approximately whitened to make the model insensitive to dynamic range.
Randomly flip the image from left to right.
Randomly distort the image brightness.
Randomly distort the image contrast.
Whiten (Normalize) the images.
Parameters
----------
images : 4D Tensor
The tensor or placeholder of images
height : int
The height for random crop.
width : int
The width for random crop.
Returns
-------
result : tuple of Tensor
(Tensor for distorted images, Tensor for while loop index)
Examples
--------
>>> X_train, y_train, X_test, y_test = tl.files.load_cifar10_dataset(shape=(-1, 32, 32, 3), plotable=False)
>>> sess = tf.InteractiveSession()
>>> batch_size = 128
>>> x = tf.placeholder(tf.float32, shape=[batch_size, 32, 32, 3])
>>> distorted_images_op = tl.preprocess.distorted_images(images=x, height=24, width=24)
>>> sess.run(tf.initialize_all_variables())
>>> feed_dict={x: X_train[0:batch_size,:,:,:]}
>>> distorted_images, idx = sess.run(distorted_images_op, feed_dict=feed_dict)
>>> tl.visualize.images2d(X_train[0:9,:,:,:], second=2, saveable=False, name='cifar10', dtype=np.uint8, fig_idx=20212)
>>> tl.visualize.images2d(distorted_images[1:10,:,:,:], second=10, saveable=False, name='distorted_images', dtype=None, fig_idx=23012)
Notes
------
- The first image in 'distorted_images' should be removed.
References
-----------
- `tensorflow.models.image.cifar10.cifar10_input <https://github.com/tensorflow/tensorflow/blob/r0.9/tensorflow/models/image/cifar10/cifar10_input.py>`_
"""
print("This function is deprecated, please use tf.map_fn instead, e.g:\n \
t_image = tf.map_fn(lambda img: tf.image.random_brightness(img, max_delta=32. / 255.), t_image)\n \
t_image = tf.map_fn(lambda img: tf.image.random_contrast(img, lower=0.5, upper=1.5), t_image)\n \
t_image = tf.map_fn(lambda img: tf.image.random_saturation(img, lower=0.5, upper=1.5), t_image)\n \
t_image = tf.map_fn(lambda img: tf.image.random_hue(img, max_delta=0.032), t_image)")
exit()
# print(" [Warning] distorted_images will be deprecated due to speed, see TFRecord tutorial for more info...")
try:
batch_size = int(images._shape[0])
except:
        raise Exception('unknown batch_size of images')
distorted_x = tf.Variable(tf.constant(0.1, shape=[1, height, width, 3]))
i = tf.Variable(tf.constant(0))
c = lambda distorted_x, i: tf.less(i, batch_size)
def body(distorted_x, i):
# 1. Randomly crop a [height, width] section of the image.
image = tf.random_crop(tf.gather(images, i), [height, width, 3])
# 2. Randomly flip the image horizontally.
image = tf.image.random_flip_left_right(image)
# 3. Randomly change brightness.
image = tf.image.random_brightness(image, max_delta=63)
# 4. Randomly change contrast.
image = tf.image.random_contrast(image, lower=0.2, upper=1.8)
# 5. Subtract off the mean and divide by the variance of the pixels.
image = tf.image.per_image_whitening(image)
# 6. Append the image to a batch.
image = tf.expand_dims(image, 0)
return tf.concat(0, [distorted_x, image]), tf.add(i, 1)
result = tf.while_loop(cond=c, body=body, loop_vars=(distorted_x, i), parallel_iterations=16)
return result
def crop_central_whiten_images(images=None, height=24, width=24):
"""Crop the central of image, and normailize it for test data.
They are cropped to central of height * width pixels.
Whiten (Normalize) the images.
Parameters
----------
images : 4D Tensor
The tensor or placeholder of images
height : int
The height for central crop.
width : int
The width for central crop.
Returns
-------
    result : tuple of Tensor
(Tensor for distorted images, Tensor for while loop index)
Examples
--------
>>> X_train, y_train, X_test, y_test = tl.files.load_cifar10_dataset(shape=(-1, 32, 32, 3), plotable=False)
>>> sess = tf.InteractiveSession()
>>> batch_size = 128
>>> x = tf.placeholder(tf.float32, shape=[batch_size, 32, 32, 3])
>>> central_images_op = tl.preprocess.crop_central_whiten_images(images=x, height=24, width=24)
>>> sess.run(tf.initialize_all_variables())
>>> feed_dict={x: X_train[0:batch_size,:,:,:]}
>>> central_images, idx = sess.run(central_images_op, feed_dict=feed_dict)
>>> tl.visualize.images2d(X_train[0:9,:,:,:], second=2, saveable=False, name='cifar10', dtype=np.uint8, fig_idx=20212)
>>> tl.visualize.images2d(central_images[1:10,:,:,:], second=10, saveable=False, name='central_images', dtype=None, fig_idx=23012)
Notes
------
The first image in 'central_images' should be removed.
Code References
----------------
- ``tensorflow.models.image.cifar10.cifar10_input``
"""
print("This function is deprecated, please use tf.map_fn instead, e.g:\n \
t_image = tf.map_fn(lambda img: tf.image.random_brightness(img, max_delta=32. / 255.), t_image)\n \
t_image = tf.map_fn(lambda img: tf.image.random_contrast(img, lower=0.5, upper=1.5), t_image)\n \
t_image = tf.map_fn(lambda img: tf.image.random_saturation(img, lower=0.5, upper=1.5), t_image)\n \
t_image = tf.map_fn(lambda img: tf.image.random_hue(img, max_delta=0.032), t_image)")
exit()
# print(" [Warning] crop_central_whiten_images will be deprecated due to speed, see TFRecord tutorial for more info...")
try:
batch_size = int(images._shape[0])
except:
        raise Exception('unknown batch_size of images')
central_x = tf.Variable(tf.constant(0.1, shape=[1, height, width, 3]))
i = tf.Variable(tf.constant(0))
c = lambda central_x, i: tf.less(i, batch_size)
def body(central_x, i):
# 1. Crop the central [height, width] of the image.
image = tf.image.resize_image_with_crop_or_pad(tf.gather(images, i), height, width)
# 2. Subtract off the mean and divide by the variance of the pixels.
image = tf.image.per_image_whitening(image)
# 5. Append the image to a batch.
image = tf.expand_dims(image, 0)
return tf.concat(0, [central_x, image]), tf.add(i, 1)
result = tf.while_loop(cond=c, body=body, loop_vars=(central_x, i), parallel_iterations=16)
return result
#
| zjuela/LapSRN-tensorflow | tensorlayer/prepro.py | Python | apache-2.0 | 62,978 | 0.005096 |
#!/usr/bin/env python
import math
import time
import naoqi
import motion
from naoqi import ALProxy
import roslib
roslib.load_manifest('nao_driver')
import rospy
use_robot_state_publisher = False
if use_robot_state_publisher:
from robot_state.msg import RobotState
from scan_pose_bending import scanPoseBending
from camera_walk_pose import cameraWalkPose
def head_scan(motionProxy,headPitchStart=26.0,headPitchEnd=-10.0,angularResolution=0.5,laserScanFreq=10,use_scan_pose=False):
start_time = rospy.Time.now()
if(use_scan_pose):
rospy.loginfo("HeadscanTop IS using scan_pose_new")
else:
rospy.loginfo("HeadscanTop IS NOT using scan_pose_new")
if use_robot_state_publisher:
robotStatePub = rospy.Publisher("robot_state", RobotState)
rospy.sleep(0.5)
robot_state = RobotState()
# TODO: check limits of head and compare to headPitchEnd and headPitchStart
headPitchStart = headPitchStart * math.pi/180.0
headPitchEnd = headPitchEnd * math.pi/180.0
headPitchInflectionPoint = headPitchEnd + (20.0*math.pi/180.0) # go further up than final scan pose
degreePerSecond = angularResolution* laserScanFreq*0.33
radiansPerSecond = degreePerSecond*math.pi/180.0
#print("Radians per second: " + str(radiansPerSecond))
#print("Degree per second: " + str(degreePerSecond))
head_pitch_before_scan = motionProxy.getAngles("HeadPitch",True)[0]
head_pitch_camera_learning = 22.0*math.pi/180.0
if use_scan_pose:
scanPoseBending(motionProxy,head_pitch_before_scan,0.0,2)
#motionProxy.stiffnessInterpolation('Body',1.0, 0.5)
# compute some times
t = abs(head_pitch_before_scan - headPitchStart)/radiansPerSecond
t2 = abs(headPitchStart - headPitchEnd)/radiansPerSecond
t3 = abs(headPitchEnd - head_pitch_camera_learning)/radiansPerSecond
# set yaw to 0.0 and pitch to headPitchStart
motionProxy.angleInterpolation('Head', [0.0, headPitchStart], t/4.0, True)
if use_robot_state_publisher:
robot_state.stamp = rospy.Time.now()
robot_state.id = RobotState.SCANNING
robotStatePub.publish(robot_state)
# HACK to make nicer floor scans. TODO: improve me!
angles = list()
times = list()
for i in range(0,9):
angles.append( headPitchStart + (i+1)*(headPitchEnd - headPitchStart)/10.0)
tt = (i+1)*t2/10.0
times.append( tt*tt/t2 )
rospy.logdebug("using times %s"%(str(times)))
rospy.logdebug("using angles %s"%(str(angles)))
motionProxy.angleInterpolation('HeadPitch', angles, times, True)
#motionProxy.angleInterpolation('Head', [0.0, headPitchEnd], t2, True)
if use_robot_state_publisher:
rospy.sleep(0.5)
robot_state = RobotState()
robot_state.stamp = rospy.Time.now()
robot_state.id = RobotState.SCAN_COMPLETE
rospy.sleep(0.5)
    # adopt final pose. TODO: this should not necessarily be cameraWalkPose
if use_scan_pose:
cameraWalkPose(motionProxy,head_pitch_camera_learning,0.0)
#else:
#t4 = abs(head_pitch_camera_learning - head_pitch_after_scan)/radiansPerSecond
#head_pitch_after_scan = head_pitch_before_scan
#head_pitch_after_scan = 20.0*math.pi/180.0
#motionProxy.angleInterpolation('Head', [0.0, head_pitch_after_scan], t4, True)
if use_robot_state_publisher:
# wait to trigger point cloud assembler and image classifier
rospy.sleep(0.5)
robotStatePub.publish(robot_state)
dur = rospy.Time.now() - start_time
rospy.loginfo("Scan took %f seconds."%(dur.to_sec()))
if __name__ == '__main__':
from optparse import OptionParser
rospy.init_node('headscan')
parser = OptionParser()
parser.add_option("--pip", dest="pip", default="127.0.0.1",
help="IP/hostname of parent broker. Default is 127.0.0.1.", metavar="IP")
parser.add_option("--pport", dest="pport", default=9559,
help="port of parent broker. Default is 9559.", metavar="PORT")
parser.add_option("--s", dest="headpitchstart", default=27.0,
help="start head pitch", metavar="START")
parser.add_option("--e", dest="headpitchend", default=-10.0,
help="end head pitch (negative angles are towards sky)",
metavar="END")
parser.add_option("--speed", dest="headpitchspeed", default=10,
help="head pitch speed(degree per second)", metavar="SPEED")
(options, args) = parser.parse_args()
headPitchStart = float(options.headpitchstart )
headPitchEnd = float(options.headpitchend)
# TODO: useless..
pitch_speed = float(options.headpitchspeed)
angularResolution = 1
laserScanFreq = pitch_speed
try:
motionProxy = ALProxy("ALMotion",options.pip,int(options.pport))
except RuntimeError,e:
print("Error creating Proxy to motion, exiting...")
print e
exit(1)
try:
ttsProxy = ALProxy("ALTextToSpeech",options.pip,int(options.pport))
except RuntimeError,e:
print("Error creating Proxy to tts ...")
print e
head_scan(motionProxy,headPitchStart,headPitchEnd,angularResolution,laserScanFreq,False)
print("Scan completed...")
exit(0)
| miguelsdc/nao_robot | nao_driver/scripts/headscanTop.py | Python | bsd-3-clause | 5,281 | 0.013634 |
from django.contrib.auth.models import Group, User
from django.db import models
from django.utils.translation import gettext_lazy as _
class GroupContact(models.Model):
"""
    Groups displayed on the contact page and their information.
"""
class Meta:
verbose_name = _("Groupe de la page de contact")
verbose_name_plural = _("Groupes de la page de contact")
group = models.OneToOneField(Group, verbose_name=_("Groupe d'utilisateur"), on_delete=models.CASCADE)
name = models.CharField(_("Nom (ex: Le staff)"), max_length=32, unique=True)
description = models.TextField(_("Description (en markdown)"), blank=True, null=True)
email = models.EmailField(_("Adresse mail du groupe"), blank=True, null=True)
persons_in_charge = models.ManyToManyField(User, verbose_name=_("Responsables"), blank=True)
position = models.PositiveSmallIntegerField(_("Position dans la page"), unique=True)
def __str__(self):
return self.name
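# Hypothetical usage sketch (added; the group name and email below are placeholders,
# not values from this project):
#
#   from django.contrib.auth.models import Group
#   staff = Group.objects.get(name="Staff")
#   GroupContact.objects.create(group=staff, name="Le staff",
#                               email="staff@example.com", position=1)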
| ChantyTaguan/zds-site | zds/pages/models.py | Python | gpl-3.0 | 982 | 0.00611 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-10-05 09:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('explorer', '0006_auto_20181004_1159'),
]
operations = [
migrations.AddField(
model_name='fieldvalue',
name='column_name',
field=models.CharField(blank=True, max_length=80, null=True),
),
]
| muccg/rdrf | rdrf/explorer/migrations/0007_fieldvalue_column_name.py | Python | agpl-3.0 | 482 | 0 |
import matplotlib.pyplot as plt
import numpy as np
import sklearn
from sklearn import svm, datasets
from sklearn.metrics import precision_recall_curve
from sklearn.metrics import average_precision_score
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import label_binarize
from sklearn.multiclass import OneVsRestClassifier
import pdb
from sklearn import ensemble
from sklearn import neighbors
from sklearn import tree
#import data
data_path = "joined_matrix_split.txt"
mat = np.loadtxt(data_path)
features = mat[50000:60000, 0:40]
features = sklearn.preprocessing.scale(features, axis=1)
output_raw = mat[50000:60000, -1]
output = sklearn.preprocessing.binarize(output_raw)
# Split into training and test
random_state = np.random.RandomState(0)
X_train, X_test, y_train, y_test = train_test_split(features, output, test_size=.5,
random_state=random_state)
n_classes = 1
#run classifier
classifier = OneVsRestClassifier(svm.SVC(kernel='linear', probability=True,
random_state=random_state))
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision = dict()
recall = dict()
average_precision = dict()
precision[0], recall[0], _ = precision_recall_curve(y_test, y_score)
average_precision[0] = average_precision_score(y_test, y_score)
# now do rbf kernel
classifier = OneVsRestClassifier(svm.SVC(kernel='rbf', probability=True, random_state=random_state))
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision[1], recall[1], _ = precision_recall_curve(y_test, y_score)
average_precision[1] = average_precision_score(y_test, y_score)
# now do adaboost
model = ensemble.AdaBoostClassifier()
classifier = OneVsRestClassifier(model)
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision[2], recall[2], _ = precision_recall_curve(y_test, y_score)
average_precision[2] = average_precision_score(y_test, y_score)
"""
pdb.set_trace()
# now do kNN classifier
model = neighbors.KNeighborsClassifier()
classifier = OneVsRestClassifier(model)
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision[3], recall[3], _ = precision_recall_curve(y_test, y_score)
average_precision[3] = average_precision_score(y_test, y_score)
# now do random forrest
model = ensemble.RandomForestClassifier()
classifier = OneVsRestClassifier(model)
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision[4], recall[4], _ = precision_recall_curve(y_test, y_score)
average_precision[4] = average_precision_score(y_test, y_score)
# now do decision trees
model = tree.DecisionTreeClassifier()
classifier = OneVsRestClassifier(model)
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision[5], recall[5], _ = precision_recall_curve(y_test, y_score)
average_precision[5] = average_precision_score(y_test, y_score)
# Plot Precision-Recall curve
#plt.clf()
#plt.plot(recall[0], precision[0], label='Precision-Recall curve')
#plt.xlabel('Recall')
#plt.ylabel('Precision')
#plt.ylim([0.0, 1.05])
#plt.xlim([0.0, 1.0])
#plt.title('Linear SVC Precision vs. Recall: AUC={0:0.2f}'.format(average_precision[0]))
#plt.legend(loc="lower left")
#plt.show()
"""
kernel = {}
kernel[0] = "linear SVC"
kernel[1] = "rbf SVC"
kernel[2] = "AdaBoost classifier"
#kernel[3] = "k-nearest-neighbors classifier"
#kernel[4] = "random forest classifier"
#kernel[5] = "decision tree classifier"
# Plot Precision-Recall curve for each class
plt.clf()
for i in range(3):
plt.plot(recall[i], precision[i],
label='Precision-recall curve of {0} (area = {1:0.2f})'
''.format(kernel[i], average_precision[i]))
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.title('Classification on aggregate crime; precision vs. recall')
plt.legend(loc="lower right")
plt.show()
| JamesWo/cs194-16-data_manatees | precision_recall.py | Python | apache-2.0 | 4,204 | 0.002617 |
#!/usr/bin/env python2.6
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2010,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" A nose plugin to shunt rebuilding the database all the time """
import os
import logging
log = logging.getLogger('nose.plugins.aqdb_plugin')
#Assume that the caller will already have nose available
import nose
#Set this environment variable to trigger the no_rebuild effect
ENVIRONMENT_VARNAME = 'AQDB_NOREBUILD'
class DontRebuildPlugin(nose.plugins.Plugin):
""" Custom plugin to shunt rebuilding the database during iterative
development cycles of aquilon components. """
def options(self, parser, env):
parser.add_option('--no-rebuild',
action='store_true',
dest='no_rebuild',
default=False,
help="Don't rebuild the database")
def configure(self, options, conf):
        #if the environment variable is already set, skip enabling and return
#(ignore the plugin if the variable already exists)
if ENVIRONMENT_VARNAME in os.environ.keys():
log.info('found %s in the environment, skipping db rebuild ' % (
ENVIRONMENT_VARNAME))
return
if options.no_rebuild:
self.enabled = True
os.environ[ENVIRONMENT_VARNAME] = 'enabled'
log.debug("Plugin says don't rebuild the database")
else:
log.debug("Plugin says to rebuild the database")
def begin(self):
log.debug("DontRebuildPlugin.begin()")
def reset(self):
log.debug("DontRebuildPlugin.end()")
os.environ[ENVIRONMENT_VARNAME] = ''
#if __name__ == '__main__':
#TESTING:
# from nose.plugins.plugintest import run_buffered as run
# import unittest
# suite=unittest.TestSuite([])
# argv=['-v', '--no-rebuild']
# nose.plugins.plugintest.run(argv=argv,
# suite=suite, plugins=[DontRebuildPlugin()])
#RUNNING:
# nose.main(addplugins=[DontRebuildPlugin()])
| stdweird/aquilon | tests/aqdb/aqdb_nose_plugin.py | Python | apache-2.0 | 2,638 | 0.003033 |
import datetime
import bleach
import re
months = {
1: "jan",
2: "feb",
3: "mars",
4: ["apr\u00edl", "apr"],
5: "ma\u00ed",
6: "j\u00fan\u00ed",
7: "j\u00fal\u00ed",
8: "\u00e1g\u00fast",
9: "sept",
10: "okt",
11: "n\u00f3v",
12: "des",
}
def decode_date_string(date_string=None):
"""
Handles parsing abbreviated localised date strings into an ISO8601 compatible timestamp.
Args:
date_string (:obj: `string`, optional): unicode string with the extracted date
Examples:
>>> decode_date_string(None)
>>> decode_date_string('27. apr\u00edl 2011')
'2011-04-27'
>>> decode_date_string('1. ma\u00ed.')
'2020-05-01'
"""
if date_string is None:
# for when the input is None, we simply return
return
# remove any extra spaces
date_string = date_string.strip(" ")
regex = re.compile(
r"(?P<date>\d+). (?P<month>\w+)([. ]+)?(?P<year>\d+)?", re.UNICODE
)
match = regex.match(date_string)
date = int(match.group("date"))
month = translate_month(match.group("month"))
year = match.group("year")
# in some instances we don't have a specified year, it's assumed to be obvious in the context of the listing
# just use the current year as a shortcut for now
# TODO: make this work for dates that wrap over a year boundary
if year is None:
year = datetime.datetime.utcnow().year
return "{}-{:02}-{:02}".format(year, month, date)
def translate_month(month):
"""
Translates the month string into an integer value
Args:
month (unicode): month string parsed from the website listings.
Returns:
int: month index starting from 1
Examples:
>>> translate_month('jan')
1
"""
for key, values in months.items():
if month in values:
return key
def clean_html(input_html):
allowed_tags = bleach.sanitizer.ALLOWED_TAGS + ["br", "p"]
cleaner = bleach.sanitizer.Cleaner(
tags=allowed_tags, strip=True, strip_comments=True
)
cleaned_lines = []
if isinstance(input_html, (list, tuple)):
for line in input_html:
cleaned_lines.append(cleaner.clean(line).strip())
elif isinstance(input_html, str):
cleaned_lines.append(cleaner.clean(input_html).strip())
return "<br>".join(cleaned_lines)
| multiplechoice/workplace | jobs/common.py | Python | apache-2.0 | 2,436 | 0.003284 |
#!/usr/bin/python3
# This file is part of Epoptes, http://epoptes.org
# Copyright 2012-2018 the Epoptes team, see AUTHORS.
# SPDX-License-Identifier: GPL-3.0-or-later
"""
Display a simple window with a message.
"""
import os
import sys
from _common import gettext as _
from gi.repository import Gtk
class MessageWindow(Gtk.Window):
"""Display a simple window with a message."""
def __init__(self, text, title="Epoptes", markup=True,
icon_name="dialog-information"):
super().__init__(title=title, icon_name=icon_name)
self.set_position(Gtk.WindowPosition.CENTER)
grid = Gtk.Grid(column_spacing=10, row_spacing=10, margin=10)
self.add(grid)
image = Gtk.Image.new_from_icon_name(icon_name, Gtk.IconSize.DIALOG)
grid.add(image)
# Always load the plain text first in case the markup parsing fails
label = Gtk.Label(
label=text, selectable=True, hexpand=True, vexpand=True,
halign=Gtk.Align.START, valign=Gtk.Align.START)
if markup:
label.set_markup(text)
grid.add(label)
button = Gtk.Button.new_from_stock(Gtk.STOCK_CLOSE)
button.set_hexpand(False)
button.set_halign(Gtk.Align.END)
button.connect("clicked", Gtk.main_quit)
grid.attach(button, 1, 1, 2, 1)
self.set_focus_child(button)
accelgroup = Gtk.AccelGroup()
key, modifier = Gtk.accelerator_parse('Escape')
accelgroup.connect(
key, modifier, Gtk.AccelFlags.VISIBLE, Gtk.main_quit)
self.add_accel_group(accelgroup)
def main():
"""Run the module from the command line."""
if len(sys.argv) <= 1 or len(sys.argv) > 5:
print(_("Usage: {} text [title] [markup] [icon_name]").format(
os.path.basename(__file__)), file=sys.stderr)
exit(1)
text = sys.argv[1]
if len(sys.argv) > 2 and sys.argv[2]:
title = sys.argv[2]
else:
title = "Epoptes"
if len(sys.argv) > 3 and sys.argv[3]:
markup = sys.argv[3].lower() == "true"
else:
markup = True
if len(sys.argv) > 4:
icon_name = sys.argv[4]
else:
icon_name = "dialog-information"
window = MessageWindow(text, title, markup, icon_name)
window.connect("destroy", Gtk.main_quit)
window.show_all()
Gtk.main()
if __name__ == '__main__':
main()
| Epoptes/epoptes | epoptes-client/message.py | Python | gpl-3.0 | 2,401 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Author',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=256)),
('last_name', models.CharField(max_length=256)),
('position', models.CharField(max_length=512, null=True, blank=True)),
('twitter_handle', models.CharField(max_length=60, null=True, blank=True)),
('short_bio', models.TextField(null=True, blank=True)),
('website', models.CharField(max_length=256, null=True, blank=True)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Event',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('slug', models.SlugField(unique=True, max_length=255, blank=True)),
('start_date', models.DateTimeField()),
('end_date', models.DateTimeField()),
('location', models.TextField()),
('title', models.TextField()),
('content', models.TextField()),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='FAQ',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('question', models.TextField()),
('answer', models.TextField()),
('weight', models.IntegerField(default=0)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='MediaContact',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=256)),
('position', models.CharField(max_length=256)),
('company', models.CharField(max_length=256)),
('telephone', models.CharField(max_length=40)),
('email', models.CharField(max_length=256)),
('display_on_contact', models.BooleanField(default=True)),
('weight', models.IntegerField(default=0)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Member',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=256)),
('last_name', models.CharField(max_length=256)),
('position', models.CharField(max_length=512, null=True, blank=True)),
('twitter_handle', models.CharField(max_length=60, null=True, blank=True)),
('short_bio', models.TextField(null=True, blank=True)),
('website', models.CharField(max_length=256, null=True, blank=True)),
('member_type', models.CharField(default=b'general', max_length=20, choices=[(b'chair', b'chair'), (b'general', b'general'), (b'supporting', b'supporting'), (b'research_adviser', b'research adviser')])),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Partner',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=256)),
('short_name', models.CharField(max_length=30, null=True, blank=True)),
('website', models.URLField(null=True, blank=True)),
('website_display', models.CharField(max_length=256, null=True, blank=True)),
('description', models.TextField(null=True, blank=True)),
('press_description', models.TextField(null=True, blank=True)),
('logo', models.ImageField(null=True, upload_to=b'partner_logos', blank=True)),
('weight', models.IntegerField(default=0)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='PressRelease',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('slug', models.SlugField(unique=True, max_length=255, blank=True)),
('release_date', models.DateTimeField()),
('location', models.TextField()),
('title', models.TextField()),
('content', models.TextField()),
('release_tag', models.TextField(default=b'For immediate release')),
('end_tag', models.CharField(default=b'-30-', max_length=20)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='PressReleaseFooter',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=256)),
('media_contacts', models.ManyToManyField(to='commission.MediaContact', null=True, blank=True)),
('partners', models.ManyToManyField(to='commission.Partner', null=True, blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Publication',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('slug', models.SlugField(unique=True, max_length=255, blank=True)),
('title', models.TextField()),
('description', models.TextField()),
('publish_date', models.DateField()),
('document', models.FileField(null=True, upload_to=b'publications', blank=True)),
('document_link', models.URLField(null=True, blank=True)),
('document_link_title', models.CharField(default=b'Download PDF', max_length=128)),
('image', models.ImageField(null=True, upload_to=b'publications/images', blank=True)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='PublicationAuthor',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('weight', models.PositiveIntegerField(default=0)),
('author', models.ForeignKey(related_name=b'ordered_authors', to='commission.Author')),
('publication', models.ForeignKey(to='commission.Publication')),
],
options={
'ordering': ('weight',),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='PublicationType',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=40)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Supporter',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=255)),
('website', models.URLField(null=True, blank=True)),
('weight', models.IntegerField(default=0)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Video',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('slug', models.SlugField(unique=True, max_length=255, blank=True)),
('video_id', models.CharField(max_length=1024)),
('title', models.CharField(max_length=1024)),
('description', models.TextField()),
('weight', models.PositiveIntegerField(default=0)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.AddField(
model_name='publication',
name='authors',
field=models.ManyToManyField(to='commission.Author', null=True, through='commission.PublicationAuthor', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='publication',
name='type',
field=models.ForeignKey(to='commission.PublicationType'),
preserve_default=True,
),
migrations.AddField(
model_name='pressrelease',
name='footer',
field=models.ForeignKey(blank=True, to='commission.PressReleaseFooter', null=True),
preserve_default=True,
),
migrations.AddField(
model_name='member',
name='partner',
field=models.ForeignKey(blank=True, to='commission.Partner', null=True),
preserve_default=True,
),
migrations.AddField(
model_name='author',
name='partner',
field=models.ForeignKey(blank=True, to='commission.Partner', null=True),
preserve_default=True,
),
]
| Ourinternet/website | commission/migrations/0001_initial.py | Python | mit | 10,354 | 0.003767 |
import os
import sys
from optparse import OptionParser
from jobTree.src.bioio import logger, setLoggingFromOptions
from jobTree.scriptTree.stack import Stack
from jobTree.scriptTree.target import Target
from margin.utils import pathToBaseNanoporeDir
from margin.marginCallerLib import marginCallerTargetFn
def main():
#Parse the inputs args/options
parser = OptionParser(usage="usage: inputSamFile referenceFastaFile outputVcfFile [options]",
version="%prog 0.1")
#Options
parser.add_option("--noMargin", dest="noMargin", help="Do not marginalise over the read \
alignments, rather use the input alignment to call the variants (this will be faster)",
default=False, action="store_true")
parser.add_option("--alignmentModel", default=os.path.join(pathToBaseNanoporeDir(),
"src", "margin", "mappers", "last_hmm_20.txt"),
help="The model to use in realigning the reads to the reference.")
parser.add_option("--errorModel", default=os.path.join(pathToBaseNanoporeDir(),
"src", "margin", "mappers", "last_hmm_20.txt"),
help="The model to use in calculating the difference between the predicted true reference and the reads.")
parser.add_option("--maxAlignmentLengthPerJob", default=7000000,
help="Maximum total alignment length of alignments to include in one posterior prob calculation job.",
type=int)
parser.add_option("--threshold", default=0.3,
help="The posterior probability threshold for a non-reference base above which to report a variant.",
type=float)
#Add the jobTree options
Stack.addJobTreeOptions(parser)
#Parse the options/arguments
options, args = parser.parse_args()
#Setup logging
setLoggingFromOptions(options)
#Print help message if no input
if len(sys.argv) == 1:
parser.print_help()
sys.exit(0)
#Exit if the arguments are not what we expect
if len(args) != 3:
raise RuntimeError("Expected three arguments, got: %s" % " ".join(args))
print options.errorModel
print options.threshold
#This line invokes jobTree
i = Stack(Target.makeTargetFn(fn=marginCallerTargetFn, args=(args[0], args[1], args[2], options))).startJobTree(options)
#The return value of the jobtree script is the number of failed jobs. If we have any then
#report this.
if i != 0:
raise RuntimeError("Got failed jobs")
if __name__ == '__main__':
from margin.marginCaller import *
main()
| isovic/marginAlign | src/margin/marginCaller.py | Python | mit | 2,779 | 0.020511 |
#!/usr/bin/env python
# Copyright 2019 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import copy
import tempfile
from pyscf import lib, gto, scf
from pyscf.tools import cubegen
mol = gto.Mole()
mol.atom = '''
O 0.00000000, 0.000000, 0.119748
H 0.00000000, 0.761561, -0.478993
H 0.00000000, -0.761561, -0.478993 '''
mol.verbose = 0
mol.build()
mf = scf.RHF(mol).run()
def tearDownModule():
global mol, mf
del mol, mf
class KnownValues(unittest.TestCase):
def test_mep(self):
ftmp = tempfile.NamedTemporaryFile()
mep = cubegen.mep(mol, ftmp.name, mf.make_rdm1(),
nx=10, ny=10, nz=10)
self.assertEqual(mep.shape, (10,10,10))
self.assertAlmostEqual(lib.finger(mep), -0.3198103636180436, 9)
mep = cubegen.mep(mol, ftmp.name, mf.make_rdm1(),
nx=10, ny=10, nz=10, resolution=0.5)
self.assertEqual(mep.shape, (12,18,15))
self.assertAlmostEqual(lib.finger(mep), -4.653995909548524, 9)
def test_orb(self):
ftmp = tempfile.NamedTemporaryFile()
orb = cubegen.orbital(mol, ftmp.name, mf.mo_coeff[:,0],
nx=10, ny=10, nz=10)
self.assertEqual(orb.shape, (10,10,10))
self.assertAlmostEqual(lib.finger(orb), -0.11804191128016768, 9)
orb = cubegen.orbital(mol, ftmp.name, mf.mo_coeff[:,0],
nx=10, ny=10, nz=10, resolution=0.5)
self.assertEqual(orb.shape, (12,18,15))
self.assertAlmostEqual(lib.finger(orb), -0.8591778390706646, 9)
orb = cubegen.orbital(mol, ftmp.name, mf.mo_coeff[:,0],
nx=10, ny=1, nz=1)
self.assertEqual(orb.shape, (10,1,1))
self.assertAlmostEqual(lib.finger(orb), 6.921008881822988e-09, 9)
def test_rho(self):
ftmp = tempfile.NamedTemporaryFile()
rho = cubegen.density(mol, ftmp.name, mf.make_rdm1(),
nx=10, ny=10, nz=10)
self.assertEqual(rho.shape, (10,10,10))
self.assertAlmostEqual(lib.finger(rho), -0.3740462814001553, 9)
rho = cubegen.density(mol, ftmp.name, mf.make_rdm1(),
nx=10, ny=10, nz=10, resolution=0.5)
self.assertEqual(rho.shape, (12,18,15))
self.assertAlmostEqual(lib.finger(rho), -1.007950007160415, 9)
if __name__ == "__main__":
print("Full Tests for molden")
unittest.main()
| sunqm/pyscf | pyscf/tools/test/test_cubegen.py | Python | apache-2.0 | 2,994 | 0.007348 |
##
## Bragg fiber
##
from numpy import *
from pylab import *
from Polymode import *
from Polymode import LayeredSolver
ncore = 1.0
n1=1.4
n2=1.6
rcore = 5.0
Nlayer = 20
m = 0
neff_opt = 0.998
wl_opt = 1.0
d1 = wl_opt/(4*sqrt(n1**2 - neff_opt**2))
d2 = wl_opt/(4*sqrt(n2**2 - neff_opt**2))
dlayer = d1+d2
wlrange=[0.4,1.4]
print "Bragg layer widths d1=%.4g, d2=%.4g, rcore = %.5g " % (d1,d2, rcore)
mg = Material.Fixed(ncore)
m1 = Material.Fixed(n1)
m2 = Material.Fixed(n2)
#create structure object with external substrate of m1
wg = Waveguide.Waveguide(material=m1, symmetry=1)
#Core
wg.add_shape(Waveguide.Circle(mg, center=(0,0), radius=rcore))
#Cladding
for ii in range(Nlayer):
ri = rcore+ii*dlayer
a1 = Waveguide.Annulus(m1, r=(ri,ri+d1))
a2 = Waveguide.Annulus(m2, r=(ri+d2,ri+dlayer))
wg.add_shapes(a1,a2)
#Use the LayeredSolver
solver = LayeredSolver.LayeredSolver(wg)
#Do an overall scan of the condition number of the eigenvalue
#problem
wlrange=[0.8,1.2]
wlcscan = Solver.WavelengthConditionScan(solver, Nscan=(50,50))
wlcscan(wlrange, m, neffrange=[ncore-0.05, ncore])
wlcscan.plot(style={'cmap':cm.gray_r})
#Track the fundamental mode
wlrange=[0.95,1.15]
wltrack = Solver.WavelengthTrack(solver)
wltrack.ga_target=1e-4
wltrack(wlrange, m, nefflist=[0.992], number=1)
wltrack.plot()
| juhasch/polymode | examples/ex8_bragg_fiber.py | Python | gpl-3.0 | 1,327 | 0.019593 |
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import serialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class InviteList(ListResource):
def __init__(self, version, service_sid, channel_sid):
"""
Initialize the InviteList
:param Version version: Version that contains the resource
:param service_sid: The SID of the Service that the resource is associated with
:param channel_sid: The SID of the Channel the new resource belongs to
:returns: twilio.rest.chat.v1.service.channel.invite.InviteList
:rtype: twilio.rest.chat.v1.service.channel.invite.InviteList
"""
super(InviteList, self).__init__(version)
# Path Solution
self._solution = {'service_sid': service_sid, 'channel_sid': channel_sid, }
self._uri = '/Services/{service_sid}/Channels/{channel_sid}/Invites'.format(**self._solution)
def create(self, identity, role_sid=values.unset):
"""
Create the InviteInstance
:param unicode identity: The `identity` value that identifies the new resource's User
:param unicode role_sid: The Role assigned to the new member
:returns: The created InviteInstance
:rtype: twilio.rest.chat.v1.service.channel.invite.InviteInstance
"""
data = values.of({'Identity': identity, 'RoleSid': role_sid, })
payload = self._version.create(method='POST', uri=self._uri, data=data, )
return InviteInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
channel_sid=self._solution['channel_sid'],
)
def stream(self, identity=values.unset, limit=None, page_size=None):
"""
Streams InviteInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param unicode identity: The `identity` value of the resources to read
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.chat.v1.service.channel.invite.InviteInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(identity=identity, page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'])
def list(self, identity=values.unset, limit=None, page_size=None):
"""
Lists InviteInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param unicode identity: The `identity` value of the resources to read
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.chat.v1.service.channel.invite.InviteInstance]
"""
return list(self.stream(identity=identity, limit=limit, page_size=page_size, ))
def page(self, identity=values.unset, page_token=values.unset,
page_number=values.unset, page_size=values.unset):
"""
Retrieve a single page of InviteInstance records from the API.
Request is executed immediately
:param unicode identity: The `identity` value of the resources to read
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of InviteInstance
:rtype: twilio.rest.chat.v1.service.channel.invite.InvitePage
"""
data = values.of({
'Identity': serialize.map(identity, lambda e: e),
'PageToken': page_token,
'Page': page_number,
'PageSize': page_size,
})
response = self._version.page(method='GET', uri=self._uri, params=data, )
return InvitePage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of InviteInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of InviteInstance
:rtype: twilio.rest.chat.v1.service.channel.invite.InvitePage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return InvitePage(self._version, response, self._solution)
def get(self, sid):
"""
Constructs a InviteContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.chat.v1.service.channel.invite.InviteContext
:rtype: twilio.rest.chat.v1.service.channel.invite.InviteContext
"""
return InviteContext(
self._version,
service_sid=self._solution['service_sid'],
channel_sid=self._solution['channel_sid'],
sid=sid,
)
def __call__(self, sid):
"""
Constructs a InviteContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.chat.v1.service.channel.invite.InviteContext
:rtype: twilio.rest.chat.v1.service.channel.invite.InviteContext
"""
return InviteContext(
self._version,
service_sid=self._solution['service_sid'],
channel_sid=self._solution['channel_sid'],
sid=sid,
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.IpMessaging.V1.InviteList>'
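# Illustrative usage sketch (added comment; the SIDs and identity are placeholders and
# the client setup is assumed, not defined in this file):
#
#   from twilio.rest import Client
#   client = Client(account_sid, auth_token)
#   invite = client.chat.v1.services(service_sid) \
#                          .channels(channel_sid) \
#                          .invites.create(identity='alice')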
class InvitePage(Page):
def __init__(self, version, response, solution):
"""
Initialize the InvitePage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param service_sid: The SID of the Service that the resource is associated with
:param channel_sid: The SID of the Channel the new resource belongs to
:returns: twilio.rest.chat.v1.service.channel.invite.InvitePage
:rtype: twilio.rest.chat.v1.service.channel.invite.InvitePage
"""
super(InvitePage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of InviteInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.chat.v1.service.channel.invite.InviteInstance
:rtype: twilio.rest.chat.v1.service.channel.invite.InviteInstance
"""
return InviteInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
channel_sid=self._solution['channel_sid'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.IpMessaging.V1.InvitePage>'
class InviteContext(InstanceContext):
def __init__(self, version, service_sid, channel_sid, sid):
"""
Initialize the InviteContext
:param Version version: Version that contains the resource
:param service_sid: The SID of the Service to fetch the resource from
:param channel_sid: The SID of the Channel the resource to fetch belongs to
:param sid: The unique string that identifies the resource
:returns: twilio.rest.chat.v1.service.channel.invite.InviteContext
:rtype: twilio.rest.chat.v1.service.channel.invite.InviteContext
"""
super(InviteContext, self).__init__(version)
# Path Solution
self._solution = {'service_sid': service_sid, 'channel_sid': channel_sid, 'sid': sid, }
self._uri = '/Services/{service_sid}/Channels/{channel_sid}/Invites/{sid}'.format(**self._solution)
def fetch(self):
"""
Fetch the InviteInstance
:returns: The fetched InviteInstance
:rtype: twilio.rest.chat.v1.service.channel.invite.InviteInstance
"""
payload = self._version.fetch(method='GET', uri=self._uri, )
return InviteInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
channel_sid=self._solution['channel_sid'],
sid=self._solution['sid'],
)
def delete(self):
"""
Deletes the InviteInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._version.delete(method='DELETE', uri=self._uri, )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.IpMessaging.V1.InviteContext {}>'.format(context)
class InviteInstance(InstanceResource):
def __init__(self, version, payload, service_sid, channel_sid, sid=None):
"""
Initialize the InviteInstance
:returns: twilio.rest.chat.v1.service.channel.invite.InviteInstance
:rtype: twilio.rest.chat.v1.service.channel.invite.InviteInstance
"""
super(InviteInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'sid': payload.get('sid'),
'account_sid': payload.get('account_sid'),
'channel_sid': payload.get('channel_sid'),
'service_sid': payload.get('service_sid'),
'identity': payload.get('identity'),
'date_created': deserialize.iso8601_datetime(payload.get('date_created')),
'date_updated': deserialize.iso8601_datetime(payload.get('date_updated')),
'role_sid': payload.get('role_sid'),
'created_by': payload.get('created_by'),
'url': payload.get('url'),
}
# Context
self._context = None
self._solution = {
'service_sid': service_sid,
'channel_sid': channel_sid,
'sid': sid or self._properties['sid'],
}
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: InviteContext for this InviteInstance
:rtype: twilio.rest.chat.v1.service.channel.invite.InviteContext
"""
if self._context is None:
self._context = InviteContext(
self._version,
service_sid=self._solution['service_sid'],
channel_sid=self._solution['channel_sid'],
sid=self._solution['sid'],
)
return self._context
@property
def sid(self):
"""
:returns: The unique string that identifies the resource
:rtype: unicode
"""
return self._properties['sid']
@property
def account_sid(self):
"""
:returns: The SID of the Account that created the resource
:rtype: unicode
"""
return self._properties['account_sid']
@property
def channel_sid(self):
"""
:returns: The SID of the Channel the new resource belongs to
:rtype: unicode
"""
return self._properties['channel_sid']
@property
def service_sid(self):
"""
:returns: The SID of the Service that the resource is associated with
:rtype: unicode
"""
return self._properties['service_sid']
@property
def identity(self):
"""
:returns: The string that identifies the resource's User
:rtype: unicode
"""
return self._properties['identity']
@property
def date_created(self):
"""
:returns: The RFC 2822 date and time in GMT when the resource was created
:rtype: datetime
"""
return self._properties['date_created']
@property
def date_updated(self):
"""
:returns: The RFC 2822 date and time in GMT when the resource was last updated
:rtype: datetime
"""
return self._properties['date_updated']
@property
def role_sid(self):
"""
:returns: The SID of the Role assigned to the member
:rtype: unicode
"""
return self._properties['role_sid']
@property
def created_by(self):
"""
:returns: The identity of the User that created the invite
:rtype: unicode
"""
return self._properties['created_by']
@property
def url(self):
"""
:returns: The absolute URL of the Invite resource
:rtype: unicode
"""
return self._properties['url']
def fetch(self):
"""
Fetch the InviteInstance
:returns: The fetched InviteInstance
:rtype: twilio.rest.chat.v1.service.channel.invite.InviteInstance
"""
return self._proxy.fetch()
def delete(self):
"""
Deletes the InviteInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._proxy.delete()
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.IpMessaging.V1.InviteInstance {}>'.format(context)
| Vagab0nd/SiCKRAGE | lib3/twilio/rest/ip_messaging/v1/service/channel/invite.py | Python | gpl-3.0 | 15,284 | 0.002225 |
# proxy module
from __future__ import absolute_import
from codetools.contexts.with_mask import *
| enthought/etsproxy | enthought/contexts/with_mask.py | Python | bsd-3-clause | 97 | 0 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class SubResource(Model):
"""SubResource.
:param id: Resource ID.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(self, *, id: str=None, **kwargs) -> None:
super(SubResource, self).__init__(**kwargs)
self.id = id
| lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_03_01/models/sub_resource_py3.py | Python | mit | 823 | 0.00243 |
class Member(dict):
def __init__(self, *args, **kwargs):
super(Member, self).__init__(*args, **kwargs) | asascience-open/paegan | paegan/cdm/dsg/member.py | Python | gpl-3.0 | 114 | 0.008772 |
#!/usr/bin/env python3
import glob
import os
import sys
import shutil
import subprocess
from distutils.sysconfig import get_python_inc
# Overall script settings
binder_executable = glob.glob(f'{os.getcwd()}/../../build/llvm-4.0.0/build_4.0.0*/bin/binder')[0]
bindings_dir = 'cmake_bindings'
binder_source = f'{os.getcwd()}/../../source'
pybind_source = f'{os.getcwd()}/../../build/pybind11/include'
use_pybind_stl = True
# use_pybind_stl = False
this_project_source = f'{os.getcwd()}/include'
this_project_include = this_project_source
this_project_namespace_to_bind = 'testers'
python_module_name = 'test_struct'
def make_all_includes():
all_includes = []
all_include_filename = 'all_cmake_includes.hpp'
for filename in (glob.glob(f'{this_project_source}/**/*.hpp', recursive=True) +
glob.glob(f'{this_project_source}/**/*.cpp', recursive=True) +
glob.glob(f'{this_project_source}/**/*.h', recursive=True) +
glob.glob(f'{this_project_source}/**/*.cc', recursive=True) +
glob.glob(f'{this_project_source}/**/*.c', recursive=True)):
with open(filename, 'r') as fh:
for line in fh:
if line.startswith('#include'):
all_includes.append(line.strip())
all_includes = list(set(all_includes))
# This is to ensure that the list is always the same and doesn't
# depend on the filesystem state. Not technically necessary, but
# will cause inconsistent errors without it.
all_includes.sort()
with open(all_include_filename, 'w') as fh:
for include in all_includes:
fh.write(f'{include}\n')
return all_include_filename
def make_bindings_code(all_includes_fn):
shutil.rmtree(bindings_dir, ignore_errors=True)
os.mkdir(bindings_dir)
command = (f'{binder_executable} --root-module {python_module_name} '
f'--prefix {os.getcwd()}/{bindings_dir}/ '
f'--bind {this_project_namespace_to_bind} '
+ ('--config config.cfg ' if use_pybind_stl else '') +
f' {all_includes_fn} -- -std=c++11 '
f'-I{this_project_include} -DNDEBUG -v').split()
print('BINDER COMMAND:', ' '.join(command))
subprocess.check_call(command)
sources_to_compile = []
with open(f'{bindings_dir}/{python_module_name}.sources', 'r') as fh:
for line in fh:
sources_to_compile.append(line.strip())
return sources_to_compile
def compile_sources(sources_to_compile):
os.chdir(bindings_dir)
lines_to_write = []
lines_to_write.append(f'project({python_module_name})')
for include_dir in [binder_source, this_project_source, this_project_include, pybind_source, get_python_inc()]:
lines_to_write.append(f'include_directories({include_dir})')
lines_to_write.append('set_property(GLOBAL PROPERTY POSITION_INDEPENDENT_CODE ON)') # -fPIC
lines_to_write.append('add_definitions(-DNDEBUG)')
lines_to_write.append(f'add_library({python_module_name} SHARED')
for source in sources_to_compile:
lines_to_write.append(f'\t{source}')
lines_to_write.append(')')
lines_to_write.append(f'set_target_properties({python_module_name} PROPERTIES PREFIX "")')
lines_to_write.append(f'set_target_properties({python_module_name} PROPERTIES SUFFIX ".so")')
with open('CMakeLists.txt', 'w') as f:
for line in lines_to_write:
f.write(f'{line}\n')
# Done making CMakeLists.txt
subprocess.call('cmake -G Ninja'.split())
subprocess.call('ninja')
sys.path.append('.')
if python_module_name == 'test_struct':
sys.stdout.flush()
print('Testing Python lib...')
import test_struct
test_obj = test_struct.testers.test_my_struct()
print(test_obj.an_int)
if use_pybind_stl:
print(test_obj.a_vector)
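# For reference (added comment), the CMakeLists.txt generated above ends up roughly like:
#
#   project(test_struct)
#   include_directories(...)   # binder, project, pybind11 and Python include dirs
#   set_property(GLOBAL PROPERTY POSITION_INDEPENDENT_CODE ON)
#   add_definitions(-DNDEBUG)
#   add_library(test_struct SHARED
#       <sources listed in test_struct.sources>)
#   set_target_properties(test_struct PROPERTIES PREFIX "")
#   set_target_properties(test_struct PROPERTIES SUFFIX ".so")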
def main():
all_includes_fn = make_all_includes()
sources_to_compile = make_bindings_code(all_includes_fn)
compile_sources(sources_to_compile)
if __name__ == '__main__':
main()
| RosettaCommons/binder | examples/example_struct/make_bindings_via_cmake.py | Python | mit | 4,103 | 0.002437 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import bottle
import commands
from bottle import route, send_file, template
@route('/')
def index():
bottle.TEMPLATES.clear() # For rapid development
return template("index", master_port = master_port)
@route('/framework/:id#[0-9-]*#')
def framework(id):
bottle.TEMPLATES.clear() # For rapid development
return template("framework", master_port = master_port, framework_id = id)
@route('/static/:filename#.*#')
def static(filename):
send_file(filename, root = './webui/static')
@route('/log/:level#[A-Z]*#')
def log_full(level):
send_file('mesos-master.' + level, root = log_dir,
guessmime = False, mimetype = 'text/plain')
@route('/log/:level#[A-Z]*#/:lines#[0-9]*#')
def log_tail(level, lines):
bottle.response.content_type = 'text/plain'
command = 'tail -%s %s/mesos-master.%s' % (lines, log_dir, level)
return commands.getoutput(command)
bottle.TEMPLATE_PATH.append('./webui/master/')
# TODO(*): Add an assert to confirm that all the arguments we are
# expecting have been passed to us, which will give us a better error
# message when they aren't!
master_port = sys.argv[1]
webui_port = sys.argv[2]
log_dir = sys.argv[3]
bottle.debug(True)
bottle.run(host = '0.0.0.0', port = webui_port)
| charlescearl/VirtualMesos | src/webui/master/webui.py | Python | apache-2.0 | 2,037 | 0.015218 |
import sys
import regex as re
from BeautifulSoup import BeautifulStoneSoup as bss
# REQUIRES BeautifulSoup 3. BS4 fails with Python recursion errors when it gets badly damaged texts.
def cleanup(infile, outfile):
    # Main routine: takes file handles for input and output; called at the bottom of the file.
text = infile.read()
# custom regexes for things that BeautifulSoup can't handle go here. Keep to a minimum. Below examples are for the Encyc.
# text = re.sub(r"<\?>",r"<gap reason='omitted' unit='character' />",text)
# text = re.sub(r"<\->",r"<gap reason='omitted' unit='bracket' />",text)
# text = re.sub(r"<MVO_PIM=\"(.*?)\">",r'<figure><graphic url="\g<1>"></graphic></figure>',text)
# text = re.sub(r"<omit=(.*?)>",r"<gap reason='omitted' unit='\g<1>' />",text)
print(len(text))
soup = bss(text, selfClosingTags=self_closing)
for tag in soup.findAll():
if tag.name in fix_case:
tag.name = fix_case[tag.name]
print(soup, file=outfile)
outfile.close()
# Annoyingly, BeautifulSoup requires you to declare ALL self-closing tags yourself; it will badly mangle your text if you miss one, so get this right.
self_closing = ["p", "index", "pb", "milestone", "col", "c", "omit", "hr1", "hr2", "hr3", "file_group", "gap", "volume"]
# BeautifulSoup lowercases all element names; to get things closer to standard TEI, I've included a list here which I use to restore them after parsing
capitalized_elements = [
"accMat",
"addName",
"addrLine",
"addSpan",
"adminInfo",
"altGrp",
"altIdent",
"altIdentifier",
"appInfo",
"attDef",
"attList",
"attRef",
"biblFull",
"biblScope",
"biblStruct",
"binaryObject",
"bindingDesc",
"calendarDesc",
"castGroup",
"castItem",
"castList",
"catDesc",
"catRef",
"charDecl",
"charName",
"charProp",
"citedRange",
"classCode",
"classDecl",
"classRef",
"classSpec",
"constraintSpec",
"cRefPattern",
"custEvent",
"custodialHist",
"damageSpan",
"decoDesc",
"decoNote",
"defaultVal",
"delSpan",
"dictScrap",
"divGen",
"docAuthor",
"docDate",
"docEdition",
"docImprint",
"docTitle",
"editionStmt",
"editorialDecl",
"egXML",
"eLeaf",
"elementRef",
"elementSpec",
"encodingDesc",
"entryFree",
"eTree",
"fDecl",
"fDescr",
"figDesc",
"fileDesc",
"finalRubric",
"fLib",
"floatingText",
"fsConstraints",
"fsdDecl",
"fsDecl",
"fsDescr",
"fsdLink",
"fvLib",
"genName",
"geoDecl",
"geogFeat",
"geogName",
"glyphName",
"gramGrp",
"handDesc",
"handNote",
"handNotes",
"handShift",
"headItem",
"headLabel",
"iNode",
"interpGrp",
"iType",
"joinGrp",
"lacunaEnd",
"lacunaStart",
"langKnowledge",
"langKnown",
"langUsage",
"layoutDesc",
"linkGrp",
"listApp",
"listBibl",
"listChange",
"listEvent",
"listForest",
"listNym",
"listOrg",
"listPerson",
"listPlace",
"listPrefixDef",
"listRef",
"listRelation",
"listTranspose",
"listWit",
"localName",
"locusGrp",
"macroRef",
"macroSpec",
"measureGrp",
"memberOf",
"metDecl",
"metSym",
"moduleRef",
"moduleSpec",
"msContents",
"msDesc",
"msIdentifier",
"msItem",
"msItemStruct",
"msName",
"msPart",
"musicNotation",
"nameLink",
"notatedMusic",
"notesStmt",
"objectDesc",
"objectType",
"oRef",
"orgName",
"origDate",
"origPlace",
"oVar",
"particDesc",
"persName",
"personGrp",
"physDesc",
"placeName",
"postBox",
"postCode",
"pRef",
"prefixDef",
"profileDesc",
"projectDesc",
"publicationStmt",
"pubPlace",
"pVar",
"rdgGrp",
"recordHist",
"recordingStmt",
"refsDecl",
"refState",
"relatedItem",
"relationGrp",
"respStmt",
"revisionDesc",
"roleDesc",
"roleName",
"samplingDecl",
"schemaSpec",
"scriptDesc",
"scriptNote",
"scriptStmt",
"sealDesc",
"secFol",
"seriesStmt",
"settingDesc",
"soCalled",
"socecStatus",
"sourceDesc",
"sourceDoc",
"spanGrp",
"specDesc",
"specGrp",
"specGrpRef",
"specList",
"spGrp",
"stdVals",
"styleDefDecl",
"substJoin",
"superEntry",
"supportDesc",
"surfaceGrp",
"tagsDecl",
"tagUsage",
"TEI",
"teiCorpus",
"teiHeader",
"textClass",
"textDesc",
"textLang",
"titlePage",
"titlePart",
"titleStmt",
"typeDesc",
"typeNote",
"unicodeName",
"valDesc",
"valItem",
"valList",
"vAlt",
"variantEncoding",
"vColl",
"vDefault",
"vLabel",
"vMerge",
"vNot",
"vRange",
"witDetail",
"witEnd",
"witStart",
"interpGrp",
"spanGrp",
"addrLine",
"biblScope",
"biblStruct",
"binaryObject",
"citedRange",
"divGen",
"headItem",
"headLabel",
"listBibl",
"measureGrp",
"postBox",
"postCode",
"pubPlace",
"relatedItem",
"respStmt",
"soCalled",
"teiCorpus",
"textLang",
"particDesc",
"settingDesc",
"textDesc",
"dictScrap",
"entryFree",
"gramGrp",
"iType",
"oRef",
"oVar",
"pRef",
"pVar",
"superEntry",
"castGroup",
"castItem",
"castList",
"roleDesc",
"spGrp",
"figDesc",
"notatedMusic",
"charDecl",
"charName",
"charProp",
"glyphName",
"localName",
"unicodeName",
"appInfo",
"biblFull",
"cRefPattern",
"calendarDesc",
"catDesc",
"catRef",
"classCode",
"classDecl",
"editionStmt",
"editorialDecl",
"encodingDesc",
"fileDesc",
"geoDecl",
"handNote",
"langUsage",
"listChange",
"listPrefixDef",
"notesStmt",
"prefixDef",
"profileDesc",
"projectDesc",
"publicationStmt",
"refState",
"refsDecl",
"revisionDesc",
"samplingDecl",
"scriptNote",
"seriesStmt",
"sourceDesc",
"stdVals",
"styleDefDecl",
"tagUsage",
"tagsDecl",
"teiHeader",
"textClass",
"titleStmt",
"typeNote",
"fDecl",
"fDescr",
"fLib",
"fsConstraints",
"fsDecl",
"fsDescr",
"fsdDecl",
"fsdLink",
"fvLib",
"vAlt",
"vColl",
"vDefault",
"vLabel",
"vMerge",
"vNot",
"vRange",
"altGrp",
"joinGrp",
"linkGrp",
"accMat",
"adminInfo",
"altIdentifier",
"bindingDesc",
"custEvent",
"custodialHist",
"decoDesc",
"decoNote",
"finalRubric",
"handDesc",
"layoutDesc",
"locusGrp",
"msContents",
"msDesc",
"msIdentifier",
"msItem",
"msItemStruct",
"msName",
"msPart",
"musicNotation",
"objectDesc",
"objectType",
"origDate",
"origPlace",
"physDesc",
"recordHist",
"scriptDesc",
"sealDesc",
"secFol",
"supportDesc",
"typeDesc",
"addName",
"genName",
"geogFeat",
"geogName",
"langKnowledge",
"langKnown",
"listEvent",
"listNym",
"listOrg",
"listPerson",
"listPlace",
"listRelation",
"nameLink",
"orgName",
"persName",
"personGrp",
"placeName",
"relationGrp",
"roleName",
"socecStatus",
"eLeaf",
"eTree",
"iNode",
"listForest",
"recordingStmt",
"scriptStmt",
"altIdent",
"attDef",
"attList",
"attRef",
"classRef",
"classSpec",
"constraintSpec",
"defaultVal",
"egXML",
"elementRef",
"elementSpec",
"listRef",
"macroRef",
"macroSpec",
"memberOf",
"moduleRef",
"moduleSpec",
"schemaSpec",
"specDesc",
"specGrp",
"specGrpRef",
"specList",
"valDesc",
"valItem",
"valList",
"lacunaEnd",
"lacunaStart",
"listApp",
"listWit",
"rdgGrp",
"variantEncoding",
"witDetail",
"witEnd",
"witStart",
"TEI",
"docAuthor",
"docDate",
"docEdition",
"docImprint",
"docTitle",
"floatingText",
"titlePage",
"titlePart",
"addSpan",
"damageSpan",
"delSpan",
"handNotes",
"handShift",
"listTranspose",
"sourceDoc",
"substJoin",
"surfaceGrp",
"metDecl",
"metSym",
]
fix_case = {}
for el in capitalized_elements:
fix_case[el.lower()] = el
for filename in sys.argv[1:]:
print("Cleaning %s" % filename, file=sys.stderr)
filenameout = filename + ".xml"
infile = open(filename)
outfile = open(filenameout, "w")
cleanup(infile, outfile)
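# Typical invocation (filenames are placeholders):
#   python tei_cleanup.py vol01.txt vol02.txt
# which writes the cleaned output to vol01.txt.xml and vol02.txt.xml.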
| ARTFL-Project/PhiloLogic4 | extras/utilities/tei_cleanup.py | Python | gpl-3.0 | 8,791 | 0.00091 |
import os
import ast
import six
from .ModelComponents import ComponentBase
_comps_name = '__components'
class ExprsToAssigns(ast.NodeTransformer):
"""
Walks the Abstract Syntax Tree from the parsed model file, and transforms
bare expressions into augmented assignments that are tacked on to the end
of the components list, ie:
Sky(...)
becomes:
components += [Sky(...)]
"""
def visit_Expr(self, node):
return ast.copy_location(ast.AugAssign(
target=ast.Name(id=_comps_name, ctx=ast.Store()),
op=ast.Add(),
value=ast.List(elts=[node.value], ctx=ast.Load())
), node)
def component_list_from_file(filename):
"""
Read in a file containing fit components and return them as a list
"""
with open(filename) as f:
model_tree = ast.parse(f.read())
# Inject distribution and component imports. Put them at the beginning
    # so user imports may override them. level=0 means an absolute import, e.g.:
    # from psfMC.ModelComponents import *
ast.increment_lineno(model_tree, n=3)
comps = ast.ImportFrom(module='psfMC.ModelComponents',
names=[ast.alias(name='*', asname=None)], level=0)
dists = ast.ImportFrom(module='psfMC.distributions',
names=[ast.alias(name='*', asname=None)], level=0)
model_tree.body.insert(0, comps)
model_tree.body.insert(1, dists)
# Insert a statement creating an empty list called components
comps_node = ast.Assign(targets=[ast.Name(id=_comps_name, ctx=ast.Store())],
value=ast.List(elts=[], ctx=ast.Load()))
model_tree.body.insert(2, comps_node)
# Transform bare components expressions into list append statements
model_tree = ExprsToAssigns().visit(model_tree)
ast.fix_missing_locations(model_tree)
# Process file within its local dir, so file references within are relative
# to its location instead of the script run location.
prev_dir = os.getcwd()
model_dir = os.path.dirname(filename)
if model_dir != '':
os.chdir(model_dir)
model_namespace = dict()
byte_code = compile(model_tree, filename, mode='exec')
six.exec_(byte_code, model_namespace)
os.chdir(prev_dir)
# Filter out only those object that are subclasses of ComponentBase
return [comp for comp in model_namespace[_comps_name]
if isinstance(comp, ComponentBase.ComponentBase)]
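if __name__ == '__main__':
    # Hedged usage sketch, not part of the original psfMC module: parse one or
    # more model files named on the command line and print the components they
    # define. The model files are expected to contain bare component
    # expressions (e.g. Sky(...), as in the ExprsToAssigns docstring above).
    import sys
    for model_file in sys.argv[1:]:
        for component in component_list_from_file(model_file):
            print(component)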
| mmechtley/psfMC | psfMC/model_parser.py | Python | mit | 2,467 | 0.000405 |
eg.RegisterPlugin(
name = "Epson TW-700 Projector",
author = "Tom Wilson",
version = "0.1.0",
kind = "external",
guid = "{f6cfb453-416b-4276-92ac-c58ffe98972b}",
url = "",
description = (''),
canMultiLoad = True,
createMacrosOnAdd = True,
)
# Now we import some other things we will need later
import new
import thread
# Define commands
# (name, title, description (same as title if None), command)
commandsList = (
(
(
'Power',
(
('Power On', 'Power on', None, 'PWR ON'),
('Power Off', 'Power off', None, 'PWR OFF'),
)
),
(
'Inputs',
(
('Component', 'Component', None, 'SOURCE 10'),
('PC', 'PC', None, 'SOURCE 20'),
('HDMI', 'HDMI', None, 'SOURCE 30'),
)
),
)
)
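# Each (class name, title, description, serial command) entry above is turned
# into an EpsonTW700SerialAction subclass and registered under its group in
# EpsonTW700Serial.__init__ below.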
class EpsonTW700SerialAction(eg.ActionClass):
def __call__(self):
self.plugin.sendCommand(self.serialcmd)
class EpsonTW700SerialsetVolumeAbsolute(eg.ActionWithStringParameter):
name='Set absolute volume'
description='Sets the absolute volume'
def __call__(self, volume):
return self.plugin.setVolume(volume, False)
def GetLabel(self, volume):
return "Set Absolute Volume to %d" % volume
def Configure(self, volume=-40):
panel = eg.ConfigPanel(self)
valueCtrl = panel.SpinIntCtrl(volume, min=-80.5, max=16.5)
panel.AddLine("Set absolute volume to", valueCtrl)
while panel.Affirmed():
panel.SetResult(valueCtrl.GetValue())
class EpsonTW700SerialsetVolumeRelative(eg.ActionWithStringParameter):
name='Set relative volume'
description='Sets the relative volume'
def __call__(self, volume):
return self.plugin.setVolume(volume, True)
def GetLabel(self, volume):
return "Set Relative Volume to %d" % volume
def Configure(self, volume=0):
panel = eg.ConfigPanel(self)
valueCtrl = panel.SpinIntCtrl(volume, min=-80.5, max=16.5)
panel.AddLine("Set relative volume to", valueCtrl)
while panel.Affirmed():
panel.SetResult(valueCtrl.GetValue())
class EpsonTW700Serial(eg.PluginClass):
def __init__(self):
self.serial = None
self.response = None
for groupname, list in commandsList:
group = self.AddGroup(groupname)
for classname, title, desc, serial in list:
if desc is None:
desc = title
clsAttributes = dict(name=title, description=desc, serialcmd=serial)
cls = new.classobj(classname, (EpsonTW700SerialAction,), clsAttributes)
group.AddAction(cls)
if (groupname == 'Volume'):
group.AddAction(EpsonTW700SerialsetVolumeAbsolute)
group.AddAction(EpsonTW700SerialsetVolumeRelative)
def sendCommandSerial(self, cmd):
if self.serial is None:
return True
# Send command
cmd += '\r'
self.serial.write(cmd)
return True
# Serial port reader
def reader(self):
line=""
while self.readerkiller is False:
ch=self.serial.read()
if ch=='\r':
continue;
if ch=='\n':
if line != "":
self.parseLine(line)
self.TriggerEvent(line)
line=""
else:
line+=ch
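    # Parse a status line pushed by the device; currently only the volume
    # report ("@MAIN:VOL=<value>") is recognised and cached in self.volume.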
def parseLine(self, line):
if line.startswith("@MAIN:VOL="):
self.volume = float(line.split("=")[1])
print "The volume is now: %f" % self.volume
def getResponseFloat(self):
return float(self.response)
def getResponseInt(self):
self.PrintError(self.response)
if (self.response[0] == '-' or self.response[0] == '+'):
if not self.response[1:].isdigit():
self.PrintError("Bad response")
return None
elif not self.response.isdigit():
self.PrintError("Bad response")
return None
return int(self.response)
def sendCommand(self, serialcmd):
result = self.sendCommandSerial(serialcmd)
return result
def setVolume(self, volume, relative):
if relative and self.volume is None:
# can't set the relative volume if we don't know the current state...
return
if relative:
volume = self.volume + volume
if volume > 16.5:
volume = 10
elif volume < -80.5:
volume = -80.5
command = "@MAIN:VOL=%.1f" % (volume)
self.sendCommandSerial(command)
return volume
def getInitialState(self):
self.sendCommandSerial("@MAIN:VOL=?")
def __start__(self, port=0):
try:
self.serial = eg.SerialPort(port)
self.serial.baudrate = 9600
self.serial.timeout = 0.5
self.serial.setDTR(1)
self.serial.setRTS(1)
self.readerkiller = False
thread.start_new_thread(self.reader,())
self.getInitialState()
except:
self.PrintError("Unable to open serial port")
def __stop__(self):
self.readerkiller = True
if self.serial is not None:
self.serial.close()
self.serial = None
def Configure(self, port=0):
portCtrl = None
panel = eg.ConfigPanel(self)
portCtrl = panel.SerialPortChoice(port)
panel.AddLine("Port:", portCtrl)
while panel.Affirmed():
panel.SetResult(portCtrl.GetValue())
| Kabal/eventghost-epson-tw700 | __init__.py | Python | mit | 5,840 | 0.010103 |
import os
from sqlalchemy import Column, String, Integer, create_engine
from flask_sqlalchemy import SQLAlchemy
import json
database_name = "trivia"
database_path = "postgres://{}:{}@{}/{}".format(
'postgres', 'abc@123', 'localhost:5432', database_name)
db = SQLAlchemy()
'''
setup_db(app)
binds a flask application and a SQLAlchemy service
'''
def setup_db(app, database_path=database_path):
app.config["SQLALCHEMY_DATABASE_URI"] = database_path
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
db.app = app
db.init_app(app)
db.create_all()
'''
Question
'''
class Question(db.Model):
__tablename__ = 'questions'
id = Column(Integer, primary_key=True)
question = Column(String)
answer = Column(String)
category = Column(String)
difficulty = Column(Integer)
def __init__(self, question, answer, category, difficulty):
self.question = question
self.answer = answer
self.category = category
self.difficulty = difficulty
def insert(self):
db.session.add(self)
db.session.commit()
def update(self):
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
def format(self):
return {
'id': self.id,
'question': self.question,
'answer': self.answer,
'category': self.category,
'difficulty': self.difficulty
}
'''
Category
'''
class Category(db.Model):
__tablename__ = 'categories'
id = Column(Integer, primary_key=True)
type = Column(String)
def __init__(self, type):
self.type = type
def format(self):
return {
'id': self.id,
'type': self.type
}
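# Hedged usage sketch (the Flask app object and the sample values are
# assumptions, not part of this module):
#
#   from flask import Flask
#   app = Flask(__name__)
#   setup_db(app)
#   q = Question(question='What is 2 + 2?', answer='4', category='1', difficulty=1)
#   q.insert()
#   print(q.format())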
| manishbisht/Udacity | Full Stack Web Developer Nanodegree v2/P2 - Trivia API/backend/models.py | Python | mit | 1,781 | 0 |
#!/usr/bin/env python
#------------------------------------------------------------------------------
# Copyright (C) 2013 Albert Simenon
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#------------------------------------------------------------------------------
""" Gecoaching.com python utility """
__filename__ = "gc_util.py"
__version__ = "0.0.3"
__author__ = "Albert Simenon"
__email__ = "[email protected]"
__purpose__ = "Utility to download pocket queries from www.geocaching.com"
__date__ = "20/12/2013"
import argparse
import os
import progressbar
import urllib2
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
CHROMEDRIVER = "driver/chromedriver"
MAX_CACHES_PER_POCKET_QUERY = 950
MAX_CACHES_LAST_POCKET_QUERY = 500
BROWSERS = ["phantomjs","chrome","firefox","iexplorer"]
class GCSite:
""" Geocaching.com web browser class """
BASE_URL = "http://www.geocaching.com"
LOGIN_URL = "%s/login" % (BASE_URL)
POCKET_QUERY_URL = "%s/pocket" % (BASE_URL)
CHUNK_SIZE = 1024
XPATH_DOWNLOADPQ = "//a[contains(@href,'downloadpq')]"
def __init__(self, driver, args):
self.driver = driver
self.args = args
def login(self):
""" Login on Geocaching.com """
self.driver.get(self.LOGIN_URL)
element = self.driver.find_element_by_id("ctl00_ContentBody_tbUsername")
element.send_keys(self.args.user)
element = self.driver.find_element_by_id("ctl00_ContentBody_tbPassword")
element.send_keys(self.args.password)
element = self.driver.find_element_by_id("ctl00_ContentBody_btnSignIn")
element.click()
def download_pq_by_element(self, element):
""" Download pocket query with selenium webelement """
url = element.get_attribute("href")
filename = "%s.zip" % (element.get_attribute("text").strip())
opener = urllib2.build_opener()
cookies = self.driver.get_cookies()
if cookies:
cookiestring = ''
for cookie in cookies:
cookiestring += "%s=%s;" % (cookie["name"], cookie["value"])
opener.addheaders.append(
('Cookie', cookiestring))
fhandle = opener.open(url)
total_size = int(fhandle.info().getheader('Content-Length').strip())
pbar = progressbar.ProgressBar(maxval=total_size).start()
print filename
with open(self.args.output + filename, 'wb') as foutput:
while True:
data = fhandle.read(self.CHUNK_SIZE)
if not data:
break
foutput.write(data)
pbar.update(foutput.tell())
pbar.finish()
def download_pocket_queries(self):
""" Download all pocket queries on geocaching.com """
self.driver.get(self.POCKET_QUERY_URL)
elements = self.driver.find_elements_by_xpath(self.XPATH_DOWNLOADPQ)
if elements:
for element in elements:
self.download_pq_by_element(element)
else:
print "No pocket queries available to download !"
def arg_parser():
""" Argument parser """
parser = argparse.ArgumentParser()
parser.formatter_class = argparse.RawDescriptionHelpFormatter
parser.description = "%s, version %s by %s (%s)\n\n%s" \
% (__filename__,__version__,__author__,__email__,__purpose__)
parser.add_argument(
"--browser","-b",
choices=BROWSERS,
default=BROWSERS[0],
help="browser used for visiting geocaching.com")
parser.add_argument(
"--download",
action="store_true",
help="download pocket queries")
parser.add_argument(
"--user","-u",
required=True,
help="Geocaching.com username")
parser.add_argument(
"--password","-p",
required=True,
help="Geocaching.com password")
parser.add_argument(
"--output","-o",
default="",
help="output directory")
args = parser.parse_args()
return args
def main():
""" Obviously the main routine """
args = arg_parser()
if args.browser == BROWSERS[0]:
user_agent = (
"Mozilla/5.0 (X11; Linux x86_64) " +
"AppleWebKit/537.36 (KHTML, like Gecko) " +
"Chrome/31.0.1650.63 Safari/537.36")
dcap = dict(DesiredCapabilities.PHANTOMJS)
dcap["phantomjs.page.settings.userAgent"] = user_agent
driver = webdriver.PhantomJS(desired_capabilities=dcap)
elif args.browser == BROWSERS[1]:
driver = webdriver.Chrome()
driver.set_window_size(800, 400)
elif args.browser == BROWSERS[2]:
driver = webdriver.Firefox()
elif args.browser == BROWSERS[3]:
driver = webdriver.Ie()
if args.download:
site = GCSite(driver, args)
site.login()
site.download_pocket_queries()
driver.quit()
if __name__ == "__main__":
main()
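# Example invocation (credentials and output directory are placeholders):
#   python gc_util.py --user USER --password PASS --download --output ./pq/ --browser phantomjs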
| simenon/pocket-query-downloader | gc_util.py | Python | gpl-2.0 | 5,704 | 0.007714 |
# Copyright 2012 Michael Still and Canonical Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import hashlib
import os
import time
import mock
from oslo_concurrency import lockutils
from oslo_concurrency import processutils
from oslo_log import formatters
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import importutils
from six.moves import cStringIO
from nova import conductor
import nova.conf
from nova import context
from nova import objects
from nova import test
from nova.tests.unit import fake_instance
from nova import utils
from nova.virt.libvirt import imagecache
from nova.virt.libvirt import utils as libvirt_utils
CONF = nova.conf.CONF
CONF.import_opt('host', 'nova.netconf')
@contextlib.contextmanager
def intercept_log_messages():
try:
mylog = logging.getLogger('nova')
stream = cStringIO()
handler = logging.logging.StreamHandler(stream)
handler.setFormatter(formatters.ContextFormatter())
mylog.logger.addHandler(handler)
yield stream
finally:
mylog.logger.removeHandler(handler)
class ImageCacheManagerTestCase(test.NoDBTestCase):
def setUp(self):
super(ImageCacheManagerTestCase, self).setUp()
self.stock_instance_names = set(['instance-00000001',
'instance-00000002',
'instance-00000003',
'banana-42-hamster'])
def test_read_stored_checksum_missing(self):
self.stub_out('os.path.exists', lambda x: False)
csum = imagecache.read_stored_checksum('/tmp/foo', timestamped=False)
self.assertIsNone(csum)
@mock.patch.object(os.path, 'exists', return_value=True)
@mock.patch.object(time, 'time', return_value=2000000)
@mock.patch.object(os.path, 'getmtime', return_value=1000000)
def test_get_age_of_file(self, mock_getmtime, mock_time, mock_exists):
image_cache_manager = imagecache.ImageCacheManager()
exists, age = image_cache_manager._get_age_of_file('/tmp')
self.assertTrue(exists)
self.assertEqual(1000000, age)
@mock.patch.object(os.path, 'exists', return_value=False)
def test_get_age_of_file_not_exists(self, mock_exists):
image_cache_manager = imagecache.ImageCacheManager()
exists, age = image_cache_manager._get_age_of_file('/tmp')
self.assertFalse(exists)
self.assertEqual(0, age)
def test_read_stored_checksum(self):
with utils.tempdir() as tmpdir:
self.flags(instances_path=tmpdir)
self.flags(image_info_filename_pattern=('$instances_path/'
'%(image)s.info'),
group='libvirt')
csum_input = '{"sha1": "fdghkfhkgjjksfdgjksjkghsdf"}\n'
fname = os.path.join(tmpdir, 'aaa')
info_fname = imagecache.get_info_filename(fname)
f = open(info_fname, 'w')
f.write(csum_input)
f.close()
csum_output = imagecache.read_stored_checksum(fname,
timestamped=False)
self.assertEqual(csum_input.rstrip(),
'{"sha1": "%s"}' % csum_output)
def test_read_stored_checksum_legacy_essex(self):
with utils.tempdir() as tmpdir:
self.flags(instances_path=tmpdir)
self.flags(image_info_filename_pattern=('$instances_path/'
'%(image)s.info'),
group='libvirt')
fname = os.path.join(tmpdir, 'aaa')
old_fname = fname + '.sha1'
f = open(old_fname, 'w')
f.write('fdghkfhkgjjksfdgjksjkghsdf')
f.close()
csum_output = imagecache.read_stored_checksum(fname,
timestamped=False)
self.assertEqual(csum_output, 'fdghkfhkgjjksfdgjksjkghsdf')
self.assertFalse(os.path.exists(old_fname))
info_fname = imagecache.get_info_filename(fname)
self.assertTrue(os.path.exists(info_fname))
def test_list_base_images(self):
listing = ['00000001',
'ephemeral_0_20_None',
'17d1b00b81642842e514494a78e804e9a511637c_5368709120.info',
'00000004',
'swap_1000']
images = ['e97222e91fc4241f49a7f520d1dcf446751129b3_sm',
'e09c675c2d1cfac32dae3c2d83689c8c94bc693b_sm',
'e97222e91fc4241f49a7f520d1dcf446751129b3',
'17d1b00b81642842e514494a78e804e9a511637c',
'17d1b00b81642842e514494a78e804e9a511637c_5368709120',
'17d1b00b81642842e514494a78e804e9a511637c_10737418240']
listing.extend(images)
self.stub_out('os.listdir', lambda x: listing)
self.stub_out('os.path.isfile', lambda x: True)
base_dir = '/var/lib/nova/instances/_base'
self.flags(instances_path='/var/lib/nova/instances')
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager._list_base_images(base_dir)
sanitized = []
for ent in image_cache_manager.unexplained_images:
sanitized.append(ent.replace(base_dir + '/', ''))
self.assertEqual(sorted(sanitized), sorted(images))
expected = os.path.join(base_dir,
'e97222e91fc4241f49a7f520d1dcf446751129b3')
self.assertIn(expected, image_cache_manager.unexplained_images)
expected = os.path.join(base_dir,
'17d1b00b81642842e514494a78e804e9a511637c_'
'10737418240')
self.assertIn(expected, image_cache_manager.unexplained_images)
unexpected = os.path.join(base_dir, '00000004')
self.assertNotIn(unexpected, image_cache_manager.unexplained_images)
for ent in image_cache_manager.unexplained_images:
self.assertTrue(ent.startswith(base_dir))
self.assertEqual(len(image_cache_manager.originals), 2)
expected = os.path.join(base_dir,
'17d1b00b81642842e514494a78e804e9a511637c')
self.assertIn(expected, image_cache_manager.originals)
unexpected = os.path.join(base_dir,
'17d1b00b81642842e514494a78e804e9a511637c_'
'10737418240')
self.assertNotIn(unexpected, image_cache_manager.originals)
self.assertEqual(1, len(image_cache_manager.back_swap_images))
self.assertIn('swap_1000', image_cache_manager.back_swap_images)
def test_list_backing_images_small(self):
self.stub_out('os.listdir',
lambda x: ['_base', 'instance-00000001',
'instance-00000002', 'instance-00000003'])
self.stub_out('os.path.exists',
lambda x: x.find('instance-') != -1)
self.stubs.Set(libvirt_utils, 'get_disk_backing_file',
lambda x: 'e97222e91fc4241f49a7f520d1dcf446751129b3_sm')
found = os.path.join(CONF.instances_path,
CONF.image_cache_subdirectory_name,
'e97222e91fc4241f49a7f520d1dcf446751129b3_sm')
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager.unexplained_images = [found]
image_cache_manager.instance_names = self.stock_instance_names
inuse_images = image_cache_manager._list_backing_images()
self.assertEqual(inuse_images, [found])
self.assertEqual(len(image_cache_manager.unexplained_images), 0)
def test_list_backing_images_resized(self):
self.stub_out('os.listdir',
lambda x: ['_base', 'instance-00000001',
'instance-00000002', 'instance-00000003'])
self.stub_out('os.path.exists',
lambda x: x.find('instance-') != -1)
self.stubs.Set(libvirt_utils, 'get_disk_backing_file',
lambda x: ('e97222e91fc4241f49a7f520d1dcf446751129b3_'
'10737418240'))
found = os.path.join(CONF.instances_path,
CONF.image_cache_subdirectory_name,
'e97222e91fc4241f49a7f520d1dcf446751129b3_'
'10737418240')
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager.unexplained_images = [found]
image_cache_manager.instance_names = self.stock_instance_names
inuse_images = image_cache_manager._list_backing_images()
self.assertEqual(inuse_images, [found])
self.assertEqual(len(image_cache_manager.unexplained_images), 0)
def test_list_backing_images_instancename(self):
self.stub_out('os.listdir',
lambda x: ['_base', 'banana-42-hamster'])
self.stub_out('os.path.exists',
lambda x: x.find('banana-42-hamster') != -1)
self.stubs.Set(libvirt_utils, 'get_disk_backing_file',
lambda x: 'e97222e91fc4241f49a7f520d1dcf446751129b3_sm')
found = os.path.join(CONF.instances_path,
CONF.image_cache_subdirectory_name,
'e97222e91fc4241f49a7f520d1dcf446751129b3_sm')
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager.unexplained_images = [found]
image_cache_manager.instance_names = self.stock_instance_names
inuse_images = image_cache_manager._list_backing_images()
self.assertEqual(inuse_images, [found])
self.assertEqual(len(image_cache_manager.unexplained_images), 0)
def test_list_backing_images_disk_notexist(self):
self.stub_out('os.listdir',
lambda x: ['_base', 'banana-42-hamster'])
self.stub_out('os.path.exists',
lambda x: x.find('banana-42-hamster') != -1)
def fake_get_disk(disk_path):
raise processutils.ProcessExecutionError()
self.stubs.Set(libvirt_utils, 'get_disk_backing_file', fake_get_disk)
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager.unexplained_images = []
image_cache_manager.instance_names = self.stock_instance_names
self.assertRaises(processutils.ProcessExecutionError,
image_cache_manager._list_backing_images)
def test_find_base_file_nothing(self):
self.stub_out('os.path.exists', lambda x: False)
base_dir = '/var/lib/nova/instances/_base'
fingerprint = '549867354867'
image_cache_manager = imagecache.ImageCacheManager()
res = list(image_cache_manager._find_base_file(base_dir, fingerprint))
self.assertEqual(0, len(res))
def test_find_base_file_small(self):
fingerprint = '968dd6cc49e01aaa044ed11c0cce733e0fa44a6a'
self.stub_out('os.path.exists',
lambda x: x.endswith('%s_sm' % fingerprint))
base_dir = '/var/lib/nova/instances/_base'
image_cache_manager = imagecache.ImageCacheManager()
res = list(image_cache_manager._find_base_file(base_dir, fingerprint))
base_file = os.path.join(base_dir, fingerprint + '_sm')
self.assertEqual(res, [(base_file, True, False)])
def test_find_base_file_resized(self):
fingerprint = '968dd6cc49e01aaa044ed11c0cce733e0fa44a6a'
listing = ['00000001',
'ephemeral_0_20_None',
'968dd6cc49e01aaa044ed11c0cce733e0fa44a6a_10737418240',
'00000004']
self.stub_out('os.listdir', lambda x: listing)
self.stub_out('os.path.exists',
lambda x: x.endswith('%s_10737418240' % fingerprint))
self.stub_out('os.path.isfile', lambda x: True)
base_dir = '/var/lib/nova/instances/_base'
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager._list_base_images(base_dir)
res = list(image_cache_manager._find_base_file(base_dir, fingerprint))
base_file = os.path.join(base_dir, fingerprint + '_10737418240')
self.assertEqual(res, [(base_file, False, True)])
def test_find_base_file_all(self):
fingerprint = '968dd6cc49e01aaa044ed11c0cce733e0fa44a6a'
listing = ['00000001',
'ephemeral_0_20_None',
'968dd6cc49e01aaa044ed11c0cce733e0fa44a6a_sm',
'968dd6cc49e01aaa044ed11c0cce733e0fa44a6a_10737418240',
'00000004']
self.stub_out('os.listdir', lambda x: listing)
self.stub_out('os.path.exists', lambda x: True)
self.stub_out('os.path.isfile', lambda x: True)
base_dir = '/var/lib/nova/instances/_base'
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager._list_base_images(base_dir)
res = list(image_cache_manager._find_base_file(base_dir, fingerprint))
base_file1 = os.path.join(base_dir, fingerprint)
base_file2 = os.path.join(base_dir, fingerprint + '_sm')
base_file3 = os.path.join(base_dir, fingerprint + '_10737418240')
self.assertEqual(res, [(base_file1, False, False),
(base_file2, True, False),
(base_file3, False, True)])
@contextlib.contextmanager
def _make_base_file(self, checksum=True, lock=True):
"""Make a base file for testing."""
with utils.tempdir() as tmpdir:
self.flags(instances_path=tmpdir)
self.flags(image_info_filename_pattern=('$instances_path/'
'%(image)s.info'),
group='libvirt')
fname = os.path.join(tmpdir, 'aaa')
base_file = open(fname, 'w')
base_file.write('data')
base_file.close()
if lock:
lockdir = os.path.join(tmpdir, 'locks')
lockname = os.path.join(lockdir, 'nova-aaa')
os.mkdir(lockdir)
lock_file = open(lockname, 'w')
lock_file.write('data')
lock_file.close()
base_file = open(fname, 'r')
if checksum:
imagecache.write_stored_checksum(fname)
base_file.close()
yield fname
def test_remove_base_file(self):
with self._make_base_file() as fname:
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager._remove_base_file(fname)
info_fname = imagecache.get_info_filename(fname)
lock_name = 'nova-' + os.path.split(fname)[-1]
lock_dir = os.path.join(CONF.instances_path, 'locks')
lock_file = os.path.join(lock_dir, lock_name)
# Files are initially too new to delete
self.assertTrue(os.path.exists(fname))
self.assertTrue(os.path.exists(info_fname))
self.assertTrue(os.path.exists(lock_file))
# Old files get cleaned up though
os.utime(fname, (-1, time.time() - 3601))
image_cache_manager._remove_base_file(fname)
self.assertFalse(os.path.exists(fname))
self.assertFalse(os.path.exists(info_fname))
self.assertFalse(os.path.exists(lock_file))
def test_remove_base_file_original(self):
with self._make_base_file() as fname:
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager.originals = [fname]
image_cache_manager._remove_base_file(fname)
info_fname = imagecache.get_info_filename(fname)
# Files are initially too new to delete
self.assertTrue(os.path.exists(fname))
self.assertTrue(os.path.exists(info_fname))
# This file should stay longer than a resized image
os.utime(fname, (-1, time.time() - 3601))
image_cache_manager._remove_base_file(fname)
self.assertTrue(os.path.exists(fname))
self.assertTrue(os.path.exists(info_fname))
# Originals don't stay forever though
os.utime(fname, (-1, time.time() - 3600 * 25))
image_cache_manager._remove_base_file(fname)
self.assertFalse(os.path.exists(fname))
self.assertFalse(os.path.exists(info_fname))
def test_remove_base_file_dne(self):
# This test is solely to execute the "does not exist" code path. We
# don't expect the method being tested to do anything in this case.
with utils.tempdir() as tmpdir:
self.flags(instances_path=tmpdir)
self.flags(image_info_filename_pattern=('$instances_path/'
'%(image)s.info'),
group='libvirt')
fname = os.path.join(tmpdir, 'aaa')
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager._remove_base_file(fname)
def test_remove_base_file_oserror(self):
with intercept_log_messages() as stream:
with utils.tempdir() as tmpdir:
self.flags(instances_path=tmpdir)
self.flags(image_info_filename_pattern=('$instances_path/'
'%(image)s.info'),
group='libvirt')
fname = os.path.join(tmpdir, 'aaa')
os.mkdir(fname)
os.utime(fname, (-1, time.time() - 3601))
# This will raise an OSError because of file permissions
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager._remove_base_file(fname)
self.assertTrue(os.path.exists(fname))
self.assertNotEqual(stream.getvalue().find('Failed to remove'),
-1)
def test_handle_base_image_unused(self):
img = '123'
with self._make_base_file() as fname:
os.utime(fname, (-1, time.time() - 3601))
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager.unexplained_images = [fname]
image_cache_manager._handle_base_image(img, fname)
self.assertEqual(image_cache_manager.unexplained_images, [])
self.assertEqual(image_cache_manager.removable_base_files,
[fname])
self.assertEqual(image_cache_manager.corrupt_base_files, [])
@mock.patch.object(libvirt_utils, 'update_mtime')
def test_handle_base_image_used(self, mock_mtime):
img = '123'
with self._make_base_file() as fname:
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager.unexplained_images = [fname]
image_cache_manager.used_images = {'123': (1, 0, ['banana-42'])}
image_cache_manager._handle_base_image(img, fname)
mock_mtime.assert_called_once_with(fname)
self.assertEqual(image_cache_manager.unexplained_images, [])
self.assertEqual(image_cache_manager.removable_base_files, [])
self.assertEqual(image_cache_manager.corrupt_base_files, [])
@mock.patch.object(libvirt_utils, 'update_mtime')
def test_handle_base_image_used_remotely(self, mock_mtime):
img = '123'
with self._make_base_file() as fname:
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager.unexplained_images = [fname]
image_cache_manager.used_images = {'123': (0, 1, ['banana-42'])}
image_cache_manager._handle_base_image(img, fname)
mock_mtime.assert_called_once_with(fname)
self.assertEqual(image_cache_manager.unexplained_images, [])
self.assertEqual(image_cache_manager.removable_base_files, [])
self.assertEqual(image_cache_manager.corrupt_base_files, [])
def test_handle_base_image_absent(self):
img = '123'
with intercept_log_messages() as stream:
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager.used_images = {'123': (1, 0, ['banana-42'])}
image_cache_manager._handle_base_image(img, None)
self.assertEqual(image_cache_manager.unexplained_images, [])
self.assertEqual(image_cache_manager.removable_base_files, [])
self.assertEqual(image_cache_manager.corrupt_base_files, [])
self.assertNotEqual(stream.getvalue().find('an absent base file'),
-1)
def test_handle_base_image_used_missing(self):
img = '123'
with utils.tempdir() as tmpdir:
self.flags(instances_path=tmpdir)
self.flags(image_info_filename_pattern=('$instances_path/'
'%(image)s.info'),
group='libvirt')
fname = os.path.join(tmpdir, 'aaa')
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager.unexplained_images = [fname]
image_cache_manager.used_images = {'123': (1, 0, ['banana-42'])}
image_cache_manager._handle_base_image(img, fname)
self.assertEqual(image_cache_manager.unexplained_images, [])
self.assertEqual(image_cache_manager.removable_base_files, [])
self.assertEqual(image_cache_manager.corrupt_base_files, [])
@mock.patch.object(libvirt_utils, 'update_mtime')
def test_handle_base_image_checksum_fails(self, mock_mtime):
self.flags(checksum_base_images=True, group='libvirt')
img = '123'
with self._make_base_file() as fname:
with open(fname, 'w') as f:
f.write('banana')
d = {'sha1': '21323454'}
with open('%s.info' % fname, 'w') as f:
f.write(jsonutils.dumps(d))
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager.unexplained_images = [fname]
image_cache_manager.used_images = {'123': (1, 0, ['banana-42'])}
image_cache_manager._handle_base_image(img, fname)
mock_mtime.assert_called_once_with(fname)
self.assertEqual(image_cache_manager.unexplained_images, [])
self.assertEqual(image_cache_manager.removable_base_files, [])
self.assertEqual(image_cache_manager.corrupt_base_files,
[fname])
@mock.patch.object(libvirt_utils, 'update_mtime')
@mock.patch.object(lockutils, 'external_lock')
def test_verify_base_images(self, mock_lock, mock_mtime):
hashed_1 = '356a192b7913b04c54574d18c28d46e6395428ab'
hashed_21 = '472b07b9fcf2c2451e8781e944bf5f77cd8457c8'
hashed_22 = '12c6fc06c99a462375eeb3f43dfd832b08ca9e17'
hashed_42 = '92cfceb39d57d914ed8b14d0e37643de0797ae56'
self.flags(instances_path='/instance_path',
image_cache_subdirectory_name='_base')
base_file_list = ['00000001',
'ephemeral_0_20_None',
'e97222e91fc4241f49a7f520d1dcf446751129b3_sm',
'e09c675c2d1cfac32dae3c2d83689c8c94bc693b_sm',
hashed_42,
hashed_1,
hashed_21,
hashed_22,
'%s_5368709120' % hashed_1,
'%s_10737418240' % hashed_1,
'00000004']
def fq_path(path):
return os.path.join('/instance_path/_base/', path)
# Fake base directory existence
orig_exists = os.path.exists
def exists(path):
# The python coverage tool got angry with my overly broad mocks
if not path.startswith('/instance_path'):
return orig_exists(path)
if path in ['/instance_path',
'/instance_path/_base',
'/instance_path/instance-1/disk',
'/instance_path/instance-2/disk',
'/instance_path/instance-3/disk',
'/instance_path/_base/%s.info' % hashed_42]:
return True
for p in base_file_list:
if path == fq_path(p):
return True
if path == fq_path(p) + '.info':
return False
if path in ['/instance_path/_base/%s_sm' % i for i in [hashed_1,
hashed_21,
hashed_22,
hashed_42]]:
return False
self.fail('Unexpected path existence check: %s' % path)
self.stub_out('os.path.exists', lambda x: exists(x))
# Fake up some instances in the instances directory
orig_listdir = os.listdir
def listdir(path):
# The python coverage tool got angry with my overly broad mocks
if not path.startswith('/instance_path'):
return orig_listdir(path)
if path == '/instance_path':
return ['instance-1', 'instance-2', 'instance-3', '_base']
if path == '/instance_path/_base':
return base_file_list
self.fail('Unexpected directory listed: %s' % path)
self.stub_out('os.listdir', lambda x: listdir(x))
# Fake isfile for these faked images in _base
orig_isfile = os.path.isfile
def isfile(path):
# The python coverage tool got angry with my overly broad mocks
if not path.startswith('/instance_path'):
return orig_isfile(path)
for p in base_file_list:
if path == fq_path(p):
return True
self.fail('Unexpected isfile call: %s' % path)
self.stub_out('os.path.isfile', lambda x: isfile(x))
# Fake the database call which lists running instances
instances = [{'image_ref': '1',
'host': CONF.host,
'name': 'instance-1',
'uuid': '123',
'vm_state': '',
'task_state': ''},
{'image_ref': '1',
'kernel_id': '21',
'ramdisk_id': '22',
'host': CONF.host,
'name': 'instance-2',
'uuid': '456',
'vm_state': '',
'task_state': ''}]
all_instances = [fake_instance.fake_instance_obj(None, **instance)
for instance in instances]
image_cache_manager = imagecache.ImageCacheManager()
# Fake the utils call which finds the backing image
def get_disk_backing_file(path):
if path in ['/instance_path/instance-1/disk',
'/instance_path/instance-2/disk']:
return fq_path('%s_5368709120' % hashed_1)
self.fail('Unexpected backing file lookup: %s' % path)
self.stubs.Set(libvirt_utils, 'get_disk_backing_file',
lambda x: get_disk_backing_file(x))
# Fake out verifying checksums, as that is tested elsewhere
self.stubs.Set(image_cache_manager, '_verify_checksum',
lambda x, y: True)
# Fake getmtime as well
orig_getmtime = os.path.getmtime
def getmtime(path):
if not path.startswith('/instance_path'):
return orig_getmtime(path)
return 1000000
self.stub_out('os.path.getmtime', lambda x: getmtime(x))
# Make sure we don't accidentally remove a real file
orig_remove = os.remove
def remove(path):
if not path.startswith('/instance_path'):
return orig_remove(path)
# Don't try to remove fake files
return
self.stub_out('os.remove', lambda x: remove(x))
self.mox.StubOutWithMock(objects.block_device.BlockDeviceMappingList,
'bdms_by_instance_uuid')
ctxt = context.get_admin_context()
objects.block_device.BlockDeviceMappingList.bdms_by_instance_uuid(
ctxt, ['123', '456']).AndReturn({})
self.mox.ReplayAll()
# And finally we can make the call we're actually testing...
# The argument here should be a context, but it is mocked out
image_cache_manager.update(ctxt, all_instances)
# Verify
active = [fq_path(hashed_1), fq_path('%s_5368709120' % hashed_1),
fq_path(hashed_21), fq_path(hashed_22)]
for act in active:
self.assertIn(act, image_cache_manager.active_base_files)
self.assertEqual(len(image_cache_manager.active_base_files),
len(active))
for rem in [fq_path('e97222e91fc4241f49a7f520d1dcf446751129b3_sm'),
fq_path('e09c675c2d1cfac32dae3c2d83689c8c94bc693b_sm'),
fq_path(hashed_42),
fq_path('%s_10737418240' % hashed_1)]:
self.assertIn(rem, image_cache_manager.removable_base_files)
# Ensure there are no "corrupt" images as well
self.assertEqual(len(image_cache_manager.corrupt_base_files), 0)
def test_verify_base_images_no_base(self):
self.flags(instances_path='/tmp/no/such/dir/name/please')
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager.update(None, [])
def test_is_valid_info_file(self):
hashed = 'e97222e91fc4241f49a7f520d1dcf446751129b3'
self.flags(instances_path='/tmp/no/such/dir/name/please')
self.flags(image_info_filename_pattern=('$instances_path/_base/'
'%(image)s.info'),
group='libvirt')
base_filename = os.path.join(CONF.instances_path, '_base', hashed)
is_valid_info_file = imagecache.is_valid_info_file
self.assertFalse(is_valid_info_file('banana'))
self.assertFalse(is_valid_info_file(
os.path.join(CONF.instances_path, '_base', '00000001')))
self.assertFalse(is_valid_info_file(base_filename))
self.assertFalse(is_valid_info_file(base_filename + '.sha1'))
self.assertTrue(is_valid_info_file(base_filename + '.info'))
def test_configured_checksum_path(self):
with utils.tempdir() as tmpdir:
self.flags(instances_path=tmpdir)
self.flags(image_info_filename_pattern=('$instances_path/'
'%(image)s.info'),
group='libvirt')
# Ensure there is a base directory
os.mkdir(os.path.join(tmpdir, '_base'))
# Fake the database call which lists running instances
instances = [{'image_ref': '1',
'host': CONF.host,
'name': 'instance-1',
'uuid': '123',
'vm_state': '',
'task_state': ''},
{'image_ref': '1',
'host': CONF.host,
'name': 'instance-2',
'uuid': '456',
'vm_state': '',
'task_state': ''}]
all_instances = []
for instance in instances:
all_instances.append(fake_instance.fake_instance_obj(
None, **instance))
def touch(filename):
f = open(filename, 'w')
f.write('Touched')
f.close()
old = time.time() - (25 * 3600)
hashed = 'e97222e91fc4241f49a7f520d1dcf446751129b3'
base_filename = os.path.join(tmpdir, hashed)
touch(base_filename)
touch(base_filename + '.info')
os.utime(base_filename + '.info', (old, old))
touch(base_filename + '.info')
os.utime(base_filename + '.info', (old, old))
self.mox.StubOutWithMock(
objects.block_device.BlockDeviceMappingList,
'bdms_by_instance_uuid')
ctxt = context.get_admin_context()
objects.block_device.BlockDeviceMappingList.bdms_by_instance_uuid(
ctxt, ['123', '456']).AndReturn({})
self.mox.ReplayAll()
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager.update(ctxt,
all_instances)
self.assertTrue(os.path.exists(base_filename))
self.assertTrue(os.path.exists(base_filename + '.info'))
def test_run_image_cache_manager_pass(self):
was = {'called': False}
def fake_get_all_by_filters(context, *args, **kwargs):
was['called'] = True
instances = []
for x in range(2):
instances.append(fake_instance.fake_db_instance(
image_ref='1',
uuid=x,
name=x,
vm_state='',
task_state=''))
return instances
with utils.tempdir() as tmpdir:
self.flags(instances_path=tmpdir)
self.stub_out('nova.db.instance_get_all_by_filters',
fake_get_all_by_filters)
compute = importutils.import_object(CONF.compute_manager)
self.flags(use_local=True, group='conductor')
compute.conductor_api = conductor.API()
ctxt = context.get_admin_context()
compute._run_image_cache_manager_pass(ctxt)
self.assertTrue(was['called'])
def test_store_swap_image(self):
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager._store_swap_image('swap_')
image_cache_manager._store_swap_image('swap_123')
image_cache_manager._store_swap_image('swap_456')
image_cache_manager._store_swap_image('swap_abc')
image_cache_manager._store_swap_image('123_swap')
image_cache_manager._store_swap_image('swap_129_')
self.assertEqual(len(image_cache_manager.back_swap_images), 2)
expect_set = set(['swap_123', 'swap_456'])
self.assertEqual(image_cache_manager.back_swap_images, expect_set)
@mock.patch.object(lockutils, 'external_lock')
@mock.patch.object(libvirt_utils, 'update_mtime')
@mock.patch('os.path.exists', return_value=True)
@mock.patch('os.path.getmtime')
@mock.patch('os.remove')
def test_age_and_verify_swap_images(self, mock_remove, mock_getmtime,
mock_exist, mock_mtime, mock_lock):
image_cache_manager = imagecache.ImageCacheManager()
expected_remove = set()
expected_exist = set(['swap_128', 'swap_256'])
image_cache_manager.back_swap_images.add('swap_128')
image_cache_manager.back_swap_images.add('swap_256')
image_cache_manager.used_swap_images.add('swap_128')
def getmtime(path):
return time.time() - 1000000
mock_getmtime.side_effect = getmtime
def removefile(path):
if not path.startswith('/tmp_age_test'):
return os.remove(path)
fn = os.path.split(path)[-1]
expected_remove.add(fn)
expected_exist.remove(fn)
mock_remove.side_effect = removefile
image_cache_manager._age_and_verify_swap_images(None, '/tmp_age_test')
self.assertEqual(1, len(expected_exist))
self.assertEqual(1, len(expected_remove))
self.assertIn('swap_128', expected_exist)
self.assertIn('swap_256', expected_remove)
@mock.patch.object(utils, 'synchronized')
@mock.patch.object(imagecache.ImageCacheManager, '_get_age_of_file',
return_value=(True, 100))
def test_lock_acquired_on_removing_old_enough_files(self, mock_get_age,
mock_synchronized):
base_file = '/tmp_age_test'
lock_path = os.path.join(CONF.instances_path, 'locks')
lock_file = os.path.split(base_file)[-1]
image_cache_manager = imagecache.ImageCacheManager()
image_cache_manager._remove_old_enough_file(
base_file, 60, remove_sig=False, remove_lock=False)
mock_synchronized.assert_called_once_with(lock_file, external=True,
lock_path=lock_path)
class VerifyChecksumTestCase(test.NoDBTestCase):
def setUp(self):
super(VerifyChecksumTestCase, self).setUp()
self.img = {'container_format': 'ami', 'id': '42'}
self.flags(checksum_base_images=True, group='libvirt')
def _make_checksum(self, tmpdir):
testdata = ('OpenStack Software delivers a massively scalable cloud '
'operating system.')
fname = os.path.join(tmpdir, 'aaa')
info_fname = imagecache.get_info_filename(fname)
with open(fname, 'w') as f:
f.write(testdata)
return fname, info_fname, testdata
def _write_file(self, info_fname, info_attr, testdata):
f = open(info_fname, 'w')
if info_attr == "csum valid":
csum = hashlib.sha1()
csum.update(testdata)
f.write('{"sha1": "%s"}\n' % csum.hexdigest())
elif info_attr == "csum invalid, not json":
f.write('banana')
else:
f.write('{"sha1": "banana"}')
f.close()
def _check_body(self, tmpdir, info_attr):
self.flags(instances_path=tmpdir)
self.flags(image_info_filename_pattern=('$instances_path/'
'%(image)s.info'),
group='libvirt')
fname, info_fname, testdata = self._make_checksum(tmpdir)
self._write_file(info_fname, info_attr, testdata)
image_cache_manager = imagecache.ImageCacheManager()
return image_cache_manager, fname
def test_verify_checksum(self):
with utils.tempdir() as tmpdir:
image_cache_manager, fname = self._check_body(tmpdir, "csum valid")
res = image_cache_manager._verify_checksum(self.img, fname)
self.assertTrue(res)
def test_verify_checksum_disabled(self):
self.flags(checksum_base_images=False, group='libvirt')
with utils.tempdir() as tmpdir:
image_cache_manager, fname = self._check_body(tmpdir, "csum valid")
res = image_cache_manager._verify_checksum(self.img, fname)
self.assertIsNone(res)
def test_verify_checksum_invalid_json(self):
with intercept_log_messages() as stream:
with utils.tempdir() as tmpdir:
image_cache_manager, fname = (
self._check_body(tmpdir, "csum invalid, not json"))
res = image_cache_manager._verify_checksum(
self.img, fname, create_if_missing=False)
self.assertFalse(res)
log = stream.getvalue()
# NOTE(mikal): this is a skip not a fail because the file is
# present, but is not in valid JSON format and therefore is
# skipped.
self.assertNotEqual(log.find('image verification skipped'), -1)
def test_verify_checksum_invalid_repaired(self):
with utils.tempdir() as tmpdir:
image_cache_manager, fname = (
self._check_body(tmpdir, "csum invalid, not json"))
res = image_cache_manager._verify_checksum(
self.img, fname, create_if_missing=True)
self.assertIsNone(res)
def test_verify_checksum_invalid(self):
with intercept_log_messages() as stream:
with utils.tempdir() as tmpdir:
image_cache_manager, fname = (
self._check_body(tmpdir, "csum invalid, valid json"))
res = image_cache_manager._verify_checksum(self.img, fname)
self.assertFalse(res)
log = stream.getvalue()
self.assertNotEqual(log.find('image verification failed'), -1)
def test_verify_checksum_file_missing(self):
with utils.tempdir() as tmpdir:
self.flags(instances_path=tmpdir)
self.flags(image_info_filename_pattern=('$instances_path/'
'%(image)s.info'),
group='libvirt')
fname, info_fname, testdata = self._make_checksum(tmpdir)
image_cache_manager = imagecache.ImageCacheManager()
res = image_cache_manager._verify_checksum('aaa', fname)
self.assertIsNone(res)
# Checksum requests for a file with no checksum now have the
# side effect of creating the checksum
self.assertTrue(os.path.exists(info_fname))
| cernops/nova | nova/tests/unit/virt/libvirt/test_imagecache.py | Python | apache-2.0 | 42,304 | 0.000307 |
# Copyright 2019, Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import pkg_resources
import typing
from typing import cast, Any, Callable, Optional, Sequence, Union
import warnings
from google.auth.credentials import AnonymousCredentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.pubsub_v1 import types
from google.cloud.pubsub_v1.subscriber import futures
from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager
from google.pubsub_v1.services.subscriber import client as subscriber_client
if typing.TYPE_CHECKING: # pragma: NO COVER
from google.cloud.pubsub_v1 import subscriber
from google.pubsub_v1.services.subscriber.transports.grpc import (
SubscriberGrpcTransport,
)
try:
__version__ = pkg_resources.get_distribution("google-cloud-pubsub").version
except pkg_resources.DistributionNotFound:
# Distribution might not be available if we are not running from within
# a PIP package.
__version__ = "0.0"
class Client(subscriber_client.SubscriberClient):
"""A subscriber client for Google Cloud Pub/Sub.
This creates an object that is capable of subscribing to messages.
Generally, you can instantiate this client with no arguments, and you
get sensible defaults.
Args:
kwargs: Any additional arguments provided are sent as keyword
            arguments to the underlying
:class:`~google.cloud.pubsub_v1.gapic.subscriber_client.SubscriberClient`.
Generally you should not need to set additional keyword
arguments. Optionally, regional endpoints can be set via
``client_options`` that takes a single key-value pair that
defines the endpoint.
Example:
.. code-block:: python
from google.cloud import pubsub_v1
subscriber_client = pubsub_v1.SubscriberClient(
# Optional
client_options = {
"api_endpoint": REGIONAL_ENDPOINT
}
)
"""
def __init__(self, **kwargs: Any):
# Sanity check: Is our goal to use the emulator?
# If so, create a grpc insecure channel with the emulator host
# as the target.
if os.environ.get("PUBSUB_EMULATOR_HOST"):
kwargs["client_options"] = {
"api_endpoint": os.environ.get("PUBSUB_EMULATOR_HOST")
}
kwargs["credentials"] = AnonymousCredentials()
# Instantiate the underlying GAPIC client.
super().__init__(**kwargs)
self._target = self._transport._host
self._closed = False
@classmethod
def from_service_account_file( # type: ignore[override]
cls, filename: str, **kwargs: Any
) -> "Client":
"""Creates an instance of this client using the provided credentials
file.
Args:
filename: The path to the service account private key json file.
kwargs: Additional arguments to pass to the constructor.
Returns:
A Subscriber :class:`~google.cloud.pubsub_v1.subscriber.client.Client`
instance that is the constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(**kwargs)
from_service_account_json = from_service_account_file # type: ignore[assignment]
@property
def target(self) -> str:
"""Return the target (where the API is).
Returns:
The location of the API.
"""
return self._target
@property
def closed(self) -> bool:
"""Return whether the client has been closed and cannot be used anymore.
.. versionadded:: 2.8.0
"""
return self._closed
@property
def api(self):
"""The underlying gapic API client.
.. versionchanged:: 2.10.0
Instead of a GAPIC ``SubscriberClient`` client instance, this property is a
proxy object to it with the same interface.
.. deprecated:: 2.10.0
Use the GAPIC methods and properties on the client instance directly
instead of through the :attr:`api` attribute.
"""
msg = (
'The "api" property only exists for backward compatibility, access its '
            'attributes directly through the client instance (e.g. "client.foo" '
'instead of "client.api.foo").'
)
warnings.warn(msg, category=DeprecationWarning)
return super()
def subscribe(
self,
subscription: str,
callback: Callable[["subscriber.message.Message"], Any],
flow_control: Union[types.FlowControl, Sequence] = (),
scheduler: Optional["subscriber.scheduler.ThreadScheduler"] = None,
use_legacy_flow_control: bool = False,
await_callbacks_on_shutdown: bool = False,
) -> futures.StreamingPullFuture:
"""Asynchronously start receiving messages on a given subscription.
This method starts a background thread to begin pulling messages from
a Pub/Sub subscription and scheduling them to be processed using the
provided ``callback``.
        The ``callback`` will be called with an individual
        :class:`google.cloud.pubsub_v1.subscriber.message.Message`. It is the
        responsibility of the callback to either call ``ack()`` or ``nack()``
        on the message when it has finished processing. If an exception occurs
        in the callback during processing, the exception is logged and the
        message is ``nack()``-ed.
        The ``flow_control`` argument can be used to control the rate at
        which messages are pulled. The settings are relatively conservative by
        default to prevent "message hoarding" - a situation where the client
        pulls a large number of messages but cannot process them fast enough,
        leading it to "starve" other clients of messages. Increasing these
        settings may lead to faster throughput for messages that do not take
        a long time to process.
The ``use_legacy_flow_control`` argument disables enforcing flow control
settings at the Cloud Pub/Sub server, and only the client side flow control
will be enforced.
This method starts the receiver in the background and returns a
*Future* representing its execution. Waiting on the future (calling
``result()``) will block forever or until a non-recoverable error
is encountered (such as loss of network connectivity). Cancelling the
        future will signal the process to shut down gracefully and exit.
        .. note:: This uses Pub/Sub's *streaming pull* feature. This feature
            has properties that may be surprising. Please take a look at
https://cloud.google.com/pubsub/docs/pull#streamingpull for
more details on how streaming pull behaves compared to the
synchronous pull method.
Example:
.. code-block:: python
from google.cloud import pubsub_v1
subscriber_client = pubsub_v1.SubscriberClient()
# existing subscription
subscription = subscriber_client.subscription_path(
'my-project-id', 'my-subscription')
def callback(message):
print(message)
message.ack()
future = subscriber_client.subscribe(
subscription, callback)
try:
future.result()
except KeyboardInterrupt:
future.cancel() # Trigger the shutdown.
future.result() # Block until the shutdown is complete.
Args:
subscription:
The name of the subscription. The subscription should have already been
created (for example, by using :meth:`create_subscription`).
callback:
The callback function. This function receives the message as
its only argument and will be called from a different thread/
process depending on the scheduling strategy.
flow_control:
The flow control settings. Use this to prevent situations where you are
inundated with too many messages at once.
scheduler:
An optional *scheduler* to use when executing the callback. This
controls how callbacks are executed concurrently. This object must not
be shared across multiple ``SubscriberClient`` instances.
use_legacy_flow_control (bool):
If set to ``True``, flow control at the Cloud Pub/Sub server is disabled,
though client-side flow control is still enabled. If set to ``False``
(default), both server-side and client-side flow control are enabled.
await_callbacks_on_shutdown:
If ``True``, after canceling the returned future, the latter's
``result()`` method will block until the background stream and its
helper threads have been terminated, and all currently executing message
callbacks are done processing.
If ``False`` (default), the returned future's ``result()`` method will
not block after canceling the future. The method will instead return
immediately after the background stream and its helper threads have been
terminated, but some of the message callback threads might still be
running at that point.
Returns:
A future instance that can be used to manage the background stream.
"""
flow_control = types.FlowControl(*flow_control)
manager = streaming_pull_manager.StreamingPullManager(
self,
subscription,
flow_control=flow_control,
scheduler=scheduler,
use_legacy_flow_control=use_legacy_flow_control,
await_callbacks_on_shutdown=await_callbacks_on_shutdown,
)
future = futures.StreamingPullFuture(manager)
manager.open(callback=callback, on_callback_error=future.set_exception)
return future
def close(self) -> None:
"""Close the underlying channel to release socket resources.
After a channel has been closed, the client instance cannot be used
anymore.
This method is idempotent.
"""
transport = cast("SubscriberGrpcTransport", self._transport)
transport.grpc_channel.close()
self._closed = True
def __enter__(self) -> "Client":
if self._closed:
raise RuntimeError("Closed subscriber cannot be used as context manager.")
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
| googleapis/python-pubsub | google/cloud/pubsub_v1/subscriber/client.py | Python | apache-2.0 | 11,505 | 0.002086 |
import sys
import time
from scheduler import SoSpiderSchdulerBase
class SoClusterManager(SoSpiderSchdulerBase):
def __init__(self, name):
self.load_conf()
self.make_connection()
self.name = name
self.log_file = open('./logs/%s_cluster.log' % self.name, 'w')
def log(self, text):
self.log_file.write(text + '\n')
self.log_file.flush()
def run(self):
        self.log('ClusterManager is running as %s' % self.name)
while True:
time.sleep(5)
current_spider_key = self.key_of_spider(self.name)
cluster_key = self.key_of_spider_cluster()
self.redis_cache.expire(current_spider_key, 30)
self.redis_cache.sadd(cluster_key, self.name)
all_spiders = self.redis_cache.smembers(cluster_key)
            self.log('spiders expected in cluster: %s' % all_spiders)
living_spiders = [
name for name in all_spiders
if self.redis_cache.exists(self.key_of_spider(name))
]
            self.log('living spiders in cluster: %s' % living_spiders)
if living_spiders:
self.redis_cache.sadd(cluster_key, *living_spiders)
for spider in all_spiders:
if spider not in living_spiders:
self.redis_cache.srem(cluster_key, spider)
if __name__ == '__main__':
name = sys.argv[1]
manager = SoClusterManager(name)
manager.run()
| Rolight/So-Spider | cluster_manager.py | Python | gpl-3.0 | 1,490 | 0 |
#!/usr/bin/env python
from gnuradio import gr
from gnuradio import trellis, digital
from gnuradio import eng_notation
import math
import sys
import random
import fsm_utils
def run_test (fo,fi,interleaver,Kb,bitspersymbol,K,dimensionality,constellation,Es,N0,IT,seed):
tb = gr.top_block ()
# TX
src = gr.lfsr_32k_source_s()
src_head = gr.head (gr.sizeof_short,Kb/16) # packet size in shorts
s2fsmi = gr.packed_to_unpacked_ss(bitspersymbol,gr.GR_MSB_FIRST) # unpack shorts to symbols compatible with the outer FSM input cardinality
enc = trellis.sccc_encoder_ss(fo,0,fi,0,interleaver,K)
mod = gr.chunks_to_symbols_sf(constellation,dimensionality)
# CHANNEL
add = gr.add_ff()
noise = gr.noise_source_f(gr.GR_GAUSSIAN,math.sqrt(N0/2),seed)
# RX
dec = trellis.sccc_decoder_combined_fs(fo,0,-1,fi,0,-1,interleaver,K,IT,trellis.TRELLIS_MIN_SUM,dimensionality,constellation,digital.TRELLIS_EUCLIDEAN,1.0)
fsmi2s = gr.unpacked_to_packed_ss(bitspersymbol,gr.GR_MSB_FIRST) # pack FSM input symbols to shorts
dst = gr.check_lfsr_32k_s()
#tb.connect (src,src_head,s2fsmi,enc_out,inter,enc_in,mod)
tb.connect (src,src_head,s2fsmi,enc,mod)
tb.connect (mod,(add,0))
tb.connect (noise,(add,1))
#tb.connect (add,head)
#tb.connect (tail,fsmi2s,dst)
tb.connect (add,dec,fsmi2s,dst)
tb.run()
#print enc_out.ST(), enc_in.ST()
ntotal = dst.ntotal ()
nright = dst.nright ()
runlength = dst.runlength ()
return (ntotal,ntotal-nright)
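# Note (added for clarity): run_test returns a tuple of (total shorts transmitted,
# shorts received in error) for a single noise realization.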
def main(args):
nargs = len (args)
if nargs == 5:
fname_out=args[0]
fname_in=args[1]
esn0_db=float(args[2]) # Es/No in dB
IT=int(args[3])
rep=int(args[4]) # number of times the experiment is run to collect enough errors
else:
        sys.stderr.write ('usage: test_sccc_turbo1.py fsm_name_out fsm_fname_in Es/No_db iterations repetitions\n')
sys.exit (1)
# system parameters
Kb=1024*16 # packet size in bits (make it multiple of 16 so it can be packed in a short)
fo=trellis.fsm(fname_out) # get the outer FSM specification from a file
    fi=trellis.fsm(fname_in) # get the inner FSM specification from a file
bitspersymbol = int(round(math.log(fo.I())/math.log(2))) # bits per FSM input symbol
if fo.O() != fi.I():
sys.stderr.write ('Incompatible cardinality between outer and inner FSM.\n')
sys.exit (1)
K=Kb/bitspersymbol # packet size in trellis steps
interleaver=trellis.interleaver(K,666) # construct a random interleaver
    modulation = fsm_utils.psk8 # see fsm_utils.py for available predefined modulations
dimensionality = modulation[0]
constellation = modulation[1]
if len(constellation)/dimensionality != fi.O():
sys.stderr.write ('Incompatible FSM output cardinality and modulation size.\n')
sys.exit (1)
# calculate average symbol energy
Es = 0
for i in range(len(constellation)):
Es = Es + constellation[i]**2
Es = Es / (len(constellation)/dimensionality)
N0=Es/pow(10.0,esn0_db/10.0); # calculate noise variance
tot_s=0 # total number of transmitted shorts
terr_s=0 # total number of shorts in error
terr_p=0 # total number of packets in error
for i in range(rep):
(s,e)=run_test(fo,fi,interleaver,Kb,bitspersymbol,K,dimensionality,constellation,Es,N0,IT,-long(666+i)) # run experiment with different seed to get different noise realizations
tot_s=tot_s+s
terr_s=terr_s+e
terr_p=terr_p+(terr_s!=0)
if ((i+1)%10==0): # display progress
print i+1,terr_p, '%.2e' % ((1.0*terr_p)/(i+1)),tot_s,terr_s, '%.2e' % ((1.0*terr_s)/tot_s)
# estimate of the (short or bit) error rate
print rep,terr_p, '%.2e' % ((1.0*terr_p)/(i+1)),tot_s,terr_s, '%.2e' % ((1.0*terr_s)/tot_s)
if __name__ == '__main__':
main (sys.argv[1:])
| n4hy/gnuradio | gr-trellis/src/examples/test_sccc_turbo1.py | Python | gpl-3.0 | 3,908 | 0.037615 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import job_search_commute_search
PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"]
def test_commute_search(tenant):
jobs = job_search_commute_search.search_jobs(PROJECT_ID, tenant)
for job in jobs:
assert "projects/" in job
| googleapis/python-talent | samples/snippets/job_search_commute_search_test.py | Python | apache-2.0 | 828 | 0 |
#! /usr/bin/env python
# coding=utf-8
# Copyright (c) 2019 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
def set_bit(v, index, x):
"""Set the index:th bit of v to 1 if x is truthy, else to 0, and return the new value."""
mask = 1 << index # Compute mask, an integer with just bit 'index' set.
v &= ~mask # Clear the bit indicated by the mask (if x is False)
if x:
v |= mask # If x was True, set the bit indicated by the mask.
return v # Return the result, we're done.
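# Illustrative check (added for clarity, not part of the original module):
# set_bit(0b0001, 2, True) returns 0b0101, and set_bit(0b0101, 2, False) restores 0b0001.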
def set_bits(v, start_bit, slice_length, x):
bin_x = bin(x)
for i, index in enumerate(range(start_bit, start_bit + slice_length)):
val = int(bin_x[-(i + 1)]) if 2 + i < len(bin_x) else 0
v = set_bit(v, index, val)
return v
def components_to_h3(components):
h3 = 18446744073709551615
h3 = set_bits(h3, 64 - 5, 4, components['mode'])
h3 = set_bits(h3, 64 - 8, 3, components['edge'])
h3 = set_bits(h3, 64 - 12, 4, components['resolution'])
h3 = set_bits(h3, 64 - 19, 7, components['base_cell'])
for i, cell in enumerate(components['cells']):
h3 = set_bits(h3, 64 - 19 - (i + 1) * 3, 3, cell)
h3 = set_bits(h3, 64 - 1, 4, 0)
return h3
def bitslice(x, start_bit, slice_length):
ones_mask = 2 ** slice_length - 1
return (x & (ones_mask << start_bit)) >> start_bit
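# Illustrative check (added for clarity): bitslice(0b110100, 2, 3) extracts the
# three bits starting at bit 2 and returns 0b101.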
def h3_index_mode(h3_long):
return bitslice(h3_long, 64 - 5, 4)
def h3_edge(h3_long):
return bitslice(h3_long, 64 - 8, 3)
def h3_resolution(h3_long):
return bitslice(h3_long, 64 - 12, 4)
def h3_base_cell(h3_long):
return bitslice(h3_long, 64 - 19, 7)
def h3_octal_components(h3_long):
res = h3_resolution(h3_long)
return "{0:0{w}o}".format(
bitslice(h3_long + 2 ** 63, 64 - 19 - 3 * res, 3 * res), w=res)
def h3_component(h3_long, i):
return bitslice(h3_long, 64 - 19 - 3 * i, 3)
def h3_components(h3_long):
return [h3_component(h3_long, i) for i in
range(1, h3_resolution(h3_long) + 1)]
def h3_to_components(h3_value):
'''
Extract the values from an H3 hexadecimal value
Refer to this for the bit layout:
https://uber.github.io/h3/#/documentation/core-library/h3-index-representations
'''
# lat_long = (0, 0) # h3ToGeo(h3_value)
return {
'mode': h3_index_mode(h3_value),
'edge': h3_edge(h3_value),
'resolution': h3_resolution(h3_value),
'base_cell': h3_base_cell(h3_value),
'cells': h3_components(h3_value)
}
if __name__ == '__main__':
value = 622236723497533439
components = h3_to_components(value)
h3 = components_to_h3(components)
components2 = h3_to_components(h3)
print(value)
print(components)
print(h3)
print(components2)
| uber/ludwig | ludwig/utils/h3_util.py | Python | apache-2.0 | 3,350 | 0.000299 |
import bpy
from bpy.types import NodeSocket
from bpy.props import FloatVectorProperty
class MatrixSocket(NodeSocket):
bl_idname = "MatrixSocket"
bl_label = "Matrix Socket"
MatProp = FloatVectorProperty(
name='Mat',
description='Vector of 16 floats representing the Matrix',
default=(
            1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0
),
size=16,
)
def get_default_value(self):
if 'MatProp' in self.keys():
matrix = self['MatProp']
return [
matrix[0], matrix[1], matrix[2], matrix[3],
matrix[4], matrix[5], matrix[6], matrix[7],
matrix[8], matrix[9], matrix[10], matrix[11],
matrix[12], matrix[13], matrix[14], matrix[15]
]
else:
return [
1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0,
]
def draw(self, context, layout, node, text):
if self.is_output or self.is_linked:
layout.label(text)
else:
col = layout.column(align=True)
for i in range(4):
row = col.row(align=True)
for j in range(i*4, i*4 + 4):
row.prop(self, 'MatProp', text='', index=j)
def draw_color(self, context, node):
return(.2, .8, .2, 1.0)
def register():
bpy.utils.register_class(MatrixSocket)
def unregister():
bpy.utils.unregister_class(MatrixSocket)
| jakobharlan/avango | avango-blender/blender-addon/sockets/matrix_socket.py | Python | lgpl-3.0 | 1,640 | 0 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Johann Prieur <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
from common import *
def transport_headers():
"""Returns a dictionary, containing transport (http) headers
to use for the request"""
return {}
def soap_action():
"""Returns the SOAPAction value to pass to the transport
or None if no SOAPAction needs to be specified"""
return "http://www.hotmail.msn.com/ws/2004/09/oim/rsi/GetMetadata"
def soap_body():
"""Returns the SOAP xml body"""
return """
<GetMetadata xmlns="http://www.hotmail.msn.com/ws/2004/09/oim/rsi" />"""
def process_response(soap_response):
body = soap_response.body
try:
return body.find("./rsi:GetMetadataResponse")
except AttributeError:
return None
| Kjir/papyon | papyon/service/description/RSI/GetMetadata.py | Python | gpl-2.0 | 1,488 | 0.004704 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "LinearTrend", cycle_length = 30, transform = "Integration", sigma = 0.0, exog_count = 100, ar_order = 0); | antoinecarme/pyaf | tests/artificial/transf_Integration/trend_LinearTrend/cycle_30/ar_/test_artificial_1024_Integration_LinearTrend_30__100.py | Python | bsd-3-clause | 271 | 0.084871 |
# -*- encoding: utf-8 -*-
from supriya.tools.synthdeftools.SignalRange import SignalRange
from supriya.tools.ugentools.UGen import UGen
class Dust(UGen):
r'''A unipolar random impulse generator.
::
>>> dust = ugentools.Dust.ar(
... density=23,
... )
>>> dust
Dust.ar()
'''
### CLASS VARIABLES ###
__documentation_section__ = 'Noise UGens'
__slots__ = ()
_ordered_input_names = (
'density',
)
_signal_range = SignalRange.UNIPOLAR
### INITIALIZER ###
def __init__(
self,
calculation_rate=None,
density=0.,
):
UGen.__init__(
self,
calculation_rate=calculation_rate,
density=density,
)
### PUBLIC METHODS ###
@classmethod
def ar(
cls,
density=0,
):
r'''Constructs an audio-rate unipolar random impulse generator.
::
>>> ugentools.Dust.ar(
... density=[1, 2],
... )
UGenArray({2})
Returns unit generator graph.
'''
from supriya.tools import synthdeftools
calculation_rate = synthdeftools.CalculationRate.AUDIO
ugen = cls._new_expanded(
calculation_rate=calculation_rate,
density=density,
)
return ugen
@classmethod
def kr(
cls,
density=0,
):
r'''Constructs a control-rate unipolar random impulse generator.
::
>>> ugentools.Dust.kr(
... density=[1, 2],
... )
UGenArray({2})
Returns unit generator graph.
'''
from supriya.tools import synthdeftools
calculation_rate = synthdeftools.CalculationRate.CONTROL
ugen = cls._new_expanded(
calculation_rate=calculation_rate,
density=density,
)
return ugen
### PUBLIC PROPERTIES ###
@property
def density(self):
r'''Gets `density` input of Dust.
::
>>> density = 0.25
>>> dust = ugentools.Dust.ar(
... density=density,
... )
>>> dust.density
0.25
Returns input.
'''
index = self._ordered_input_names.index('density')
return self._inputs[index] | andrewyoung1991/supriya | supriya/tools/ugentools/Dust.py | Python | mit | 2,414 | 0.003314 |
from django.contrib import admin
from .models import SimpleModel, Type, FKModel
admin.site.register(SimpleModel)
admin.site.register(Type)
admin.site.register(FKModel)
| wq/django-data-wizard | tests/data_app/admin.py | Python | mit | 170 | 0 |
from kivy.utils import platform
from kivy.logger import Logger
Logger.info('Loading RevMob module')
if platform() == 'android':
from .android import *
elif platform() == 'ios':
from .ios import *
else:
    Logger.warning('[RevMob] Unknown platform: %s' % platform())
| aron-bordin/Kivy-Tutorials | 5_Perception/Perception/revmob/__init__.py | Python | gpl-2.0 | 275 | 0 |
# -*- coding: utf-8 -*-
# Copyright 2020 Green Valley Belgium NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.7@@
import logging
from collections import defaultdict
from datetime import datetime
from email.MIMEText import MIMEText
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from os import path
import jinja2
from google.appengine.ext import ndb
from typing import List, Dict
import solutions
from rogerthat.bizz.communities.communities import get_community
from rogerthat.dal.app import get_app_by_id
from rogerthat.dal.profile import get_service_profile
from rogerthat.models import App
from rogerthat.rpc import users
from rogerthat.settings import get_server_settings
from rogerthat.utils import send_mail_via_mime
from solutions import translate
from solutions.common.dal import get_solution_settings
from solutions.common.jobs.models import JobsSettings, JobNotificationType, JobSolicitation, OcaJobOffer
from solutions.common.jobs.to import JobsSettingsTO
from solutions.common.models import SolutionSettings
from solutions.jinja_extensions import TranslateExtension
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader([path.join(path.dirname(__file__), '..', 'templates', 'emails')]),
extensions=[TranslateExtension])
def get_jobs_settings(service_user):
# type: (users.User) -> JobsSettings
key = JobsSettings.create_key(service_user)
settings = key.get()
if not settings:
settings = JobsSettings(key=key, emails=[], notifications=JobNotificationType.all())
return settings
def update_jobs_settings(service_user, data):
# type: (users.User, JobsSettingsTO) -> JobsSettings
settings = get_jobs_settings(service_user)
settings.notifications = data.notifications
settings.emails = data.emails
settings.put()
return settings
def send_job_notifications_for_service(jobs_settings_key, min_date, max_date):
# type: (ndb.Key, datetime, datetime) -> None
jobs_settings = jobs_settings_key.get() # type: JobsSettings
if not jobs_settings.emails:
logging.debug('No emails set, not sending jobs notifications')
return
service_user = users.User(jobs_settings_key.parent().id())
solicitations = JobSolicitation.list_unseen_by_service(service_user, min_date, max_date) \
.fetch(None) # type: List[JobSolicitation]
if not solicitations:
logging.debug('No new updates for jobs from service %s', service_user)
return
sln_settings = get_solution_settings(jobs_settings.service_user)
language = sln_settings.main_language
jobs = ndb.get_multi({solicitation.job_key for solicitation in solicitations}) # type: List[OcaJobOffer]
updates_per_job = defaultdict(int)
for solicitation in solicitations:
updates_per_job[solicitation.job_key.id()] += 1
subject = _get_subject_for_update_count(language, len(jobs), len(solicitations))
html_body, text_body = _get_body_for_job_updates(language, jobs, updates_per_job)
_send_email_for_notification(sln_settings, jobs_settings, subject, html_body, text_body)
def _get_subject_for_update_count(lang, jobs_count, updates_count):
# type: (unicode, int, int) -> unicode
if jobs_count == 1:
if updates_count == 1:
return translate(lang, 'there_is_an_update_about_your_job')
else:
return translate(lang, 'there_are_some_update_about_your_job')
else:
return translate(lang, 'there_are_some_update_about_your_jobs')
def _get_body_for_job_updates(lang, jobs, updates_per_job):
# type: (unicode, List[OcaJobOffer], Dict[int, int]) -> tuple[unicode, unicode]
html_body = []
text_body = []
if len(jobs) == 1:
job_name = jobs[0].function.title
html_job_name = '<b>%s</b>' % job_name
update_count = updates_per_job[jobs[0].id]
if update_count == 1:
html_body.append(translate(lang, 'jobs_one_new_update_message', job_name=html_job_name))
text_body.append(translate(lang, 'jobs_one_new_update_message', job_name=job_name))
else:
html_body.append(translate(lang, 'jobs_some_updates_message', job_name=html_job_name))
text_body.append(translate(lang, 'jobs_some_updates_message', job_name=job_name))
else:
msg = translate(lang, 'jobs_multiple_updates_message')
html_body.append(msg)
html_body.append('<ul>')
text_body.append(msg)
text_body.append('')
for job in jobs:
update_count = updates_per_job[job.id]
if update_count == 1:
update_line = translate(lang, 'one_new_update')
else:
update_line = translate(lang, 'x_new_updates', count=update_count)
html_body.append('<li><b>%s:</b> %s</li>' % (job.function.title, update_line))
text_body.append('* %s: %s' % (job.function.title, update_line))
html_body.append('</ul>')
return '<br>'.join(html_body), '\n'.join(text_body)
def _send_email_for_notification(sln_settings, jobs_settings, subject, html_body, text_body):
# type: (SolutionSettings, JobsSettings, unicode, unicode, unicode) -> None
def transl(key, **params):
return translate(sln_settings.main_language, key, **params)
settings = get_server_settings()
service_profile = get_service_profile(jobs_settings.service_user)
community = get_community(service_profile.community_id)
app = get_app_by_id(community.default_app)
mime_root = MIMEMultipart('related')
mime_root['Subject'] = subject
if app.type == App.APP_TYPE_ROGERTHAT:
mime_root['From'] = settings.senderEmail
else:
mime_root['From'] = '%s <%s>' % (community.name, app.dashboard_email_address)
mime_root['To'] = ', '.join(jobs_settings.emails)
mime = MIMEMultipart('alternative')
mime_root.attach(mime)
button_css = 'display: inline-block; margin-left: 0.5em; margin-right: 0.5em; -webkit-border-radius: 6px;' \
' -moz-border-radius: 6px; border-radius: 6px; font-family: Arial; color: #ffffff; font-size: 14px;' \
' background: #3abb9e; padding: 8px 16px 8px 16px; text-decoration: none;'
signin_url = settings.get_signin_url()
if sln_settings.login:
signin_url += '?email=%s' % sln_settings.login.email()
btn = u'<a href="%(signin_url)s" style="%(button_css)s">%(dashboard)s</a>' % {
'signin_url': signin_url,
'button_css': button_css,
'dashboard': transl('dashboard')
}
action_text = transl('if-email-body-3-button', dashboard_button=btn)
footer_text = transl('jobs_notification_footer', service_name=sln_settings.name, app_name=app.name,
dashboard_url='%s (%s)' % (transl('dashboard').lower(), signin_url))
footer_html = transl('jobs_notification_footer', service_name=sln_settings.name, app_name=app.name,
dashboard_url='<a href="%s">%s</a>' % (signin_url, transl('dashboard').lower()))
html_params = {
'message': html_body,
'action_text': action_text,
'footer': footer_html.replace('\n', '<br>'),
}
url_txt = transl('if-email-body-3-url', dashboard_url=signin_url)
text_params = {
'message': text_body,
'url_text': url_txt,
'footer': footer_text,
}
body_html = JINJA_ENVIRONMENT.get_template('solicitation_message_html.tmpl').render(html_params)
body = JINJA_ENVIRONMENT.get_template('solicitation_message.tmpl').render(text_params)
mime.attach(MIMEText(body.encode('utf-8'), 'plain', 'utf-8'))
mime.attach(MIMEText(body_html.encode('utf-8'), 'html', 'utf-8'))
with open(path.join(path.dirname(solutions.__file__), 'common', 'templates', 'emails', 'oca-email-header.png'), 'r') as f:
img_data = f.read()
img = MIMEImage(img_data, 'png')
img.add_header('Content-Id', '<osa-footer>')
img.add_header('Content-Disposition', 'inline', filename='Onze Stad App footer')
mime_root.attach(img)
send_mail_via_mime(settings.senderEmail, sln_settings.inbox_mail_forwarders, mime_root)
| our-city-app/oca-backend | src/solutions/common/jobs/notifications.py | Python | apache-2.0 | 8,697 | 0.003449 |
import wx
import wx.xrc as xrc
import structobject
uimgr = None
def get():
return uimgr
class UIManager(object):
def __init__(self, resource_file, top_level_name, top_level_type="frame", reuse_app=False, load_on=None):
global uimgr
self._event_aliases = {}
self.resource_name = resource_file
if not reuse_app:
self.app = wx.App(redirect=False)
else:
self.app = wx.GetApp()
self.resource = xrc.XmlResource(resource_file)
self.top_level = getattr(self, "get_%s"%top_level_type)(top_level_name, load_on)
self.app.TopLevelWindow = self.top_level
self.add_event_alias("button", "clicked")
self.add_event_alias("menu", "selected")
self._dlg_stack = []
self.current_dialog = None
uimgr = self
def get_frame(self, name, load_on=None):
if load_on:
self.resource.LoadFrame(load_on, getattr(self, "top_level", None), name)
return load_on
else:
return self.resource.LoadFrame(getattr(self, "top_level", None), name)
def get_dialog(self, name, load_on=None):
if load_on:
self.resource.LoadDialog(load_on, self.usable_parent, name)
return load_on
else:
return self.resource.LoadDialog(self.usable_parent, name)
def auto_bind(self, parent, eventsobj, only=[], alternate_bind_of=[]):
if eventsobj is None: return
handlers = [name for name in dir(eventsobj) if name.startswith("on_")]
olen = len(only)
for h in handlers:
try:
on, name, event = h.split("_", 2)
            except ValueError: raise ValueError("Supposed handler %s has a malformed name (expected 'on_<widget>_<event>')." % h)
if olen != 0 and name not in only: continue
event = self._lookup_event(event)
parent.Bind(event, getattr(eventsobj, h), id=xrc.XRCID(name))
for a in alternate_bind_of: self._alternate_bind(parent, a, eventsobj)
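    # Handler naming convention assumed by auto_bind (illustrative): a method named
    # "on_save_clicked" splits into ("on", "save", "clicked") and is bound as
    # wx.EVT_BUTTON (via the "clicked" alias registered in __init__) on the widget
    # whose XRC name is "save".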
def run(self, show=True):
if show: self.top_level.Show()
self.app.MainLoop()
def close(self):
if self.usable_parent:
self.top_level.Close()
self.app.ExitMainLoop()
def add_event_alias(self, wx_event, event):
self._event_aliases[event] = wx_event
def show_error(self, title, message):
wx.MessageBox(message, title, parent=self.current_dialog or self.top_level, style=wx.ICON_ERROR|wx.OK)
def get_values(self, parent, *vals):
result = {}
for name in vals:
try:
result[name] = parent.FindWindowByName(name).Value
except AttributeError:
result[name] = parent.FindWindowByName(name).Selection
return structobject.Structobject(**result)
def set_values(self, parent, valsobj, *vals):
for name in vals:
try:
parent.FindWindowByName(name).Value = unicode(getattr(valsobj, name, ""))
except AttributeError:
parent.FindWindowByName(name).Selection = getattr(valsobj, name)
def clear_fields(self, parent, *fields):
for f in fields:
parent.FindWindowByName(f).Value = ""
def show_dialog(self, name, eventsobj=None, only=[], alternate_bind_of=[]):
dlg = self.prepare_show_dialog(name, eventsobj, only, alternate_bind_of)
dlg.ShowModal()
def prepare_show_dialog(self, name, eventsobj=None, only=[], alternate_bind_of=[]):
dlg = self.get_dialog(name)
if self.current_dialog is not None:
dlg.Bind(wx.EVT_CLOSE, self._unwind_stack)
self._dlg_stack.append(dlg)
self.current_dialog = dlg
if eventsobj:
self.auto_bind(self.current_dialog, eventsobj, only, alternate_bind_of)
else: # The subclassing case
self.auto_bind(self.current_dialog, self.current_dialog, only, alternate_bind_of)
        # Hack to work around wxGTK's focus quirks: set focus to the first
        # non-StaticText widget of the panel, which is assumed to exist.
        # This could break in the future if the dialog layout changes.
for c in self.current_dialog.Children[0].Children:
if not isinstance(c, wx.StaticText):
c.SetFocus()
break
return dlg
def prepare_xrc_dialog(self, cls, alternate_bind_of=[], parent=None, **post_init_kwargs):
if not parent: parent = self.top_level
inst = cls()
if hasattr(wx, "PreDialog"):
pre = wx.PreDialog()
self.resource.LoadOnDialog(pre, parent, cls.xrc_name)
inst.PostCreate(pre)
else: # Phoenix
self.resource.LoadDialog(inst, parent, cls.xrc_name)
self.auto_bind(inst, inst, alternate_bind_of=alternate_bind_of)
if hasattr(inst, "post_init"): inst.post_init(**post_init_kwargs)
return inst
def _unwind_stack(self, evt):
try: self._dlg_stack.remove(self.current_dialog)
except ValueError: pass
try: self.current_dialog = self._dlg_stack.pop()
except IndexError: self.current_dialog = None
evt.Skip()
def _alternate_bind(self, parent, meth, evtobj):
on, name, event = meth.split("_", 2)
evt = self._lookup_event(event)
parent.FindWindowByName(name).Bind(evt, getattr(evtobj, meth))
def _lookup_event(self, evt):
if not hasattr(wx, "EVT_%s"%evt.upper()):
evt = self._event_aliases[evt]
return getattr(wx, "EVT_%s"%evt.upper())
@property
def usable_parent(self):
obj = getattr(self, "top_level", None)
if not issubclass(obj.__class__, wx.Window): return None
return obj | tyrylu/orgedit | uimanager.py | Python | mit | 5,741 | 0.00749 |
__author__ = 'Mars'
import random
class Question:
def __init__(self, monster):
self.name = "Question"
self.monster = monster
class PercentMonsterLevel1(Question):
def __init__(self, monster):
Question.__init__(self,monster)
self.name = "PercentMonsterLevel1"
def makeQ(self):
        nums = [10, 50] # creating array of percentages
num1 = random.choice(nums) # choosing random percentage
nums2 = [10, 20, 40, 100]
print
num2 = random.choice(nums2)
q1 = ("What is {0} percent of {1}?").format(num1, num2) # question string
i = 0
options = []
while (i<4):
options.append(random.randint(0,100))
i+=1
a1 = int((num1 / 100.0) * num2) # num1 is the percentage, which should mutltiply by num2
options.append(a1)
random.shuffle(options)
print("Choose the correct answer: {0}").format(options)
return q1, a1, options
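    # Illustrative usage (assumed caller, not part of this module):
    # question, answer, options = PercentMonsterLevel1(monster).makeQ()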
class PercentMonsterLevel2(Question):
def __init__(self,monster):
Question.__init__(self,monster)
self.name = "PercentMonsterLevel2"
def makeQ(self):
        nums = [10, 20, 40, 80] # creating array of percentages
num1 = random.choice(nums) # choosing random percentage
nums2 = [10, 20, 40, 100]
print
num2 = random.choice(nums2)
q1 = ("What is {0} percent of {1} ").format(num1, num2) # question string
i = 0
options = []
while (i<4):
options.append(random.randint(0,100))
i+=1
a1 = int((num1 / 100.0) * num2) # num1 is the percentage, which should mutltiply by num2
options.append(a1)
random.shuffle(options)
print("Choose the correct answer: {0}").format(options)
return q1, a1, options
#Subclass of Monster class for geometry-related monsters
class GeoMonsterLevel1(Question):
def __init__(self, monster):
Question.__init__(self, monster)
self.name = "GeoMonsterLevel1"
def getType(self):
return(self.__class__)
def makeQ(self):
shapes = ["Square", "Rectangle", "Triangle", "Octagon", "Pentagon", ]
shape = random.sample(shapes, 1)[0]
q1 = ("How many sides does a {0} have?").format(shape)
options = [3,4,8,5]
if shape == "Square" or shape =="Rectangle":
a1 = 4
elif shape == "Triangle":
a1 = 3
elif shape == "Octagon":
a1=8
elif shape =="Pentagon":
a1=5
print("Choose the correct answer: {0}").format(options)
return q1, a1, options
class GeoMonsterLevel2(Question):
def __init__(self, monster):
Question.__init__(self, monster)
self.name = "GeoMonsterLevel2"
def getType(self):
return(self.__class__)
def makeQ(self):
angle = random.randint(0,180)
q1 = ("What kind of angle is a {0} degrees?").format(angle)
options = ["Acute" , "Right" , "Obtuse"]
        if (angle < 90):
            a1 = "Acute"
        elif angle == 90:
            a1 = "Right"
        elif angle > 90:
            a1 = "Obtuse"
print("Choose the correct answer: {0}").format(options)
return q1,a1,options
class GeoMonsterLevel3(Question):
    def __init__(self, monster):
        Question.__init__(self, monster)
        self.name = "GeoMonsterLevel3"
    def makeQ(self):
angles = [30, 45, 60, 90, 120, 180]
angle = random.choice(angles)
q1 = ("How many {0} degree angles are needed to make a full circle?").format(angle)
a1 = 360 / angle
options = [12, 8, 6, 4, 3, 2]
random.shuffle(options)
print("Choose the correct answer: {0}").format(options)
return q1, a1, options
# Subclass of Monster class for geometry-related monsters
class MultiMonsterLevel1(Question):
def __init__(self, monster):
Question.__init__(self, monster)
self.name = "MultiMonsterLevel1"
def makeQ(self):
nums1 = [1,2,5,10] #creating array of numbers to multiply
num1 = random.choice(nums1) #choosing random number to multiply
nums2 = [1,2,3,4,5,6,7,8,9,10]
num2 = random.choice(nums2)
q1 = ("What is {0} multiplied by {1}? ").format(num1, num2) #question string
a1 = int( num1 * num2 ) #What is num1 times num2
i = 0
options = []
while (i<4):
options.append(random.randint(1, 100))
i+=1
options.append(a1)
random.shuffle(options)
print("Choose the correct answer: {0}").format(options)
return q1, a1, options
class MultiMonsterLevel2(Question):
    def __init__(self, monster):
        Question.__init__(self, monster)
        self.name = "MultiMonsterLevel2"
    def makeQ(self):
nums1 = [2,3,4,5,6,7,8,9,10] #creating array of numbers to multiply
num1 = random.choice(nums1) #choosing random number to multiply
nums2 = [2,3,4,5,6,7,8,9,10]
num2 = random.choice(nums2)
q1 = ("What is {0} multiplied by {1}? ").format(num1, num2) #question string
a1 = int( num1 * num2 ) #What is num1 times num2
i = 0
options = []
while (i<4):
options.append(random.randint(1,100))
i+=1
options.append(a1)
random.shuffle(options)
print("Choose the correct answer: {0}").format(options)
return q1, a1, options
class MultiMonsterLevel3(Question):
    def __init__(self, monster):
        Question.__init__(self, monster)
        self.name = "MultiMonsterLevel3"
    def makeQ(self):
nums1 = [6,7,8,9,10] #creating array of numbers to multiply
num1 = random.choice(nums1) #choosing random number to multiply
nums2 = [10,11,12,20,25,50]
num2 = random.choice(nums2)
q1 = ("What is {0} multiplied by {1}? ").format(num1, num2) #question string
a1 = int( num1 * num2 ) #What is num1 times num2
i = 0
options = []
while (i<4):
options.append(random.randint(1,100))
i+=1
options.append(a1)
random.shuffle(options)
print("Choose the correct answer: {0}").format(options)
return q1, a1, options | rdp1070/PokeMath | Question.py | Python | gpl-3.0 | 5,970 | 0.019598 |
import graphene
from graphene_django.types import DjangoObjectType
from django.contrib.auth import (get_user_model, authenticate,
login as auth_login, logout as auth_logout,
update_session_auth_hash)
from django.contrib.auth.forms import (UserCreationForm, AuthenticationForm,
PasswordChangeForm)
from cryptographer.models import Cryptographer
from cryptographer.schema import CryptographerType
from helpers.schema import FormError, list_errors
class LoginUser(graphene.Mutation):
class Arguments:
username = graphene.String()
password = graphene.String()
user = graphene.Field(CryptographerType)
success = graphene.Boolean()
errors = graphene.List(FormError)
def mutate(self, info, username, password):
if info.context.user.is_authenticated:
return LoginUser(
user=None,
success=False,
errors=list_errors({ "__all__": ['Cannot login when already logged in']})
)
form = AuthenticationForm(info.context, { "username": username, "password": password })
if form.is_valid():
success = True
user = form.get_user()
auth_login(info.context, user)
return LoginUser(user=user.cryptographer, success=True, errors=[])
else:
return LoginUser(user=None, success=False, errors=list_errors(form.errors))
class SignupUser(graphene.Mutation):
class Arguments:
username = graphene.String()
password1 = graphene.String()
password2 = graphene.String()
user = graphene.Field(CryptographerType)
success = graphene.Boolean()
errors = graphene.List(FormError)
def mutate(self, info, username, password1, password2):
if info.context.user.is_authenticated:
return SignupUser(
user=None,
success=False,
errors=list_errors({ "__all__": ['Cannot signup when already logged in']})
)
form = UserCreationForm({
"username": username,
"password1": password1,
"password2": password2
})
if form.is_valid():
form.save()
user = authenticate(username=username, password=password1)
# create a Cryptographer and link it to the user
c = Cryptographer(user=user)
c.save()
auth_login(info.context, user)
return SignupUser(user=c, success=True, errors=[])
else:
return SignupUser(user=None, success=False, errors=list_errors(form.errors))
class LogoutUser(graphene.Mutation):
success = graphene.Boolean()
user = graphene.Field(CryptographerType)
def mutate(self, info):
auth_logout(info.context)
return LogoutUser(success=True, user=None)
class ChangePassword(graphene.Mutation):
class Arguments:
old_password = graphene.String()
new_password1 = graphene.String()
new_password2 = graphene.String()
success = graphene.Boolean()
errors = graphene.List(FormError)
def mutate(self, info, old_password, new_password1, new_password2):
form = PasswordChangeForm(info.context.user, data={
"old_password": old_password,
"new_password1": new_password1,
"new_password2": new_password2
})
if form.is_valid():
form.save()
update_session_auth_hash(info.context, form.user)
return ChangePassword(success=True, errors=[])
else:
return ChangePassword(success=False, errors=list_errors(form.errors))
class Mutation(object):
login_user = LoginUser.Field()
signup_user = SignupUser.Field()
logout_user = LogoutUser.Field()
change_password = ChangePassword.Field()
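# Illustrative client-side usage (assumed, not part of this module): with graphene's
# default camelCase field conversion, LoginUser can be invoked roughly as
# mutation { loginUser(username: "alice", password: "s3cret") { success } }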
| pshrmn/cryptonite | cryptonite/user_auth/schema.py | Python | mit | 3,906 | 0.00384 |