code | repo_name | path | language | license | size
---|---|---|---|---|---
"""
Module for code that should run during Studio startup (deprecated)
"""
import django
from django.conf import settings
# Force settings to run so that the python path is modified
settings.INSTALLED_APPS # pylint: disable=pointless-statement
def run():
"""
Executed during django startup
NOTE: DO **NOT** add additional code to this method or this file! The Platform Team
is moving all startup code to more standard locations using Django best practices.
"""
django.setup()
import mimetypes
for extension, mimetype in settings.EXTRA_MIMETYPES.items():
mimetypes.add_type(mimetype, extension)
| Stanford-Online/edx-platform | cms/startup.py | Python | agpl-3.0 | 650 |
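A minimal standalone sketch of what the loop above achieves; the EXTRA_MIMETYPES value shown here is hypothetical, not taken from edx-platform settings.
import mimetypes
EXTRA_MIMETYPES = {'.scss': 'text/x-scss'}  # hypothetical setting value
for extension, mimetype in EXTRA_MIMETYPES.items():
    mimetypes.add_type(mimetype, extension)
print(mimetypes.guess_type('theme.scss'))  # ('text/x-scss', None)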
#! /usr/bin/python
# -*- coding: utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
# Cosm.com to CSV
# Copyright (C) 2013 by Xose Pérez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__app__ = "Cosm.com to CSV"
__version__ = "0.1"
__author__ = "Xose Pérez"
__contact__ = "[email protected]"
__copyright__ = "Copyright (C) 2013 Xose Pérez"
__license__ = 'GPL v3'
import re
import datetime
from libs.services.Cosm import Cosm
import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Cosm.com to CSV')
parser.add_argument("-k", dest="api_key", help="cosm.com API key", required=True)
parser.add_argument("-f", dest="feed", help="coms.com feed", required=True, type=int)
parser.add_argument("-d", dest="datastream", help="datastream in the feed", required=True)
parser.add_argument("-s", dest="start", help="start datetime (YYYY-MM-DD [HH:MM:SS])", required=True)
parser.add_argument("-e", dest="end", help="end datetime (YYYY-MM-DD [HH:MM:SS])", required=True)
parser.add_argument("--format", dest="format", help="output timestamp format")
options = parser.parse_args()
cosm = Cosm(options.api_key)
start = datetime.datetime(*[int(x) for x in re.findall(r'\d+', options.start)])
end = datetime.datetime(*[int(x) for x in re.findall(r'\d+', options.end)])
print "timestamp,value"
for ts, value in cosm.get(options.feed, options.datastream, start, end):
if options.format:
ts = datetime.datetime(*[int(x) for x in re.findall(r'\d+', ts)])
ts = ts.strftime(options.format)
print "%s,%s" % (ts, value)
| xoseperez/mqtt2cloud | cosm2csv.py | Python | gpl-3.0 | 2,269 |
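The start/end parsing above splats every digit group found in the input string into the datetime constructor; a standalone illustration of that trick:
import re
import datetime
# '2013-01-05 12:30:00' -> [2013, 1, 5, 12, 30, 0]
parts = [int(x) for x in re.findall(r'\d+', '2013-01-05 12:30:00')]
print(datetime.datetime(*parts))  # 2013-01-05 12:30:00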
# -*- coding: utf-8 -*-
import moose
import re
def fixPath(path):
path = re.sub(r'/+', '/', path)
return path
paths = [ '/a'
, '//a'
, '/a/b'
, '/a/b/'
, '//a//b/////'
, '/a/./b'
, '///a/././b'
]
expectedPath = [fixPath(p) for p in paths]
expectedChanges = zip(paths, expectedPath)
for p in expectedPath:
print( 'Accessing %s' % p )
try:
moose.Neutral(p)
except Exception as e:
print( 'Same path access by element' )
moose.element( p )
foundPath = []
for p in moose.wildcardFind('/##'):
if "/a" in p.path:
foundPath.append(p.path)
testFailed = False
for f in foundPath:
f = re.sub(r'\[\d+\]', '', f)
if f not in expectedPath:
testFailed = True
if testFailed:
print("Test failed on paths")
| dharmasam9/moose-core | tests/python/test_accessing_existing_paths.py | Python | gpl-3.0 | 827 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution Addon
# Copyright (C) 2009-2012 IRSID (<http://irsid.ru>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'IRSID Base',
'version': '2.0',
'category': 'Hidden',
'sequence': 1,
'summary': 'IRSID base module',
'description': """
The base module, needed for installation of other IRSID modules.
================================================================
""",
'author': 'IRSID',
'website': 'irsid.ru',
'depends': ['base','mail'],
'data': [
],
'demo': [],
'test': [
],
'installable': True,
'auto_install': False,
'css': [ ],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| prospwro/odoo | addons/irsid_base/__openerp__.py | Python | agpl-3.0 | 1,538 |
from boardme import db
import datetime
from sqlalchemy.orm import relationship
from werkzeug.security import generate_password_hash, check_password_hash
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
first_name = db.Column(db.String(128))
last_name = db.Column(db.String(128))
username = db.Column(db.String(128))
mobile = db.Column(db.String(128))
password = db.Column(db.String(128))
currency_type = db.Column(db.String(128))
wallet = db.Column(db.Float)
created_ts = db.Column(db.DateTime, default=datetime.datetime.now)
last_updated_ts = db.Column(db.DateTime, default=datetime.datetime.now, onupdate=datetime.datetime.now)
history = relationship('TravelHistory', back_populates='user')
def __init__(self, first_name, last_name, username, mobile, password, currency_type, wallet):
self.first_name = first_name
self.last_name = last_name
self.username = username
self.mobile = mobile
self.password = generate_password_hash(password)
self.currency_type = currency_type
self.wallet = wallet
def check_password(self, _password):
return check_password_hash(self.password, _password)
def full_name(self):
return self.first_name + ' ' + self.last_name
def to_dict(self):
return {
'id': self.id,
'firstName': self.first_name,
'lastName': self.last_name,
'fullName': self.first_name + ' ' + self.last_name,
'username': self.username,
'mobile': self.mobile,
'currencyType': self.currency_type,
'wallet': self.wallet,
'createdTS': self.created_ts,
'lastUpdatedTS': self.last_updated_ts,
}
| DextrousInc/board-me-server | wsgi/boardme/models/users.py | Python | mit | 1,768 |
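A hedged usage sketch for the model above; it assumes a configured Flask-SQLAlchemy session, and all argument values are illustrative:
user = User('Ada', 'Lovelace', 'ada', '555-0100', 's3cret', 'USD', 25.0)
db.session.add(user)
db.session.commit()
assert user.check_password('s3cret')    # verified against the stored hash
assert not user.check_password('wrong')
print(user.full_name())                 # 'Ada Lovelace'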
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# yandex-search documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import re
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this
project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'yandex search'
copyright = u'2017, Johannes Ahlmann'
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The full version, including alpha/beta/rc tags.
release = open(os.path.join(project_root, 'VERSION')).read().strip()
# The short X.Y version.
version = re.findall(r'\d+\.\d+\.\d+', release)[0]
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'yandex_searchdoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'yandex_search.tex',
u'yandex search Documentation',
u'Johannes Ahlmann', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'yandex_search',
u'yandex search Documentation',
[u'Johannes Ahlmann'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'yandex_search',
u'yandex search Documentation',
u'Johannes Ahlmann',
'yandex-search',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| fluquid/yandex-search | docs/conf.py | Python | mit | 8,370 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
# Gregory Starck, [email protected]
# Hartmut Goebel, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
from shinken.satellitelink import SatelliteLink, SatelliteLinks
from shinken.property import BoolProp, IntegerProp, StringProp, ListProp
class PollerLink(SatelliteLink):
"""This class is the link between Arbiter and Poller. With it, arbiter
can see if a poller is alive, and can send it new configuration
"""
id = 0
my_type = 'poller'
# To_send: send or not to satellite conf
properties = SatelliteLink.properties.copy()
properties.update({
'poller_name': StringProp(fill_brok=['full_status'], to_send=True),
'port': IntegerProp(default=7771, fill_brok=['full_status']),
'min_workers': IntegerProp(default=0, fill_brok=['full_status'], to_send=True),
'max_workers': IntegerProp(default=30, fill_brok=['full_status'], to_send=True),
'processes_by_worker': IntegerProp(default=256, fill_brok=['full_status'], to_send=True),
'poller_tags': ListProp(default=['None'], to_send=True),
})
def get_name(self):
return getattr(self, 'poller_name', 'UNNAMED-POLLER')
def register_to_my_realm(self):
self.realm.pollers.append(self)
class PollerLinks(SatelliteLinks):
"""Please Add a Docstring to describe the class here"""
name_property = "poller_name"
inner_class = PollerLink
| kaji-project/shinken | shinken/pollerlink.py | Python | agpl-3.0 | 2,235 |
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <[email protected]>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from odoo import models, fields
logger = logging.getLogger(__name__)
class GmcMessage(models.Model):
""" Add child in messages. """
_inherit = "gmc.message"
child_id = fields.Many2one("compassion.child", "Child", readonly=False)
| CompassionCH/compassion-modules | child_compassion/models/gmc_message.py | Python | agpl-3.0 | 641 |
from prismriver.plugin.common import Plugin
from prismriver.struct import Song
# todo: load lyrics translations
class MusixmatchPlugin(Plugin):
ID = 'musixmatch'
RANK = 6
def __init__(self, config):
super(MusixmatchPlugin, self).__init__('Musixmatch', config)
def search_song(self, artist, title):
to_delete = ['!', '"', '(', ')']
to_replace = [' ', '.', "'", ' + ']
link = 'https://www.musixmatch.com/lyrics/{}/{}'.format(
self.prepare_url_parameter(artist, to_delete=to_delete, to_replace=to_replace),
self.prepare_url_parameter(title, to_delete=to_delete, to_replace=to_replace))
# the request returns 404 if the song is not found
page = self.download_webpage_text(link)
if page:
soup = self.prepare_soup(page)
title_pane = soup.find('div', {'class': 'mxm-track-title'})
song_artist = title_pane.find('a').text
song_title_tag = title_pane.find('h1', recursive=False)
self.remove_tags_from_block(song_title_tag, ['small'])
song_title = song_title_tag.text
base_lyrics_pane = soup.find('div', {'class': 'mxm-lyrics'})
lyrics = ''
for lyrics_pane in base_lyrics_pane.findAll('p', {'class': 'mxm-lyrics__content'}):
lyrics += (lyrics_pane.text + '\n')
return Song(song_artist, song_title, self.sanitize_lyrics([lyrics]))
| anlar/prismriver | prismriver/plugin/musixmatch.py | Python | mit | 1,445 |
import pydot
class Node:
count = 0
type = 'Node (unspecified)'
shape = 'ellipse'
def __init__(self,children=None):
self.ID = str(Node.count)
Node.count+=1
if not children:
self.children = []
elif hasattr(children,'__len__'):
self.children = children
else:
self.children = [children]
self.next = []
def addNext(self,next):
self.next.append(next)
def nodeTree(self, prefix=''):
result = "%s%s\n" % (prefix, repr(self))
for c in self.children:
if not isinstance(c,Node):
result += "%s Error: Child of type %r: %r\n" % (prefix,type(c),c)
continue
result += c.nodeTree(prefix)
return result
def makegraphicaltree(self, dot=None, edgeLabels=True):
if not dot: dot = pydot.Dot()
dot.add_node(pydot.Node(self.ID,label=repr(self), shape=self.shape))
label = edgeLabels and len(self.children)-1
for i, c in enumerate(self.children):
c.makegraphicaltree(dot, edgeLabels)
edge = pydot.Edge(self.ID,c.ID)
if label:
edge.set_label(str(i))
dot.add_edge(edge)
return dot
def threadTree(self, graph, seen = None, col=0):
colors = ('red', 'green', 'blue', 'yellow', 'magenta', 'cyan')
if not seen: seen = []
if self in seen: return
seen.append(self)
new = not graph.get_node(self.ID)
if new:
graphnode = pydot.Node(self.ID,label=repr(self), shape=self.shape)
graphnode.set_style('dotted')
graph.add_node(graphnode)
label = len(self.next)-1
for i, c in enumerate(self.next):
if not c: return
col = (col + 1) % len(colors)
color = colors[col]
c.threadTree(graph, seen, col)
edge = pydot.Edge(self.ID,c.ID)
edge.set_color(color)
edge.set_arrowsize('.5')
edge.set_constraint('false')
if label:
edge.set_taillabel(str(i))
edge.set_labelfontcolor(color)
graph.add_edge(edge)
return graph
def __str__(self):
return self.nodeTree()
def __repr__(self):
return self.type
class TokenNode(Node):
type = 'token'
def __init__(self, tok):
Node.__init__(self)
self.tok = tok
def __repr__(self):
return repr(self.tok)
class OpNode(Node):
type = 'op'
def __init__(self, op, children):
Node.__init__(self,children)
self.op = op
try:
self.nbargs = len(children)
except AttributeError:
self.nbargs = 1
def __repr__(self):
return "%s (%s)" % (self.op, self.nbargs)
class EntryNode(Node):
type = 'ENTRY'
def __init__(self):
Node.__init__(self, None)
def addToClass(cls):
def decorator(func):
setattr(cls,func.__name__,func)
return func
return decorator
""" ARBOL AST: CLASES """
class Sentencias(Node):
type = 'sentencia'
class Funcion(Node):
type = 'function'
class FuncionNType(Node):
type = 'funcion'
class Argumentos(Node):
type = 'argv'
class Return(Node):
type = 'return'
class VarNode(Node):
type = 'var'
class WriteNode(Node):
type = 'write'
class PromptNode(Node):
type = 'prompt'
class IfNode(Node):
type = 'if'
class ForNode(Node):
type = 'for'
class AssignNode(Node):
type = 'assign'
class CallFunNode(Node):
type = 'callfun'
class IdNode(Node):
type = 'ids'
class ExpresionesNode(Node):
type = 'Expresiones'
class CondicionesNode(Node):
type = 'Condiciones'
class LambdaNode(Node):
type = 'lambda'
| roberseik/Python-Compiler | AST.py | Python | gpl-2.0 | 4,039 |
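A small sketch of how the node classes above compose into a tree; rendering assumes pydot and the Graphviz dot executable are installed:
tree = OpNode('+', [TokenNode(1), OpNode('*', [TokenNode(2), TokenNode(3)])])
print(tree.nodeTree())             # textual dump: + (2), 1, * (2), 2, 3
graph = tree.makegraphicaltree()
graph.write_pdf('ast.pdf')         # pydot delegates rendering to dot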
from __future__ import division
import numpy as np
from scipy.optimize import linear_sum_assignment
from ...utils.validation import check_consistent_length, check_array
__all__ = ["consensus_score"]
def _check_rows_and_columns(a, b):
"""Unpacks the row and column arrays and checks their shape."""
check_consistent_length(*a)
check_consistent_length(*b)
checks = lambda x: check_array(x, ensure_2d=False)
a_rows, a_cols = map(checks, a)
b_rows, b_cols = map(checks, b)
return a_rows, a_cols, b_rows, b_cols
def _jaccard(a_rows, a_cols, b_rows, b_cols):
"""Jaccard coefficient on the elements of the two biclusters."""
intersection = ((a_rows * b_rows).sum() *
(a_cols * b_cols).sum())
a_size = a_rows.sum() * a_cols.sum()
b_size = b_rows.sum() * b_cols.sum()
return intersection / (a_size + b_size - intersection)
def _pairwise_similarity(a, b, similarity):
"""Computes pairwise similarity matrix.
result[i, j] is the Jaccard coefficient of a's bicluster i and b's
bicluster j.
"""
a_rows, a_cols, b_rows, b_cols = _check_rows_and_columns(a, b)
n_a = a_rows.shape[0]
n_b = b_rows.shape[0]
result = np.array(list(list(similarity(a_rows[i], a_cols[i],
b_rows[j], b_cols[j])
for j in range(n_b))
for i in range(n_a)))
return result
def consensus_score(a, b, similarity="jaccard"):
"""The similarity of two sets of biclusters.
Similarity between individual biclusters is computed. Then the
best matching between sets is found using the Hungarian algorithm.
The final score is the sum of similarities divided by the size of
the larger set.
Read more in the :ref:`User Guide <biclustering>`.
Parameters
----------
a : (rows, columns)
Tuple of row and column indicators for a set of biclusters.
b : (rows, columns)
Another set of biclusters like ``a``.
similarity : string or function, optional, default: "jaccard"
May be the string "jaccard" to use the Jaccard coefficient, or
any function that takes four arguments, each of which is a 1d
indicator vector: (a_rows, a_columns, b_rows, b_columns).
References
----------
* Hochreiter, Bodenhofer, et. al., 2010. `FABIA: factor analysis
for bicluster acquisition
<https://www.ncbi.nlm.nih.gov/pmc/articles/PMC2881408/>`__.
"""
if similarity == "jaccard":
similarity = _jaccard
matrix = _pairwise_similarity(a, b, similarity)
row_indices, col_indices = linear_sum_assignment(1. - matrix)
n_a = len(a[0])
n_b = len(b[0])
return matrix[row_indices, col_indices].sum() / max(n_a, n_b)
| chrsrds/scikit-learn | sklearn/metrics/cluster/bicluster.py | Python | bsd-3-clause | 2,762 |
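A worked example for consensus_score; identical bicluster sets score 1.0 because each pairwise Jaccard coefficient is 1 (intersection 4, sizes 4 and 4, so 4 / (4 + 4 - 4)):
import numpy as np
from sklearn.metrics import consensus_score
a = (np.array([[True, True, False]]),   # row indicator of one bicluster
     np.array([[True, False, True]]))   # column indicator of the same bicluster
b = (np.array([[True, True, False]]),
     np.array([[True, False, True]]))
print(consensus_score(a, b))  # 1.0 -- perfect agreement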
from hazelcast.serialization.bits import *
from hazelcast.protocol.builtin import FixSizedTypesCodec
from hazelcast.protocol.client_message import OutboundMessage, REQUEST_HEADER_SIZE, create_initial_buffer, RESPONSE_HEADER_SIZE
from hazelcast.protocol.codec.custom.raft_group_id_codec import RaftGroupIdCodec
from hazelcast.protocol.builtin import StringCodec
# hex: 0x070100
_REQUEST_MESSAGE_TYPE = 459008
# hex: 0x070101
_RESPONSE_MESSAGE_TYPE = 459009
_REQUEST_SESSION_ID_OFFSET = REQUEST_HEADER_SIZE
_REQUEST_THREAD_ID_OFFSET = _REQUEST_SESSION_ID_OFFSET + LONG_SIZE_IN_BYTES
_REQUEST_INVOCATION_UID_OFFSET = _REQUEST_THREAD_ID_OFFSET + LONG_SIZE_IN_BYTES
_REQUEST_INITIAL_FRAME_SIZE = _REQUEST_INVOCATION_UID_OFFSET + UUID_SIZE_IN_BYTES
_RESPONSE_RESPONSE_OFFSET = RESPONSE_HEADER_SIZE
def encode_request(group_id, name, session_id, thread_id, invocation_uid):
buf = create_initial_buffer(_REQUEST_INITIAL_FRAME_SIZE, _REQUEST_MESSAGE_TYPE)
FixSizedTypesCodec.encode_long(buf, _REQUEST_SESSION_ID_OFFSET, session_id)
FixSizedTypesCodec.encode_long(buf, _REQUEST_THREAD_ID_OFFSET, thread_id)
FixSizedTypesCodec.encode_uuid(buf, _REQUEST_INVOCATION_UID_OFFSET, invocation_uid)
RaftGroupIdCodec.encode(buf, group_id)
StringCodec.encode(buf, name, True)
return OutboundMessage(buf, True)
def decode_response(msg):
initial_frame = msg.next_frame()
return FixSizedTypesCodec.decode_long(initial_frame.buf, _RESPONSE_RESPONSE_OFFSET)
| hazelcast/hazelcast-python-client | hazelcast/protocol/codec/fenced_lock_lock_codec.py | Python | apache-2.0 | 1,475 |
import logging
from allauth.socialaccount.models import SocialToken
from django.conf import settings
from requests_oauthlib import OAuth2Session
from .models import GithubProject, GithubOrganization, BitbucketProject, BitbucketTeam
from tastyapi import apiv2
log = logging.getLogger(__name__)
def make_github_project(user, org, privacy, repo_json):
log.info('Trying GitHub: %s' % repo_json['full_name'])
if (repo_json['private'] is True and privacy == 'private' or
repo_json['private'] is False and privacy == 'public'):
project, created = GithubProject.objects.get_or_create(
full_name=repo_json['full_name'],
)
if project.organization and project.organization != org:
log.debug('Not importing %s because mismatched orgs' % repo_json['name'])
return None
else:
project.organization = org
project.users.add(user)
project.name = repo_json['name']
project.description = repo_json['description']
project.git_url = repo_json['git_url']
project.ssh_url = repo_json['ssh_url']
project.html_url = repo_json['html_url']
project.json = repo_json
project.save()
return project
else:
log.debug('Not importing %s because mismatched type' % repo_json['name'])
def make_github_organization(user, org_json):
org, created = GithubOrganization.objects.get_or_create(
login=org_json.get('login'),
)
org.html_url = org_json.get('html_url')
org.name = org_json.get('name')
org.email = org_json.get('email')
org.json = org_json
org.users.add(user)
org.save()
return org
def get_token_for_project(project, force_local=False):
if not getattr(settings, 'ALLOW_PRIVATE_REPOS', False):
return None
token = None
try:
if getattr(settings, 'DONT_HIT_DB', True) and not force_local:
token = apiv2.project(project.pk).token().get()['token']
else:
for user in project.users.all():
tokens = SocialToken.objects.filter(account__user__username=user.username, app__provider='github')
if tokens.exists():
token = tokens[0].token
except Exception:
log.error('Failed to get token for user', exc_info=True)
return token
def github_paginate(session, url):
"""
Scans through all GitHub paginated results and returns the concatenated
list of results.
:param session: requests client instance
:param url: start url to get the data from.
See https://developer.github.com/v3/#pagination
"""
result = []
while url:
r = session.get(url)
result.extend(r.json())
next = r.links.get('next')
if next:
url = next.get('url')
else:
url = None
return result
def import_github(user, sync):
""" Do the actual github import """
repo_type = getattr(settings, 'GITHUB_PRIVACY', 'public')
tokens = SocialToken.objects.filter(
account__user__username=user.username, app__provider='github')
github_connected = False
if tokens.exists():
github_connected = True
if sync:
token = tokens[0]
session = OAuth2Session(
client_id=token.app.client_id,
token={
'access_token': str(token.token),
'token_type': 'bearer'
}
)
# Get user repos
owner_resp = github_paginate(session, 'https://api.github.com/user/repos?per_page=100')
try:
for repo in owner_resp:
make_github_project(user=user, org=None, privacy=repo_type, repo_json=repo)
except TypeError as e:
print(e)
# Get org repos
try:
resp = session.get('https://api.github.com/user/orgs')
for org_json in resp.json():
org_resp = session.get('https://api.github.com/orgs/%s' % org_json['login'])
org_obj = make_github_organization(user=user, org_json=org_resp.json())
# Add repos
org_repos_resp = github_paginate(session, 'https://api.github.com/orgs/%s/repos?per_page=100' % org_json['login'])
for repo in org_repos_resp:
make_github_project(user=user, org=org_obj, privacy=repo_type, repo_json=repo)
except TypeError as e:
print(e)
return github_connected
###
### Bitbucket
###
def bitbucket_paginate(session, url):
"""
Scans through all Bitbucket paginated results and returns the concatenated
list of results.
:param session: requests client instance
:param url: start url to get the data from.
"""
result = []
while url:
r = session.get(url)
result.extend([r.json()])
next_url = r.json().get('next')
if next_url:
url = next_url
else:
url = None
return result
def make_bitbucket_project(user, org, privacy, repo_json):
log.info('Trying Bitbucket: %s' % repo_json['full_name'])
if (repo_json['is_private'] is True and privacy == 'private' or
repo_json['is_private'] is False and privacy == 'public'):
project, created = BitbucketProject.objects.get_or_create(
full_name=repo_json['full_name'],
)
if project.organization and project.organization != org:
log.debug('Not importing %s because mismatched orgs' % repo_json['name'])
return None
else:
project.organization = org
project.users.add(user)
project.name = repo_json['name']
project.description = repo_json['description']
project.git_url = repo_json['links']['clone'][0]['href']
project.ssh_url = repo_json['links']['clone'][1]['href']
project.html_url = repo_json['links']['html']['href']
project.vcs = repo_json['scm']
project.json = repo_json
project.save()
return project
else:
log.debug('Not importing %s because mismatched type' % repo_json['name'])
def process_bitbucket_json(user, json, repo_type):
try:
for page in json:
for repo in page['values']:
make_bitbucket_project(user=user, org=None, privacy=repo_type, repo_json=repo)
except TypeError as e:
print(e)
def import_bitbucket(user, sync):
""" Do the actual github import """
repo_type = getattr(settings, 'GITHUB_PRIVACY', 'public')
tokens = SocialToken.objects.filter(
account__user__username=user.username, app__provider='bitbucket')
bitbucket_connected = False
if tokens.exists():
bitbucket_connected = True
if sync:
token = tokens[0]
session = OAuth2Session(
client_id=token.app.client_id,
token={
'access_token': str(token.token),
'token_type': 'bearer'
}
)
# Get user repos
owner_resp = bitbucket_paginate(session, 'https://bitbucket.org/api/2.0/repositories/{owner}'.format(owner=token.account.uid))
process_bitbucket_json(user, owner_resp, repo_type)
# Get org repos
# resp = session.get('https://bitbucket.org/api/1.0/user/privileges/')
# for team in resp.json()['teams'].keys():
# org_resp = bitbucket_paginate(session, 'https://bitbucket.org/api/2.0/teams/{teamname}/repositories' % team)
# process_bitbucket_json(user, org_resp, repo_type)
return bitbucket_connected
| adrianmugnoz/Documentacion-Divulgame | readthedocs/oauth/utils.py | Python | mit | 7,756 |
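github_paginate above walks RFC 5988 Link headers, which requests parses into r.links; a minimal standalone sketch of the same loop (the URL and auth setup are assumed):
import requests
session = requests.Session()  # assumes auth headers are configured elsewhere
url = 'https://api.github.com/user/repos?per_page=100'
result = []
while url:
    r = session.get(url)
    result.extend(r.json())
    url = r.links.get('next', {}).get('url')  # None once the last page is reached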
# coding: utf-8
# Copyright 2014 Globo.com Player authors. All rights reserved.
# Use of this source code is governed by a MIT License
# license that can be found in the LICENSE file.
from collections import namedtuple
import os
import errno
import math
from parser import parse, format_date_time
from mixins import BasePathMixin, GroupedBasePathMixin
class M3U8(object):
'''
Represents a single M3U8 playlist. Should be instantiated with
the content as string.
Parameters:
`content`
the m3u8 content as string
`base_path`
all urls (key and segments url) will be updated with this base_path,
ex.:
base_path = "http://videoserver.com/hls"
/foo/bar/key.bin --> http://videoserver.com/hls/key.bin
http://vid.com/segment1.ts --> http://videoserver.com/hls/segment1.ts
can be passed as a parameter or set as an attribute of the ``M3U8`` object.
`base_uri`
uri the playlist comes from. it is propagated to SegmentList and Key
ex.: http://example.com/path/to
Attributes:
`keys`
Returns the list of `Key` objects used to encrypt the segments from m3u8.
It covers the whole list of possible situations when encryption either is
used or not.
1. No encryption.
`keys` list will only contain a `None` element.
2. Encryption enabled for all segments.
`keys` list will contain the key used for the segments.
3. No encryption for first element(s), encryption is applied afterwards
`keys` list will contain `None` and the key used for the rest of segments.
4. Multiple keys used during the m3u8 manifest.
`keys` list will contain the key used for each set of segments.
`segments`
a `SegmentList` object, represents the list of `Segment`s from this playlist
`is_variant`
Returns true if this M3U8 is a variant playlist, with links to
other M3U8s with different bitrates.
If true, `playlists` is a list of the playlists available,
and `iframe_playlists` is a list of the i-frame playlists available.
`is_endlist`
Returns true if EXT-X-ENDLIST tag present in M3U8.
http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.8
`playlists`
If this is a variant playlist (`is_variant` is True), returns a list of
Playlist objects
`iframe_playlists`
If this is a variant playlist (`is_variant` is True), returns a list of
IFramePlaylist objects
`playlist_type`
A lower-case string representing the type of the playlist, which can be
one of VOD (video on demand) or EVENT.
`media`
If this is a variant playlist (`is_variant` is True), returns a list of
Media objects
`target_duration`
Returns the EXT-X-TARGETDURATION as an integer
http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.2
`media_sequence`
Returns the EXT-X-MEDIA-SEQUENCE as an integer
http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.3
`program_date_time`
Returns the EXT-X-PROGRAM-DATE-TIME as a string
http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.5
`version`
Return the EXT-X-VERSION as is
`allow_cache`
Return the EXT-X-ALLOW-CACHE as is
`files`
Returns an iterable with all files from playlist, in order. This includes
segments and key uri, if present.
`base_uri`
It is a property (getter and setter) used by
SegmentList and Key to have absolute URIs.
`is_i_frames_only`
Returns true if EXT-X-I-FRAMES-ONLY tag present in M3U8.
http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.12
`is_independent_segments`
Returns true if EXT-X-INDEPENDENT-SEGMENTS tag present in M3U8.
https://tools.ietf.org/html/draft-pantos-http-live-streaming-13#section-3.4.16
'''
simple_attributes = (
# obj attribute # parser attribute
('is_variant', 'is_variant'),
('is_endlist', 'is_endlist'),
('is_i_frames_only', 'is_i_frames_only'),
('target_duration', 'targetduration'),
('media_sequence', 'media_sequence'),
('program_date_time', 'program_date_time'),
('is_independent_segments', 'is_independent_segments'),
('version', 'version'),
('allow_cache', 'allow_cache'),
('playlist_type', 'playlist_type')
)
def __init__(self, content=None, base_path=None, base_uri=None, strict=False):
if content is not None:
self.data = parse(content, strict)
else:
self.data = {}
self._base_uri = base_uri
if self._base_uri:
if not self._base_uri.endswith('/'):
self._base_uri += '/'
self._initialize_attributes()
self.base_path = base_path
def _initialize_attributes(self):
self.keys = [ Key(base_uri=self.base_uri, **params) if params else None
for params in self.data.get('keys', []) ]
self.segments = SegmentList([ Segment(base_uri=self.base_uri, keyobject=find_key(segment.get('key', {}), self.keys), **segment)
for segment in self.data.get('segments', []) ])
#self.keys = get_uniques([ segment.key for segment in self.segments ])
for attr, param in self.simple_attributes:
setattr(self, attr, self.data.get(param))
self.files = []
for key in self.keys:
# Avoid None key, it could be the first one, don't repeat them
if key and key.uri not in self.files:
self.files.append(key.uri)
self.files.extend(self.segments.uri)
self.media = MediaList([ Media(base_uri=self.base_uri, **media)
for media in self.data.get('media', []) ])
self.playlists = PlaylistList([ Playlist(base_uri=self.base_uri, media=self.media, **playlist)
for playlist in self.data.get('playlists', []) ])
self.iframe_playlists = PlaylistList()
for ifr_pl in self.data.get('iframe_playlists', []):
self.iframe_playlists.append(IFramePlaylist(base_uri=self.base_uri,
uri=ifr_pl['uri'],
iframe_stream_info=ifr_pl['iframe_stream_info'])
)
self.segment_map_uri = self.data.get('segment_map_uri')
def __unicode__(self):
return self.dumps()
@property
def base_uri(self):
return self._base_uri
@base_uri.setter
def base_uri(self, new_base_uri):
self._base_uri = new_base_uri
self.media.base_uri = new_base_uri
self.playlists.base_uri = new_base_uri
self.segments.base_uri = new_base_uri
for key in self.keys:
if key:
key.base_uri = new_base_uri
@property
def base_path(self):
return self._base_path
@base_path.setter
def base_path(self, newbase_path):
self._base_path = newbase_path
self._update_base_path()
def _update_base_path(self):
if self._base_path is None:
return
for key in self.keys:
if key:
key.base_path = self._base_path
self.media.base_path = self._base_path
self.segments.base_path = self._base_path
self.playlists.base_path = self._base_path
def add_playlist(self, playlist):
self.is_variant = True
self.playlists.append(playlist)
def add_iframe_playlist(self, iframe_playlist):
if iframe_playlist is not None:
self.is_variant = True
self.iframe_playlists.append(iframe_playlist)
def add_media(self, media):
self.media.append(media)
def add_segment(self, segment):
self.segments.append(segment)
def dumps(self):
'''
Returns the current m3u8 as a string.
You could also use unicode(<this obj>) or str(<this obj>)
'''
output = ['#EXTM3U']
if self.is_independent_segments:
output.append('#EXT-X-INDEPENDENT-SEGMENTS')
if self.media_sequence:
output.append('#EXT-X-MEDIA-SEQUENCE:' + str(self.media_sequence))
if self.allow_cache:
output.append('#EXT-X-ALLOW-CACHE:' + self.allow_cache.upper())
if self.version:
output.append('#EXT-X-VERSION:' + self.version)
if self.target_duration:
output.append('#EXT-X-TARGETDURATION:' +
int_or_float_to_string(self.target_duration))
if self.program_date_time is not None:
output.append('#EXT-X-PROGRAM-DATE-TIME:' + format_date_time(self.program_date_time))
#output.append('#EXT-X-PROGRAM-DATE-TIME:' + self.program_date_time)
if not (self.playlist_type is None or self.playlist_type == ''):
output.append('#EXT-X-PLAYLIST-TYPE:%s' % str(self.playlist_type).upper())
if self.is_i_frames_only:
output.append('#EXT-X-I-FRAMES-ONLY')
if self.is_variant:
if self.media:
output.append(str(self.media))
output.append(str(self.playlists))
if self.iframe_playlists:
output.append(str(self.iframe_playlists))
if self.segments:
output.append(str(self.segments))
if self.is_endlist:
output.append('#EXT-X-ENDLIST')
return '\n'.join(output)
def dump(self, filename):
'''
Saves the current m3u8 to ``filename``
'''
self._create_sub_directories(filename)
with open(filename, 'w') as fileobj:
fileobj.write(self.dumps())
def _create_sub_directories(self, filename):
basename = os.path.dirname(filename)
try:
os.makedirs(basename)
except OSError as error:
if error.errno != errno.EEXIST:
raise
class Segment(BasePathMixin):
'''
A video segment from a M3U8 playlist
`uri`
a string with the segment uri
`title`
title attribute from EXTINF parameter
`program_date_time`
Returns the EXT-X-PROGRAM-DATE-TIME as a datetime
http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.5
`discontinuity`
Returns a boolean indicating if a EXT-X-DISCONTINUITY tag exists
http://tools.ietf.org/html/draft-pantos-http-live-streaming-13#section-3.4.11
`cue_out`
Returns a boolean indicating if a EXT-X-CUE-OUT-CONT tag exists
`scte35`
Base64 encoded SCTE35 metadata if available
`scte35_duration`
Planned SCTE35 duration
`duration`
duration attribute from EXTINF parameter
`base_uri`
uri the key comes from in URI hierarchy. ex.: http://example.com/path/to
`byterange`
byterange attribute from EXT-X-BYTERANGE parameter
`key`
Key used to encrypt the segment (EXT-X-KEY)
'''
def __init__(self, uri, base_uri, program_date_time=None, duration=None,
title=None, byterange=None, cue_out=False, discontinuity=False, key=None,
scte35=None, scte35_duration=None, keyobject=None):
self.uri = uri
self.duration = duration
self.title = title
self.base_uri = base_uri
self.byterange = byterange
self.program_date_time = program_date_time
self.discontinuity = discontinuity
self.cue_out = cue_out
self.scte35 = scte35
self.scte35_duration = scte35_duration
self.key = keyobject
# Key(base_uri=base_uri, **key) if key else None
def dumps(self, last_segment):
output = []
if last_segment and self.key != last_segment.key:
output.append(str(self.key))
output.append('\n')
else:
# The key must be checked anyway now for the first segment
if self.key and last_segment is None:
output.append(str(self.key))
output.append('\n')
if self.discontinuity:
output.append('#EXT-X-DISCONTINUITY\n')
if self.program_date_time:
# output.append('#EXT-X-PROGRAM-DATE-TIME:%s\n' % format_date_time(self.program_date_time))
output.append('#EXT-X-PROGRAM-DATE-TIME:%s\n' % self.program_date_time)
if self.cue_out:
output.append('#EXT-X-CUE-OUT-CONT\n')
output.append('#EXTINF:%s,' % int_or_float_to_string(self.duration))
if self.title:
output.append(quoted(self.title))
output.append('\n')
if self.byterange:
output.append('#EXT-X-BYTERANGE:%s\n' % self.byterange)
output.append(self.uri)
return ''.join(output)
def __str__(self):
return self.dumps(None)
class SegmentList(list, GroupedBasePathMixin):
def __str__(self):
output = []
last_segment = None
for segment in self:
output.append(segment.dumps(last_segment))
last_segment = segment
return '\n'.join(output)
@property
def uri(self):
return [seg.uri for seg in self]
def by_key(self, key):
return [ segment for segment in self if segment.key == key ]
class Key(BasePathMixin):
'''
Key used to encrypt the segments in a m3u8 playlist (EXT-X-KEY)
`method`
is a string. ex.: "AES-128"
`uri`
is a string. ex:: "https://priv.example.com/key.php?r=52"
`base_uri`
uri the key comes from in URI hierarchy. ex.: http://example.com/path/to
`iv`
initialization vector. a string representing a hexadecimal number. ex.: 0X12A
'''
def __init__(self, method, base_uri, uri=None, iv=None, keyformat=None, keyformatversions=None):
self.method = method
self.uri = uri
self.iv = iv
self.keyformat = keyformat
self.keyformatversions = keyformatversions
self.base_uri = base_uri
def __str__(self):
output = [
'METHOD=%s' % self.method,
]
if self.uri:
output.append('URI="%s"' % self.uri)
if self.iv:
output.append('IV=%s' % self.iv)
if self.keyformat:
output.append('KEYFORMAT="%s"' % self.keyformat)
if self.keyformatversions:
output.append('KEYFORMATVERSIONS="%s"' % self.keyformatversions)
return '#EXT-X-KEY:' + ','.join(output)
def __eq__(self, other):
if not other:
return False
return self.method == other.method and \
self.uri == other.uri and \
self.iv == other.iv and \
self.base_uri == other.base_uri and \
self.keyformat == other.keyformat and \
self.keyformatversions == other.keyformatversions
def __ne__(self, other):
return not self.__eq__(other)
class Playlist(BasePathMixin):
'''
Playlist object representing a link to a variant M3U8 with a specific bitrate.
Attributes:
`stream_info` is a named tuple containing the attributes: `program_id`,
`bandwidth`, `average_bandwidth`, `resolution`, `codecs` and `resolution`
which is a a tuple (w, h) of integers
`media` is a list of related Media entries.
More info: http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.10
'''
def __init__(self, uri, stream_info, media, base_uri):
self.uri = uri
self.base_uri = base_uri
resolution = stream_info.get('resolution')
if resolution is not None:
resolution = resolution.strip('"')
values = resolution.split('x')
resolution_pair = (int(values[0]), int(values[1]))
else:
resolution_pair = None
self.stream_info = StreamInfo(
bandwidth=stream_info['bandwidth'],
average_bandwidth=stream_info.get('average_bandwidth'),
program_id=stream_info.get('program_id'),
resolution=resolution_pair,
codecs=stream_info.get('codecs')
)
self.media = []
for media_type in ('audio', 'video', 'subtitles'):
group_id = stream_info.get(media_type)
if not group_id:
continue
self.media += filter(lambda m: m.group_id == group_id, media)
def __str__(self):
stream_inf = []
if self.stream_info.program_id:
stream_inf.append('PROGRAM-ID=%d' % self.stream_info.program_id)
if self.stream_info.bandwidth:
stream_inf.append('BANDWIDTH=%d' % self.stream_info.bandwidth)
if self.stream_info.average_bandwidth:
stream_inf.append('AVERAGE-BANDWIDTH=%d' %
self.stream_info.average_bandwidth)
if self.stream_info.resolution:
res = str(self.stream_info.resolution[
0]) + 'x' + str(self.stream_info.resolution[1])
stream_inf.append('RESOLUTION=' + res)
if self.stream_info.codecs:
stream_inf.append('CODECS=' + quoted(self.stream_info.codecs))
media_types = []
for media in self.media:
if media.type in media_types:
continue
else:
media_types += [media.type]
media_type = media.type.upper()
stream_inf.append('%s="%s"' % (media_type, media.group_id))
return '#EXT-X-STREAM-INF:' + ','.join(stream_inf) + '\n' + self.uri
class IFramePlaylist(BasePathMixin):
'''
IFramePlaylist object representing a link to a
variant M3U8 i-frame playlist with a specific bitrate.
Attributes:
`iframe_stream_info` is a named tuple containing the attributes:
`program_id`, `bandwidth`, `codecs` and `resolution` which
is a tuple (w, h) of integers
More info: http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.13
'''
def __init__(self, base_uri, uri, iframe_stream_info):
self.uri = uri
self.base_uri = base_uri
resolution = iframe_stream_info.get('resolution')
if resolution is not None:
values = resolution.split('x')
resolution_pair = (int(values[0]), int(values[1]))
else:
resolution_pair = None
self.iframe_stream_info = StreamInfo(
bandwidth=iframe_stream_info.get('bandwidth'),
average_bandwidth=None,
program_id=iframe_stream_info.get('program_id'),
resolution=resolution_pair,
codecs=iframe_stream_info.get('codecs')
)
def __str__(self):
iframe_stream_inf = []
if self.iframe_stream_info.program_id:
iframe_stream_inf.append('PROGRAM-ID=%d' %
self.iframe_stream_info.program_id)
if self.iframe_stream_info.bandwidth:
iframe_stream_inf.append('BANDWIDTH=%d' %
self.iframe_stream_info.bandwidth)
if self.iframe_stream_info.resolution:
res = (str(self.iframe_stream_info.resolution[0]) + 'x' +
str(self.iframe_stream_info.resolution[1]))
iframe_stream_inf.append('RESOLUTION=' + res)
if self.iframe_stream_info.codecs:
iframe_stream_inf.append('CODECS=' +
quoted(self.iframe_stream_info.codecs))
if self.uri:
iframe_stream_inf.append('URI=' + quoted(self.uri))
return '#EXT-X-I-FRAME-STREAM-INF:' + ','.join(iframe_stream_inf)
StreamInfo = namedtuple(
'StreamInfo',
['bandwidth', 'average_bandwidth', 'program_id', 'resolution', 'codecs']
)
class Media(BasePathMixin):
'''
A media object from a M3U8 playlist
https://tools.ietf.org/html/draft-pantos-http-live-streaming-16#section-4.3.4.1
`uri`
a string with the media uri
`type`
`group_id`
`language`
`assoc-language`
`name`
`default`
`autoselect`
`forced`
`instream_id`
`characteristics`
attributes in the EXT-MEDIA tag
`base_uri`
uri the media comes from in URI hierarchy. ex.: http://example.com/path/to
'''
def __init__(self, uri=None, type=None, group_id=None, language=None,
name=None, default=None, autoselect=None, forced=None,
characteristics=None, assoc_language=None,
instream_id=None, base_uri=None, **extras):
self.base_uri = base_uri
self.uri = uri
self.type = type
self.group_id = group_id
self.language = language
self.name = name
self.default = default
self.autoselect = autoselect
self.forced = forced
self.assoc_language = assoc_language
self.instream_id = instream_id
self.characteristics = characteristics
self.extras = extras
def dumps(self):
media_out = []
if self.uri:
media_out.append('URI=' + quoted(self.uri))
if self.type:
media_out.append('TYPE=' + self.type)
if self.group_id:
media_out.append('GROUP-ID=' + quoted(self.group_id))
if self.language:
media_out.append('LANGUAGE=' + quoted(self.language))
if self.assoc_language:
media_out.append('ASSOC-LANGUAGE=' + quoted(self.assoc_language))
if self.name:
media_out.append('NAME=' + quoted(self.name))
if self.default:
media_out.append('DEFAULT=' + self.default)
if self.autoselect:
media_out.append('AUTOSELECT=' + self.autoselect)
if self.forced:
media_out.append('FORCED=' + self.forced)
if self.instream_id:
media_out.append('INSTREAM-ID=' + self.instream_id)
if self.characteristics:
media_out.append('CHARACTERISTICS=' + quoted(self.characteristics))
return ('#EXT-X-MEDIA:' + ','.join(media_out))
def __str__(self):
return self.dumps()
class MediaList(list, GroupedBasePathMixin):
def __str__(self):
output = [str(playlist) for playlist in self]
return '\n'.join(output)
@property
def uri(self):
return [media.uri for media in self]
class PlaylistList(list, GroupedBasePathMixin):
def __str__(self):
output = [str(playlist) for playlist in self]
return '\n'.join(output)
def find_key(keydata, keylist):
if not keydata:
return None
for key in keylist:
if key:
# Check the intersection of keys and values
if keydata.get('uri') == key.uri and \
keydata.get('method', 'NONE') == key.method and \
keydata.get('iv') == key.iv:
return key
raise KeyError("No key found for key data")
def denormalize_attribute(attribute):
return attribute.replace('_', '-').upper()
def quoted(string):
return '"%s"' % string
def int_or_float_to_string(number):
return str(int(number)) if number == math.floor(number) else str(number)
| olavopeixoto/plugin.video.brplay | resources/lib/modules/m3u8/model.py | Python | gpl-3.0 | 23,467 |
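A short usage sketch for the model above, assuming this module is importable as part of the m3u8 package:
PLAYLIST = '\n'.join([
    '#EXTM3U',
    '#EXT-X-TARGETDURATION:10',
    '#EXTINF:9.0,',
    'segment1.ts',
    '#EXT-X-ENDLIST',
])
obj = M3U8(PLAYLIST, base_uri='http://videoserver.com/hls/')
print(obj.target_duration)    # parsed from EXT-X-TARGETDURATION
print(obj.segments[0].uri)    # 'segment1.ts'
print(obj.dumps())            # serialises back to playlist text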
# LIBTBX_PRE_DISPATCHER_INCLUDE_SH export PHENIX_GUI_ENVIRONMENT=1
# DIALS_ENABLE_COMMAND_LINE_COMPLETION
# LIBTBX_SET_DISPATCHER_NAME dials.reciprocal_lattice_viewer
# LIBTBX_SET_DISPATCHER_NAME dials.rlv
from __future__ import annotations
import copy
import os
import wxtbx.app
from scitbx.array_family import flex
import dials.util.log
from dials.util.options import ArgumentParser, reflections_and_experiments_from_files
from dials.util.reciprocal_lattice.viewer import ReciprocalLatticeViewer, phil_scope
help_message = """
Visualise the strong spots from spotfinding in reciprocal space.
Examples::
dials.reciprocal_lattice_viewer imported.expt strong.refl
dials.reciprocal_lattice_viewer indexed.expt indexed.refl
"""
@dials.util.show_mail_handle_errors()
def run(args=None):
dials.util.log.print_banner()
usage = "dials.reciprocal_lattice_viewer [options] models.expt observations.refl"
parser = ArgumentParser(
usage=usage,
phil=phil_scope,
read_experiments=True,
read_reflections=True,
check_format=False,
epilog=help_message,
)
params, options = parser.parse_args(args, show_diff_phil=True)
reflections, experiments = reflections_and_experiments_from_files(
params.input.reflections, params.input.experiments
)
if len(experiments) == 0 or len(reflections) == 0:
parser.print_help()
exit(0)
if len(reflections) > 1:
assert len(reflections) == len(experiments)
for i in range(len(reflections)):
reflections[i]["imageset_id"] = flex.int(len(reflections[i]), i)
if i > 0:
reflections[0].extend(reflections[i])
elif "imageset_id" not in reflections[0]:
reflections[0]["imageset_id"] = reflections[0]["id"]
reflections = reflections[0]
a = wxtbx.app.CCTBXApp(0)
a.settings = params
f = ReciprocalLatticeViewer(
None,
-1,
os.path.realpath(params.input.reflections[0].filename),
size=(1024, 768),
settings=copy.deepcopy(params),
)
f.load_models(experiments, reflections)
f.Show()
a.SetTopWindow(f)
a.MainLoop()
if __name__ == "__main__":
run()
| dials/dials | command_line/reciprocal_lattice_viewer.py | Python | bsd-3-clause | 2,225 |
#!/usr/bin/python
# Copyright 2017 Google Inc. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import yaml
import subprocess
import sys
import tempfile
RUNTIME_BUCKET = 'runtime-builders'
RUNTIME_BUCKET_PREFIX = 'gs://{0}/'.format(RUNTIME_BUCKET)
MANIFEST_FILE = RUNTIME_BUCKET_PREFIX + 'runtimes.yaml'
SCHEMA_VERSION = 1
def copy_to_gcs(file_path, gcs_path):
command = ['gsutil', 'cp', file_path, gcs_path]
try:
output = subprocess.check_output(command)
logging.debug(output)
except subprocess.CalledProcessError as cpe:
logging.error('Error encountered when writing to GCS!', cpe)
except Exception as e:
logging.error('Fatal error encountered when shelling command {0}'
.format(command))
logging.error(e)
def write_to_gcs(gcs_path, file_contents):
try:
logging.info(gcs_path)
fd, f_name = tempfile.mkstemp(text=True)
os.write(fd, file_contents)
copy_to_gcs(f_name, gcs_path)
finally:
os.remove(f_name)
def get_file_from_gcs(gcs_file, temp_file):
command = ['gsutil', 'cp', gcs_file, temp_file]
try:
subprocess.check_output(command, stderr=subprocess.STDOUT)
return True
except subprocess.CalledProcessError as e:
logging.error('Error when retrieving file from GCS! {0}'
.format(e.output))
return False
def verify_manifest(manifest):
"""Verify that the provided runtime manifest is valid before publishing.
Aliases are provided for runtime 'names' that can be included in users'
application configuration files: this method ensures that all the aliases
can resolve to actual builder files.
All builders and aliases are turned into nodes in a graph, which is then
traversed to be sure that all nodes lead down to a builder node.
Example formatting of the manifest, showing both an 'alias' and
an actual builder file:
runtimes:
java:
target:
runtime: java-openjdk
java-openjdk:
target:
file: gs://runtimes/java-openjdk-1234.yaml
deprecation:
message: "openjdk is deprecated."
"""
node_graph = _build_manifest_graph(manifest)
_verify_manifest_graph(node_graph)
def _verify_manifest_graph(node_graph):
for _, node in node_graph.items():
seen = set()
child = node
while True:
seen.add(child)
if not child.child:
break
elif child.child not in node_graph.keys():
logging.error('Non-existent alias provided for {0}: {1}'
.format(child.name, child.child))
sys.exit(1)
child = node_graph[child.child]
if child in seen:
logging.error('Circular dependency found in manifest! '
'Check node {0}'.format(child))
sys.exit(1)
if not child.isBuilder:
logging.error('No terminating builder for alias {0}'
.format(node.name))
sys.exit(1)
def _build_manifest_graph(manifest):
try:
node_graph = {}
for key, val in manifest.get('runtimes').items():
target = val.get('target', {})
if not target:
if 'deprecation' not in val:
logging.error('No target or deprecation specified for '
'runtime: {0}'.format(key))
sys.exit(1)
continue
child = None
isBuilder = 'file' in target.keys()
if not isBuilder:
child = target['runtime']
node = node_graph.get(key, {})
if not node:
node_graph[key] = Node(key, isBuilder, child)
return node_graph
except (KeyError, AttributeError) as ke:
        logging.error('Error encountered when verifying manifest: %s', ke)
sys.exit(1)
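# Example (a hypothetical manifest mirroring the verify_manifest docstring;
# the call exits via sys.exit(1) if the alias graph is broken):
# >>> manifest = {'runtimes': {
# ...     'java': {'target': {'runtime': 'java-openjdk'}},
# ...     'java-openjdk': {
# ...         'target': {'file': 'gs://runtimes/java-openjdk-1234.yaml'}},
# ... }}
# >>> verify_manifest(manifest)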
def load_manifest_file():
try:
_, tmp = tempfile.mkstemp(text=True)
command = ['gsutil', 'cp', MANIFEST_FILE, tmp]
subprocess.check_output(command, stderr=subprocess.STDOUT)
with open(tmp) as f:
            return yaml.safe_load(f)  # safe_load avoids arbitrary object construction
except subprocess.CalledProcessError:
logging.info('Manifest file not found in GCS: creating new one.')
return {'schema_version': SCHEMA_VERSION}
finally:
os.remove(tmp)
class Node:
def __init__(self, name, isBuilder, child):
self.name = name
self.isBuilder = isBuilder
self.child = child
def __repr__(self):
return '{0}: {1}|{2}'.format(self.name, self.isBuilder, self.child)
| huyhg/runtimes-common | runtime_builders/builder_util.py | Python | apache-2.0 | 5,271 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# complexity documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this docs directory.
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import mca
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'mca'
copyright = u'2014, Emre Safak'
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = mca.__version__
# The full version, including alpha/beta/rc tags.
release = mca.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'mcadoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'mca.tex',
u'mca Documentation',
u'Emre Safak', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'mca',
u'mca Documentation',
[u'Emre Safak'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'mca',
u'mca Documentation',
u'Emre Safak',
'mca',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| ssujit/mca | docs/conf.py | Python | bsd-3-clause | 8333
from __future__ import absolute_import
from django.utils import timezone
from rest_framework.response import Response
from sentry.api.base import Endpoint
from sentry.api.permissions import assert_perm
from sentry.db.models import create_or_update
from sentry.models import Project, Group, GroupSeen
from sentry.utils.functional import extract_lazy_object
class GroupMarkSeenEndpoint(Endpoint):
def post(self, request, group_id):
group = Group.objects.get(
id=group_id,
)
assert_perm(group, request.user, request.auth)
if group.project not in Project.objects.get_for_user(
team=group.project.team, user=request.user):
return Response(status=400)
instance, created = create_or_update(
GroupSeen,
group=group,
user=extract_lazy_object(request.user),
project=group.project,
defaults={
'last_seen': timezone.now(),
}
)
if created:
return Response(status=201)
return Response(status=204)
| camilonova/sentry | src/sentry/api/endpoints/group_markseen.py | Python | bsd-3-clause | 1,098 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# operations.py
#
# Copyright 2016 Bruno S <[email protected]>
#
# This file is part of ProperImage (https://github.com/toros-astro/ProperImage)
# License: BSD-3-Clause
# Full Text: https://github.com/toros-astro/ProperImage/blob/master/LICENSE.txt
#
"""operations module from ProperImage,
for coadding and subtracting astronomical images.
Written by Bruno SANCHEZ
PhD of Astronomy - UNC
[email protected]
Instituto de Astronomia Teorica y Experimental (IATE) UNC
Cordoba - Argentina
Of 301
"""
import logging
import time
import warnings
import sep
import numpy as np
import astroalign as aa
from astropy.stats import sigma_clipped_stats
from scipy import optimize
from scipy.ndimage import center_of_mass
from scipy.ndimage.fourier import fourier_shift
from multiprocessing import Process, Queue
from . import utils as u
from .single_image import SingleImage as si
try:
import cPickle as pickle # noqa
except ImportError:
import pickle
try:
import pyfftw
_fftwn = pyfftw.interfaces.numpy_fft.fftn # noqa
_ifftwn = pyfftw.interfaces.numpy_fft.ifftn # noqa
except ImportError:
_fftwn = np.fft.fft2
_ifftwn = np.fft.ifft2
aa.PIXEL_TOL = 0.5
eps = np.finfo(np.float64).eps
def subtract(
ref,
new,
align=False,
inf_loss=0.25,
smooth_psf=False,
beta=True,
shift=True,
iterative=False,
fitted_psf=True,
):
"""
    Function that takes two images (SingleImage instances or compatible
    data) and performs Zackay & Ofek proper image subtraction.
Parameters:
-----------
align : bool
Whether to align the images before subtracting, default to False
inf_loss : float
Value of information loss in PSF estimation, lower limit is 0,
upper is 1. Only valid if fitted_psf=False. Default is 0.25
smooth_psf : bool
Whether to smooth the PSF, using a noise reduction technique.
Default to False.
beta : bool
Specify if using the relative flux scale estimation.
Default to True.
shift : bool
Whether to include a shift parameter in the iterative
methodology, in order to correct for misalignments.
Default to True.
iterative : bool
Specify if an iterative estimation of the subtraction relative
flux scale must be used. Default to False.
fitted_psf : bool
Whether to use a Gaussian fitted PSF. Overrides the use of
auto-psf determination. Default to True.
Returns:
--------
    D : np.ndarray(n, m) of float
        Subtraction image, Zackay's decorrelated D.
    P : np.ndarray(n, m) of float
        Subtraction image PSF. This is a full PSF image, with a size equal to D
    S_corr : np.ndarray of float
        Subtraction image S, Zackay's cross-correlated D x P
    mix_mask : np.ndarray of bool
        Mask of bad pixels for the subtraction image, with True marking bad pixels
"""
logger = logging.getLogger()
if fitted_psf:
from .single_image import SingleImageGaussPSF as SI
logger.info("Using single psf, gaussian modeled")
else:
from .single_image import SingleImage as SI
if not isinstance(ref, SI):
try:
ref = SI(ref, smooth_psf=smooth_psf)
except: # noqa
try:
ref = SI(ref.data, smooth_psf=smooth_psf)
except: # noqa
raise
if not isinstance(new, SI):
try:
new = SI(new, smooth_psf=smooth_psf)
except: # noqa
try:
new = SI(new.data, smooth_psf=smooth_psf)
except: # noqa
raise
if align:
registrd, registrd_mask = aa.register(new.data, ref.data)
new._clean()
# should it be new = type(new)( ?
new = SI(
registrd[: ref.data.shape[0], : ref.data.shape[1]],
mask=registrd_mask[: ref.data.shape[0], : ref.data.shape[1]],
borders=False,
smooth_psf=smooth_psf,
)
# new.data = registered
# new.data.mask = registered.mask
# make sure that the alignement has delivered arrays of size
if new.data.data.shape != ref.data.data.shape:
raise ValueError("N and R arrays are of different size")
t0 = time.time()
mix_mask = np.ma.mask_or(new.data.mask, ref.data.mask)
zps, meanmags = u.transparency([ref, new])
ref.zp = zps[0]
new.zp = zps[1]
n_zp = new.zp
r_zp = ref.zp
a_ref, psf_ref = ref.get_variable_psf(inf_loss)
a_new, psf_new = new.get_variable_psf(inf_loss)
if fitted_psf:
# I already know that a_ref and a_new are None, both of them
# And each psf is a list, first element a render,
# second element a model
p_r = psf_ref[1]
p_n = psf_new[1]
p_r.x_mean = ref.data.data.shape[0] / 2.0
p_r.y_mean = ref.data.data.shape[1] / 2.0
p_n.x_mean = new.data.data.shape[0] / 2.0
p_n.y_mean = new.data.data.shape[1] / 2.0
p_r.bounding_box = None
p_n.bounding_box = None
p_n = p_n.render(np.zeros(new.data.data.shape))
p_r = p_r.render(np.zeros(ref.data.data.shape))
dx_ref, dy_ref = center_of_mass(p_r) # [0])
dx_new, dy_new = center_of_mass(p_n) # [0])
else:
p_r = psf_ref[0]
p_n = psf_new[0]
dx_ref, dy_ref = center_of_mass(p_r) # [0])
dx_new, dy_new = center_of_mass(p_n) # [0])
if dx_new < 0.0 or dy_new < 0.0:
raise ValueError("Imposible to acquire center of PSF inside stamp")
psf_ref_hat = _fftwn(p_r, s=ref.data.shape, norm="ortho")
psf_new_hat = _fftwn(p_n, s=new.data.shape, norm="ortho")
psf_ref_hat[np.where(psf_ref_hat.real == 0)] = eps
psf_new_hat[np.where(psf_new_hat.real == 0)] = eps
psf_ref_hat_conj = psf_ref_hat.conj()
psf_new_hat_conj = psf_new_hat.conj()
D_hat_r = fourier_shift(psf_new_hat * ref.interped_hat, (-dx_new, -dy_new))
D_hat_n = fourier_shift(psf_ref_hat * new.interped_hat, (-dx_ref, -dy_ref))
norm_b = ref.var ** 2 * psf_new_hat * psf_new_hat_conj
norm_a = new.var ** 2 * psf_ref_hat * psf_ref_hat_conj
new_back = sep.Background(new.interped).back()
ref_back = sep.Background(ref.interped).back()
gamma = new_back - ref_back
b = n_zp / r_zp
norm = np.sqrt(norm_a + norm_b * b ** 2)
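    # Fourier-space proper subtraction (Zackay, Ofek & Gal-Yam 2016):
    #   D_hat = (P_r_hat * N_hat - b * P_n_hat * R_hat)
    #           / sqrt(var_n**2 * |P_r_hat|**2 + b**2 * var_r**2 * |P_n_hat|**2)
    # norm_a and norm_b above are the two terms of that denominator, and b is
    # the relative flux zero-point ratio, refined below when beta is True.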
if beta:
if shift: # beta==True & shift==True
def cost(vec):
b, dx, dy = vec
gammap = gamma / np.sqrt(new.var ** 2 + b ** 2 * ref.var ** 2)
norm = np.sqrt(norm_a + norm_b * b ** 2)
dhn = D_hat_n / norm
dhr = D_hat_r / norm
b_n = (
_ifftwn(dhn, norm="ortho")
- _ifftwn(fourier_shift(dhr, (dx, dy)), norm="ortho") * b
- np.roll(gammap, (int(round(dx)), int(round(dy))))
)
border = 100
cost = np.ma.MaskedArray(b_n.real, mask=mix_mask, fill_value=0)
cost = cost[border:-border, border:-border]
cost = np.sum(np.abs(cost / (cost.shape[0] * cost.shape[1])))
return cost
ti = time.time()
vec0 = [b, 0.0, 0.0]
bounds = ([0.1, -0.9, -0.9], [10.0, 0.9, 0.9])
solv_beta = optimize.least_squares(
cost,
vec0,
xtol=1e-5,
jac="3-point",
method="trf",
bounds=bounds,
)
tf = time.time()
if solv_beta.success:
logger.info(("Found that beta = {}".format(solv_beta.x)))
logger.info(("Took only {} awesome seconds".format(tf - ti)))
logger.info(
("The solution was with cost {}".format(solv_beta.cost))
)
b, dx, dy = solv_beta.x
else:
logger.info("Least squares could not find our beta :(")
logger.info("Beta is overriden to be the zp ratio again")
b = n_zp / r_zp
dx = 0.0
dy = 0.0
elif iterative: # beta==True & shift==False & iterative==True
bi = b
def F(b):
gammap = gamma / np.sqrt(new.var ** 2 + b ** 2 * ref.var ** 2)
norm = np.sqrt(norm_a + norm_b * b ** 2)
b_n = (
_ifftwn(D_hat_n / norm, norm="ortho")
- gammap
- b * _ifftwn(D_hat_r / norm, norm="ortho")
)
# robust_stats = lambda b: sigma_clipped_stats(
# b_n(b).real[100:-100, 100:-100])
cost = np.ma.MaskedArray(b_n.real, mask=mix_mask, fill_value=0)
return np.sum(np.abs(cost))
ti = time.time()
solv_beta = optimize.minimize_scalar(
F,
method="bounded",
bounds=[0.1, 10.0],
options={"maxiter": 1000},
)
tf = time.time()
if solv_beta.success:
logger.info(("Found that beta = {}".format(solv_beta.x)))
logger.info(("Took only {} awesome seconds".format(tf - tf)))
b = solv_beta.x
else:
logger.info("Least squares could not find our beta :(")
logger.info("Beta is overriden to be the zp ratio again")
b = n_zp / r_zp
dx = dy = 0.0
else: # beta==True & shift==False & iterative==False
bi = b
def F(b):
gammap = gamma / np.sqrt(new.var ** 2 + b ** 2 * ref.var ** 2)
norm = np.sqrt(norm_a + norm_b * b ** 2)
b_n = (
_ifftwn(D_hat_n / norm, norm="ortho")
- gammap
- b * _ifftwn(D_hat_r / norm, norm="ortho")
)
cost = np.ma.MaskedArray(b_n.real, mask=mix_mask, fill_value=0)
return np.sum(np.abs(cost))
ti = time.time()
solv_beta = optimize.least_squares(
F, bi, ftol=1e-8, bounds=[0.1, 10.0], jac="2-point"
)
tf = time.time()
if solv_beta.success:
logger.info(("Found that beta = {}".format(solv_beta.x)))
logger.info(("Took only {} awesome seconds".format(tf - tf)))
logger.info(
("The solution was with cost {}".format(solv_beta.cost))
)
b = solv_beta.x
else:
logger.info("Least squares could not find our beta :(")
logger.info("Beta is overriden to be the zp ratio again")
b = n_zp / r_zp
dx = dy = 0.0
else:
if shift: # beta==False & shift==True
bi = n_zp / r_zp
gammap = gamma / np.sqrt(new.var ** 2 + b ** 2 * ref.var ** 2)
norm = np.sqrt(norm_a + norm_b * b ** 2)
dhn = D_hat_n / norm
dhr = D_hat_r / norm
def cost(vec):
dx, dy = vec
b_n = (
_ifftwn(dhn, norm="ortho")
- _ifftwn(fourier_shift(dhr, (dx, dy)), norm="ortho") * b
- np.roll(gammap, (int(round(dx)), int(round(dy))))
)
border = 100
cost = np.ma.MaskedArray(b_n.real, mask=mix_mask, fill_value=0)
cost = cost[border:-border, border:-border]
cost = np.sum(np.abs(cost / (cost.shape[0] * cost.shape[1])))
return cost
ti = time.time()
vec0 = [0.0, 0.0]
bounds = ([-0.9, -0.9], [0.9, 0.9])
solv_beta = optimize.least_squares(
cost,
vec0,
xtol=1e-5,
jac="3-point",
method="trf",
bounds=bounds,
)
tf = time.time()
if solv_beta.success:
logger.info(("Found that shift = {}".format(solv_beta.x)))
logger.info(("Took only {} awesome seconds".format(tf - ti)))
logger.info(
("The solution was with cost {}".format(solv_beta.cost))
)
dx, dy = solv_beta.x
else:
logger.info("Least squares could not find our shift :(")
dx = 0.0
dy = 0.0
else: # beta==False & shift==False
b = new.zp / ref.zp
dx = 0.0
dy = 0.0
norm = norm_a + norm_b * b ** 2
if dx == 0.0 and dy == 0.0:
D_hat = (D_hat_n - b * D_hat_r) / np.sqrt(norm)
else:
D_hat = (D_hat_n - fourier_shift(b * D_hat_r, (dx, dy))) / np.sqrt(
norm
)
D = _ifftwn(D_hat, norm="ortho")
    if np.any(np.isnan(D.real)):
        pass  # no-op placeholder: NaN pixels in D are left for the caller
d_zp = b / np.sqrt(ref.var ** 2 * b ** 2 + new.var ** 2)
P_hat = (psf_ref_hat * psf_new_hat * b) / (np.sqrt(norm) * d_zp)
P = _ifftwn(P_hat, norm="ortho").real
dx_p, dy_p = center_of_mass(P)
dx_pk, dy_pk = [val[0] for val in np.where(P == np.max(P))]
    if (np.abs(dx_p - dx_pk) > 30) or (np.abs(dy_p - dy_pk) > 30):
logger.info("Resetting PSF center of mass to peak")
dx_p = dx_pk
dy_p = dy_pk
S_hat = fourier_shift(d_zp * D_hat * P_hat.conj(), (dx_p, dy_p))
kr = _ifftwn(
new.zp * psf_ref_hat_conj * b * psf_new_hat * psf_new_hat_conj / norm,
norm="ortho",
)
kn = _ifftwn(
new.zp * psf_new_hat_conj * psf_ref_hat * psf_ref_hat_conj / norm,
norm="ortho",
)
V_en = _ifftwn(
_fftwn(new.data.filled(0) + 1.0, norm="ortho")
* _fftwn(kn ** 2, s=new.data.shape),
norm="ortho",
)
V_er = _ifftwn(
_fftwn(ref.data.filled(0) + 1.0, norm="ortho")
* _fftwn(kr ** 2, s=ref.data.shape),
norm="ortho",
)
S_corr = _ifftwn(S_hat, norm="ortho") / np.sqrt(V_en + V_er)
logger.info("S_corr sigma_clipped_stats ")
logger.info(
(
"mean = {}, median = {}, std = {}\n".format(
*sigma_clipped_stats(S_corr.real.flatten(), sigma=4.0)
)
)
)
logger.info(
("Subtraction performed in {} seconds\n\n".format(time.time() - t0))
)
return D, P, S_corr.real, mix_mask
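# Example usage (a hypothetical sketch; the inputs and flags are illustrative,
# not part of the module):
# >>> D, P, S_corr, mask = subtract(ref=ref_image, new=new_image,
# ...                               align=True, beta=True, shift=True)
# >>> detection_map = S_corr  # D cross-correlated with P, noise-normalized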
def diff(*args, **kwargs):
warnings.warn(
"This is being deprecated in favour of `subtract`", DeprecationWarning
)
return subtract(*args, **kwargs)
class StackCombinator(Process):
"""Combination engine.
An engine for image combination in parallel, using multiprocessing.Process
class.
Uses an ensemble of images and a queue to calculate the propercoadd of
the list of images.
Parameters
----------
img_list: list or tuple
list of SingleImage instances used in the combination process
queue: multiprocessing.Queue instance
an instance of multiprocessing.Queue class where to pickle the
intermediate results.
shape: shape of the images being coadded.
stack: boolean, default True
Whether to stack the results for coadd or just obtain individual
image calculations.
If True it will pickle in queue a coadded image of the chunk's images.
If False it will pickle in queue a list of individual matched filtered
images.
fourier: boolean, default False.
Whether to calculate individual fourier transform of each s_component
image.
If stack is True this parameter will be ignored.
If stack is False, and fourier is True, the pickled object will be a
tuple of two values, with the first one containing the list of
s_components, and the second one containing the list of fourier
transformed s_components.
Returns
-------
Combinator process
An instance of Combinator.
This can be launched like a multiprocessing.Process
Example
-------
queue1 = multiprocessing.Queue()
queue2 = multiprocessing.Queue()
p1 = Combinator(list1, queue1)
p2 = Combinator(list2, queue2)
p1.start()
p2.start()
#results are in queues
result1 = queue1.get()
result2 = queue2.get()
p1.join()
p2.join()
"""
def __init__(
self,
img_list,
queue,
shape,
stack=True,
fourier=False,
*args,
        **kwargs
):
super(StackCombinator, self).__init__(*args, **kwargs)
self.list_to_combine = img_list
self.queue = queue
self.global_shape = shape
logging.getLogger("StackCombinator").info(self.global_shape)
# self.zps = ensemble.transparencies
def run(self):
S_hat = np.zeros(self.global_shape).astype(np.complex128)
psf_hat_sum = np.zeros(self.global_shape).astype(np.complex128)
mix_mask = self.list_to_combine[0].data.mask
for an_img in self.list_to_combine:
np.add(an_img.s_hat_comp, S_hat, out=S_hat, casting="same_kind")
np.add(
((an_img.zp / an_img.var) ** 2) * an_img.psf_hat_sqnorm(),
psf_hat_sum,
out=psf_hat_sum,
) # , casting='same_kind')
# psf_hat_sum = ((an_img.zp/an_img.var)**2)*an_img.psf_hat_sqnorm()
mix_mask = np.ma.mask_or(mix_mask, an_img.data.mask)
serialized = pickle.dumps([S_hat, psf_hat_sum, mix_mask])
self.queue.put(serialized)
return
def coadd(si_list, align=True, inf_loss=0.2, n_procs=2):
"""Function that takes a list of SingleImage instances
and performs a stacking using properimage R estimator
"""
logger = logging.getLogger()
for i_img, animg in enumerate(si_list):
if not isinstance(animg, si):
si_list[i_img] = si(animg)
if align:
img_list = u._align_for_coadd(si_list)
for an_img in img_list:
an_img.update_sources()
else:
img_list = si_list
shapex = np.min([an_img.data.shape[0] for an_img in img_list])
shapey = np.min([an_img.data.shape[1] for an_img in img_list])
global_shape = (shapex, shapey)
zps, meanmags = u.transparency(img_list)
for j, an_img in enumerate(img_list):
an_img.zp = zps[j]
an_img._setup_kl_a_fields(inf_loss)
psf_shapes = [an_img.stamp_shape[0] for an_img in img_list]
psf_shape = np.max(psf_shapes)
psf_shape = (psf_shape, psf_shape)
if n_procs > 1:
queues = []
procs = []
for chunk in u.chunk_it(img_list, n_procs):
queue = Queue()
proc = StackCombinator(
chunk, queue, shape=global_shape, stack=True, fourier=False
)
logger.info("starting new process")
proc.start()
queues.append(queue)
procs.append(proc)
logger.info("all chunks started, and procs appended")
S_hat = np.zeros(global_shape, dtype=np.complex128)
P_hat = np.zeros(global_shape, dtype=np.complex128)
        mix_mask = np.zeros(global_shape, dtype=bool)  # np.bool was removed in NumPy 1.24
for q in queues:
serialized = q.get()
logger.info("loading pickles")
s_hat_comp, psf_hat_sum, mask = pickle.loads(serialized)
np.add(s_hat_comp, S_hat, out=S_hat) # , casting='same_kind')
np.add(psf_hat_sum, P_hat, out=P_hat) # , casting='same_kind')
mix_mask = np.ma.mask_or(mix_mask, mask)
P_r_hat = np.sqrt(P_hat)
P_r = _ifftwn(fourier_shift(P_r_hat, psf_shape))
P_r = P_r / np.sum(P_r)
R = _ifftwn(S_hat / np.sqrt(P_hat))
logger.info("S calculated, now starting to join processes")
for proc in procs:
logger.info("waiting for procs to finish")
proc.join()
logger.info("processes finished, now returning R")
else:
S_hat = np.zeros(global_shape, dtype=np.complex128)
P_hat = np.zeros(global_shape, dtype=np.complex128)
mix_mask = img_list[0].data.mask
for an_img in img_list:
np.add(an_img.s_hat_comp, S_hat, out=S_hat)
np.add(
((an_img.zp / an_img.var) ** 2) * an_img.psf_hat_sqnorm(),
P_hat,
out=P_hat,
)
mix_mask = np.ma.mask_or(mix_mask, an_img.data.mask)
P_r_hat = np.sqrt(P_hat)
P_r = _ifftwn(fourier_shift(P_r_hat, psf_shape))
P_r = P_r / np.sum(P_r)
R = _ifftwn(S_hat / P_r_hat)
return R, P_r, mix_mask
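# Example usage (a hypothetical sketch; list members may be SingleImage
# instances or anything SingleImage accepts):
# >>> R, P_r, mask = coadd([img_a, img_b, img_c], align=True, n_procs=2)
# >>> coadd_image = R.real  # R comes back complex from the inverse FFT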
def stack_R(*args, **kwargs):
warnings.warn(
"This is being deprecated in favour of `coadd`", DeprecationWarning
)
return coadd(*args, **kwargs)
| toros-astro/ProperImage | properimage/operations.py | Python | bsd-3-clause | 20,993 |
import numpy
def doPCA(data, dim):
data = makeDataMatrix(data)
means = getMeanVector(data)
data = normalizeData(data, means)
cov = getCov(data)
eigvals, eigvecs = getEigs(cov)
principalComponents = sortEigs(eigvals, eigvecs)
return getDimensions(dim, principalComponents)
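# Example usage (hypothetical data; assumes samples are rows and features are
# columns, matching makeDataMatrix's transpose below):
# >>> pts = numpy.random.rand(100, 5)
# >>> components = doPCA(pts, 2)   # 2 x 5 array: top-2 principal directions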
def getDimensions(d, pc):
if d <= len(pc):
result = numpy.zeros((d, len(pc[0])))
for i in range(d):
            result[i] = pc[i]  # pc holds one sorted component per row
return result
else: return None
def sortEigs(vals, vecs):
result = numpy.zeros((len(vecs), len(vecs[0])))
#selection sort because vals is short for now so it should be fast enough
lastMax = float("inf")
for i in range(len(vals)):
currentMax = float("-inf")
currentInd = -1
for j in range(len(vals)):
if vals[j] > currentMax and vals[j] < lastMax:
currentMax = vals[j]
currentInd = j
if currentInd != -1:
            # numpy.linalg.eig returns eigenvectors as *columns* of vecs,
            # so the vector paired with vals[j] is vecs[:, j], not vecs[j]
            result[i] = vecs[:, currentInd]
lastMax = currentMax
return result
def getEigs(cov):
return numpy.linalg.eig(cov)
def getCov(data):
return numpy.cov(data)
def getMeanVector(data):
result = numpy.zeros(len(data))
for i in range(len(data)):
result[i] = numpy.mean(data[i,:])
return result
def normalizeData(data, means):
result = numpy.zeros((len(data), len(data[0])))
for i in range(len(data)):
result[i] = data[i,:] - means[i]
return result
def makeDataMatrix(data):
    return numpy.transpose(data)
| hakuliu/inf552 | hw3/pca.py | Python | apache-2.0 | 1541
# $Filename$$
# $Authors$
# Last Changed: $Date$ $Committer$ $Revision-Id$
#
# Copyright (c) 2003-2011, German Aerospace Center (DLR)
# All rights reserved.
#
#
#Redistribution and use in source and binary forms, with or without
#modification, are permitted provided that the following conditions are
#met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of the German Aerospace Center nor the names of
# its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
#LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
#A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
#OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
#SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
#LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
#DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
#THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
#(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
#OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Tests the base configuration.
"""
import unittest
from datafinder.persistence.common.configuration import BaseConfiguration
from datafinder.persistence.error import PersistenceError
__version__ = "$Revision-Id:$"
class BaseConfigurationTestCase(unittest.TestCase):
def setUp(self):
self._baseConfig = BaseConfiguration("http://test.dlr.de/myPath",
user="me", password="secret")
def testConfigurationHandling(self):
""" Demonstrates use cases of the base configuration. """
# Access URI parameters
self.assertEquals(self._baseConfig.uriScheme, "http")
self.assertEquals(self._baseConfig.uriNetloc, "test.dlr.de")
self.assertEquals(self._baseConfig.uriHostname, "test.dlr.de")
self.assertEquals(self._baseConfig.uriPort, None)
self.assertEquals(self._baseConfig.uriPath, "/myPath")
self.assertEquals(self._baseConfig.baseUri, "http://test.dlr.de/myPath")
# Access additional parameters
self.assertEquals(self._baseConfig.user, "me")
self.assertEquals(self._baseConfig.password, "secret")
# Access not defined parameters
self.assertEquals(self._baseConfig.unknown1, None)
self.assertEquals(self._baseConfig.unknown2, None)
# Testing None -> everything stays the same
self._baseConfig.baseUri = None
self.assertEquals(self._baseConfig.baseUri, "http://test.dlr.de/myPath")
# Testing unknown scheme
self._baseConfig.baseUri = ""
self.assertEquals(self._baseConfig.uriScheme, "")
self.assertEquals(self._baseConfig.uriNetloc, "")
self.assertEquals(self._baseConfig.uriHostname, None)
self.assertEquals(self._baseConfig.uriPort, None)
self.assertEquals(self._baseConfig.uriPath, "")
self.assertEquals(self._baseConfig.baseUri, "")
# Testing parsing error
for uri in ["http://test.de:as", "file://c:/as/as"]:
try:
self._baseConfig.baseUri = uri
self.fail("No PersistenceError has been thrown.")
except PersistenceError:
self.assertTrue(True)
| DLR-SC/DataFinder | test/unittest/datafinder_test/persistence/common/configuration_test.py | Python | bsd-3-clause | 3,995 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Removing M2M table for field assets on 'AssetType'
db.delete_table('AppDistribution_assettype_assets')
# Adding field 'AppAsset.asset_type'
db.add_column(u'AppDistribution_appasset', 'asset_type',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['AppDistribution.AssetType'], null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Adding M2M table for field assets on 'AssetType'
db.create_table(u'AppDistribution_assettype_assets', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('assettype', models.ForeignKey(orm[u'AppDistribution.assettype'], null=False)),
('appasset', models.ForeignKey(orm[u'AppDistribution.appasset'], null=False))
))
db.create_unique(u'AppDistribution_assettype_assets', ['assettype_id', 'appasset_id'])
# Deleting field 'AppAsset.asset_type'
db.delete_column(u'AppDistribution_appasset', 'asset_type_id')
models = {
u'AppDistribution.app': {
'Meta': {'object_name': 'App'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'device_type': ('django.db.models.fields.CharField', [], {'default': "'IOS'", 'max_length': '255'}),
'download_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'icon': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['AppDistribution.Product']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'apps'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['AppDistribution.Tag']"}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'AppDistribution.appasset': {
'Meta': {'object_name': 'AppAsset'},
'app': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'assets'", 'null': 'True', 'to': u"orm['AppDistribution.App']"}),
'asset_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'asset_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['AppDistribution.AssetType']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'AppDistribution.assettype': {
'Meta': {'object_name': 'AssetType'},
'extension': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'AppDistribution.page': {
'Meta': {'object_name': 'Page'},
'expiration_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'group_by': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'heading': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'requires_auth': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'top_html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'AppDistribution.pagerow': {
'Meta': {'object_name': 'PageRow'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['AppDistribution.Page']"}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['AppDistribution.Product']"}),
'show_options': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['AppDistribution.Tag']", 'null': 'True', 'blank': 'True'})
},
u'AppDistribution.product': {
'Meta': {'object_name': 'Product'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'AppDistribution.setting': {
'Meta': {'object_name': 'Setting'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'value_type': ('django.db.models.fields.CharField', [], {'max_length': '1'})
},
u'AppDistribution.spoutsite': {
'Meta': {'ordering': "('domain',)", 'object_name': 'SpoutSite'},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'home_page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['AppDistribution.Page']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'AppDistribution.spoutuser': {
'Meta': {'ordering': "['username']", 'object_name': 'SpoutUser'},
'allowed_pages': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'user_allowed_pages'", 'null': 'True', 'to': u"orm['AppDistribution.Page']"}),
'email': ('django.db.models.fields.EmailField', [], {'db_index': 'True', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'expiration_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'main_page': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'users'", 'null': 'True', 'to': u"orm['AppDistribution.Page']"}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
u'AppDistribution.tag': {
'Meta': {'object_name': 'Tag'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
    complete_apps = ['AppDistribution']
| ridecharge/spout | AppDistribution/migrations/0020_auto__add_field_appasset_asset_type.py | Python | bsd-3-clause | 9266
import kivy
from kivy.app import App
from kivy.lang import Builder
from kivy.logger import Logger
import logging
kivy.require('1.9.0')
# Logger.setLevel(logging.DEBUG)
import kivy.garden.contextmenu
kv = """
FloatLayout:
id: layout
AppMenu:
id: app_menu
top: root.height
cancel_handler_widget: layout
AppMenuTextItem:
text: "Menu #1"
ContextMenu:
ContextMenuTextItem:
text: "Item #11"
ContextMenuTextItem:
text: "Item #12"
AppMenuTextItem:
text: "Menu Menu Menu #2"
ContextMenu:
ContextMenuTextItem:
text: "Item #21"
ContextMenuTextItem:
text: "Item #22"
ContextMenuTextItem:
text: "ItemItemItem #23"
ContextMenuTextItem:
text: "Item #24"
ContextMenu:
ContextMenuTextItem:
text: "Item #241"
ContextMenuTextItem:
text: "Hello, World!"
on_release: app.say_hello(self.text)
ContextMenuTextItem:
text: "Item #243"
ContextMenuTextItem:
text: "Item #244"
ContextMenuTextItem:
text: "Item #5"
AppMenuTextItem:
text: "Menu Menu #3"
ContextMenu:
ContextMenuTextItem:
text: "SubMenu #31"
ContextMenuTextItem:
text: "SubMenu #32"
ContextMenuTextItem:
text: "SubMenu #33"
ContextMenuDivider:
ContextMenuTextItem:
text: "SubMenu #34"
AppMenuTextItem:
text: "Menu #4"
Label:
pos: 10, 10
text: "Left click anywhere outside the context menu to close it"
size_hint: None, None
size: self.texture_size
"""
class MyApp(App):
def build(self):
self.title = 'Simple app menu example'
return Builder.load_string(kv)
def say_hello(self, text):
print(text)
self.root.ids['app_menu'].close_all()
if __name__ == '__main__':
    MyApp().run()
| Bakterija/mmplayer | mmplayer/other/garden_contextmenu/examples/simple_app_menu.py | Python | mit | 2416
import logging
import random
import time
import config
# uncomment only one
#
logging.basicConfig(format='%(message)s', level=logging.INFO)
#logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG)
# load camera configuration
#
try:
settings = config.camera
except AttributeError:  # config module has no 'camera' section
settings = {}
# transmit updates via Unix Domain Socket
#
import uds
# send some fake measurements
#
counter = 0
while counter < 50:
counter += 1
# build a random update: <camera_id> <standing> <moves> <faces>
#
message = '%s %d %d %d' % (settings.get('id', 'camera42'),
random.randint(0, 10),
random.randint(0, 3),
random.randint(0, 3) )
uds.push(message)
# wait a bit
#
logging.debug('sleeping')
time.sleep(2)
| MartinPaques/smart-video-counter | source/test_updater.py | Python | apache-2.0 | 843 |
#!/usr/bin/env python3
from anormbookmarker.test.test_enviroment import *
with self_contained_session(CONFIG.database_timestamp) as session:
BASE.metadata.create_all(session.bind)
# make a tag to make an alias to
aa = Tag.construct(session=session, tag='a a')
session.commit()
db_result = [('select COUNT(*) from alias;', 0),
('select COUNT(*) from aliasword;', 0),
('select COUNT(*) from bookmark;', 0),
('select COUNT(*) from filename;', 0),
('select COUNT(*) from tag;', 1),
('select COUNT(*) from tag_relationship;', 0),
('select COUNT(*) from tagbookmarks;', 0),
('select COUNT(*) from tagword;', 2),
('select COUNT(*) from word;', 1),
('select COUNT(*) from wordmisspelling;', 0)]
check_db_result(config=CONFIG, db_result=db_result)
| jakeogh/anormbookmarker | anormbookmarker/test/tests/Tag/two_single_char_words.py | Python | mit | 876 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
try:
from ._models_py3 import ComplianceStatus
from ._models_py3 import ErrorDefinition
from ._models_py3 import ErrorResponse
from ._models_py3 import HelmOperatorProperties
from ._models_py3 import ProxyResource
from ._models_py3 import Resource
from ._models_py3 import ResourceProviderOperation
from ._models_py3 import ResourceProviderOperationDisplay
from ._models_py3 import ResourceProviderOperationList
from ._models_py3 import Result
from ._models_py3 import SourceControlConfiguration
from ._models_py3 import SourceControlConfigurationList
from ._models_py3 import SystemData
except (SyntaxError, ImportError):
from ._models import ComplianceStatus # type: ignore
from ._models import ErrorDefinition # type: ignore
from ._models import ErrorResponse # type: ignore
from ._models import HelmOperatorProperties # type: ignore
from ._models import ProxyResource # type: ignore
from ._models import Resource # type: ignore
from ._models import ResourceProviderOperation # type: ignore
from ._models import ResourceProviderOperationDisplay # type: ignore
from ._models import ResourceProviderOperationList # type: ignore
from ._models import Result # type: ignore
from ._models import SourceControlConfiguration # type: ignore
from ._models import SourceControlConfigurationList # type: ignore
from ._models import SystemData # type: ignore
from ._source_control_configuration_client_enums import (
ComplianceStateType,
CreatedByType,
Enum0,
Enum1,
MessageLevelType,
OperatorScopeType,
OperatorType,
ProvisioningStateType,
)
__all__ = [
'ComplianceStatus',
'ErrorDefinition',
'ErrorResponse',
'HelmOperatorProperties',
'ProxyResource',
'Resource',
'ResourceProviderOperation',
'ResourceProviderOperationDisplay',
'ResourceProviderOperationList',
'Result',
'SourceControlConfiguration',
'SourceControlConfigurationList',
'SystemData',
'ComplianceStateType',
'CreatedByType',
'Enum0',
'Enum1',
'MessageLevelType',
'OperatorScopeType',
'OperatorType',
'ProvisioningStateType',
]
| Azure/azure-sdk-for-python | sdk/kubernetesconfiguration/azure-mgmt-kubernetesconfiguration/azure/mgmt/kubernetesconfiguration/v2021_03_01/models/__init__.py | Python | mit | 2,687 |
import bcrypt
import tempfile
from datetime import datetime
from hashlib import sha256, sha1
from dictshield.base import ShieldException
from dictshield.fields import StringField, IntField, DateTimeField, EmailField, URLField
from psycopg2 import DatabaseError
from boto.s3.key import Key
from boardhood.models.base import BaseModel, ValidationException, UniqueViolationException
from boardhood.sql.users import *
from boardhood.helpers.image import resize, get_extension
class User(BaseModel):
_public_fields = ['name', 'avatar_url', 'conversations_count', 'interests_count']
STATUS_ACTIVE = 1
STATUS_PENDING = 2
STATUS_BLOCKED = 3
STATUS_REMOVED = 4
BUCKET_NAME = 'boardhood_profile_images'
ALLOWED_IMAGES = set(['png', 'jpg', 'jpeg', 'gif'])
id = IntField()
name = StringField(max_length=45)
email = EmailField()
password = StringField()
status = IntField()
created_at = DateTimeField()
updated_at = DateTimeField()
avatar_url = URLField()
conversations_count = IntField()
interests_count = IntField()
ip = StringField()
timestamp = DateTimeField()
def encrypt_password(self):
self.password = bcrypt.hashpw(self.password, bcrypt.gensalt())
def to_self_dict(self):
data = self.to_dict()
data.update({'email': self.email})
return data
@staticmethod
def parse(row):
if not row:
return None
return User(**row)
@staticmethod
def parse_all(rows):
records = []
for row in rows:
records.append(User.parse(row))
return records
@staticmethod
def create(user):
try:
cursor = User.cursor()
user.encrypt_password()
user.status = User.STATUS_ACTIVE
user.validate()
cursor.execute(CREATE, user.to_sqldict())
User.db.commit()
user.id = cursor.fetchone()['id']
return user
except ShieldException, e:
raise ValidationException(e)
except DatabaseError, e:
if e.pgcode == '23505':
raise UniqueViolationException(e)
else:
User.report_error(e, cursor)
return False
except Exception, e:
User.report_error(e, cursor)
return False
@staticmethod
def exists(id):
try:
cursor = User.cursor()
cursor.execute(EXISTS, [id])
return cursor.fetchone() is not None
except Exception, e:
User.report_error(e, cursor)
return False
@staticmethod
def authenticate(user):
try:
cursor = User.cursor()
cursor.execute(AUTHENTICATE, user.to_sqldict())
data = cursor.fetchone()
if data is None or bcrypt.hashpw(user.password, data['password']) != data['password']:
return False
else:
return user.update_values(data)
except Exception, e:
User.report_error(e, cursor)
return False
@staticmethod
def findByName(name):
try:
cursor = User.cursor()
cursor.execute(FIND_BY_NAME, [name])
return User.parse(cursor.fetchone())
except Exception, e:
User.report_error(e, cursor)
return None
@staticmethod
def findById(id):
try:
cursor = User.cursor()
cursor.execute(FIND_BY_ID, [id])
return User.parse(cursor.fetchone())
except Exception, e:
User.report_error(e, cursor)
return None
@staticmethod
def nameExists(name):
try:
cursor = User.cursor()
cursor.execute(NAME_EXISTS, [name])
return cursor.fetchone() is not None
except Exception, e:
User.report_error(e, cursor)
return False
@staticmethod
def emailExists(email):
try:
cursor = User.cursor()
cursor.execute(EMAIL_EXISTS, [email])
return cursor.fetchone() is not None
except Exception, e:
User.report_error(e, cursor)
return False
@staticmethod
def updateAvatarUrl(id, avatar_url):
try:
cursor = User.cursor()
cursor.execute(UPDATE_AVATAR, [avatar_url, id])
User.db.commit()
return True
except Exception, e:
User.report_error(e, cursor)
return False
def saveAvatar(self, file):
avatar = create_avatar(file)
if avatar:
content_type = file.mimetype
ext = get_extension(file.filename)
name = sha1(self.name + str(datetime.utcnow())).hexdigest()
filename = '%s.%s' % (name, ext)
if not content_type:
content_type = 'text/plain'
bucket = User.s3.get_bucket(User.BUCKET_NAME)
key = Key(bucket, filename)
res = key.set_contents_from_file(avatar, policy='public-read')
if res:
self.avatar_url = User.S3_URL % (User.BUCKET_NAME, filename)
User.updateAvatarUrl(self.id, self.avatar_url)
return True
else:
return False
else:
return False
def update(self):
try:
if len(self.password) != 60:
self.encrypt_password()
self.validate()
cursor = User.cursor()
cursor.execute(UPDATE, self.to_sqldict())
User.db.commit()
return True
except ShieldException, e:
raise ValidationException(e)
except DatabaseError, e:
if e.pgcode == '23505': # already exists
raise UniqueViolationException(e)
else:
User.report_error(e, cursor)
return False
except Exception, e:
User.report_error(e, cursor)
return False
def create_avatar(file):
try:
img = resize(file)
if img is False:
return False
tmp = tempfile.NamedTemporaryFile()
ext = get_extension(file.filename)
if ext == 'jpg' or ext == 'jpeg':
img.save(tmp, "JPEG", quality=100)
elif ext == 'png':
img.save(tmp, "PNG", quality=100)
elif ext == 'gif':
img.save(tmp, "GIF", quality=100)
tmp.seek(0)
return tmp
except IOError, e:
User.report_error(e)
return False
| mayconbordin/boardhood | server_app/api/boardhood/models/users.py | Python | mit | 6,641 |
# Copyright 2006 James Tauber and contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyjamas import DOM
from pyjamas.ui.CellFormatter import CellFormatter
class FlexCellFormatter(CellFormatter):
def __init__(self, outer, **kwargs):
CellFormatter.__init__(self, outer, **kwargs)
def getColSpan(self, row, column):
return DOM.getIntAttribute(self.getElement(row, column), "colSpan")
def getRowSpan(self, row, column):
return DOM.getIntAttribute(self.getElement(row, column), "rowSpan")
def setColSpan(self, row, column, colSpan):
DOM.setIntAttribute(self.ensureElement(row, column), "colSpan", colSpan)
def setRowSpan(self, row, column, rowSpan):
DOM.setIntAttribute(self.ensureElement(row, column), "rowSpan", rowSpan)
| lovelysystems/pyjamas | library/pyjamas/ui/FlexCellFormatter.py | Python | apache-2.0 | 1,299 |
from mrcrowbar.colour import TEST_PALETTE
from mrcrowbar import models as mrc
from mrcrowbar.lib.images import base as img
class TIMFile( mrc.Block ):
file_name = mrc.Bytes( length=13 )
size = mrc.UInt32_LE()
data = mrc.Bytes( length=mrc.Ref( "size" ) )
@property
def repr( self ):
return self.file_name.split( b"\x00" )[0].decode( "utf8" )
class ResourceTIM( mrc.Block ):
raw_data = mrc.Bytes()
def __init__( self, *args, **kwargs ):
super().__init__( *args, **kwargs )
self.store = mrc.Store( self, mrc.Ref( "raw_data" ) )
class TIMFileEntry( mrc.Block ):
_file = mrc.StoreRef(
TIMFile, mrc.Ref( "_parent._resource.store" ), mrc.Ref( "offset" )
)
name_hash = mrc.Int32_LE()
offset = mrc.UInt32_LE()
class TIMFileStruct( mrc.Block ):
_resource = None # replace with the ResourceTIM object
file_name = mrc.Bytes( length=13 )
entry_count = mrc.UInt16_LE()
entries = mrc.BlockField( TIMFileEntry, count=mrc.Ref( "entry_count" ) )
class ResourceMapTIM( mrc.Block ):
hash_index = mrc.Bytes( length=4 )
file_count = mrc.UInt16_LE()
files = mrc.BlockField( TIMFileStruct, count=mrc.Ref( "file_count" ) )
class BitmapFrame( mrc.Block ):
width = mrc.UInt16_LE()
height = mrc.UInt16_LE()
unk1 = mrc.UInt8()
size = mrc.UInt32_LE()
image_data = mrc.Bytes( length=mrc.Ref( "size" ) )
def __init__( self, *args, **kwargs ):
super().__init__( *args, **kwargs )
self.image = img.IndexedImage(
self,
width=mrc.Ref( "width" ),
height=mrc.Ref( "height" ),
source=mrc.Ref( "image_data" ),
palette=mrc.Ref( "_parent._parent._palette" ),
)
class BitmapData( mrc.Block ):
unk1 = mrc.UInt16_LE()
frame_count = mrc.UInt16_LE()
frame_offsets = mrc.UInt32_LE( count=mrc.Ref( "frame_count" ) )
raw_data = mrc.Bytes()
@property
def base_offset( self ):
return -self.get_field_end_offset( "frame_offsets" ) - 8
def __init__( self, *args, **kwargs ):
self.store = mrc.LinearStore(
parent=self,
source=mrc.Ref( "raw_data" ),
block_klass=BitmapFrame,
offsets=mrc.Ref( "frame_offsets" ),
base_offset=mrc.Ref( "base_offset" ),
)
super().__init__( *args, **kwargs )
class BitmapTIM( mrc.Block ):
magic = mrc.Const( mrc.Bytes( length=4 ), b"BMP:" )
size = mrc.UInt32_LE()
bitmap_data = mrc.BlockField( BitmapData, length=mrc.Ref( "size" ) )
# replace this at load time
_palette = TEST_PALETTE
| moralrecordings/mrcrowbar | mrcrowbar/lib/games/sierra.py | Python | bsd-3-clause | 2,635 |
import unittest
import sys
sys.path.append('../')
sys.path.append('../entities')
from SourceDAO import SourceDAO
from Source import Source
class SourceTestCase(unittest.TestCase):
hash = 'fake_hash_3453k24j5hk234j5h32kj5kjb345'
def testInsert(self):
sdao = SourceDAO()
source = Source()
source.name = 'zezinho'
source.hash = self.hash
source.software = 'mldonkey 3.1.2'
source.osinfo = 'Linux'
sid = sdao.insertOrUpdate(source)
assert sid is not None, 'error inserting source'
def testSelect(self):
sdao = SourceDAO()
source = sdao.findByHash(self.hash)
s = sdao.find(source.id)
assert s is not None, 'error selecting source'
def testDelete(self):
sdao = SourceDAO()
source = sdao.findByHash(self.hash)
sdao.delete(source.id)
s = sdao.find(source.id)
assert s is None, 'error deleting source'
suite = unittest.TestSuite()
suite.addTest(SourceTestCase("testInsert"))
suite.addTest(SourceTestCase("testSelect"))
suite.addTest(SourceTestCase("testDelete"))
#unittest.main("SourceTestCase.suite")
runner = unittest.TextTestRunner()
runner.run(suite)
| tassia/DonkeySurvey | src/database/tests/SourceTestCase.py | Python | gpl-3.0 | 1,220 |
import time, hashlib
start = time.time()
prev = hashlib.md5(str(0).encode('utf-8')).hexdigest()
for i in range(1, 1000):
    d = {}  # use 'd' to avoid shadowing the builtin dict type
    for j in range(1, 1000):
        d[hashlib.md5(str(j).encode('utf-8')).hexdigest()] = j
    d = sorted(d)  # sorted() over a dict returns its keys as a sorted list
end = time.time()
print("{0} sec.".format(end - start))
| Hast/Benchmark | python/dictionaries.py | Python | gpl-3.0 | 313 |
# -*- test-case-name: twisted.words.test.test_jabbersaslmechanisms -*-
#
# Copyright (c) 2001-2007 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Protocol agnostic implementations of SASL authentication mechanisms.
"""
import md5, binascii, random, time, os
from zope.interface import Interface, Attribute, implements
class ISASLMechanism(Interface):
name = Attribute("""Common name for the SASL Mechanism.""")
def getInitialResponse():
"""
Get the initial client response, if defined for this mechanism.
@return: initial client response string.
@rtype: L{str}.
"""
def getResponse(challenge):
"""
Get the response to a server challenge.
@param challenge: server challenge.
@type challenge: L{str}.
@return: client response.
@rtype: L{str}.
"""
class Plain(object):
"""
Implements the PLAIN SASL authentication mechanism.
The PLAIN SASL authentication mechanism is defined in RFC 2595.
"""
implements(ISASLMechanism)
name = 'PLAIN'
def __init__(self, authzid, authcid, password):
self.authzid = authzid or ''
self.authcid = authcid or ''
self.password = password or ''
def getInitialResponse(self):
return "%s\x00%s\x00%s" % (self.authzid.encode('utf-8'),
self.authcid.encode('utf-8'),
self.password.encode('utf-8'))
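    # Example (hypothetical credentials):
    #   Plain(None, u'user', u'secret').getInitialResponse()
    # returns '\x00user\x00secret', i.e. the NUL-separated authzid, authcid
    # and password fields defined by RFC 2595.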
class DigestMD5(object):
"""
Implements the DIGEST-MD5 SASL authentication mechanism.
The DIGEST-MD5 SASL authentication mechanism is defined in RFC 2831.
"""
implements(ISASLMechanism)
name = 'DIGEST-MD5'
def __init__(self, serv_type, host, serv_name, username, password):
self.username = username
self.password = password
self.defaultRealm = host
self.digest_uri = '%s/%s' % (serv_type, host)
if serv_name is not None:
self.digest_uri += '/%s' % serv_name
def getInitialResponse(self):
return None
def getResponse(self, challenge):
directives = self._parse(challenge)
        # Compatibility for implementations that do not send this along with
        # a successful authentication.
if 'rspauth' in directives:
return ''
try:
realm = directives['realm']
except KeyError:
realm = self.defaultRealm
return self._gen_response(directives['charset'],
realm,
directives['nonce'])
def _parse(self, challenge):
"""
Parses the server challenge.
Splits the challenge into a dictionary of directives with values.
@return: challenge directives and their values.
@rtype: L{dict} of L{str} to L{str}.
"""
s = challenge
paramDict = {}
cur = 0
remainingParams = True
while remainingParams:
# Parse a param. We can't just split on commas, because there can
# be some commas inside (quoted) param values, e.g.:
# qop="auth,auth-int"
middle = s.index("=", cur)
name = s[cur:middle].lstrip()
middle += 1
if s[middle] == '"':
middle += 1
end = s.index('"', middle)
value = s[middle:end]
cur = s.find(',', end) + 1
if cur == 0:
remainingParams = False
else:
end = s.find(',', middle)
if end == -1:
value = s[middle:].rstrip()
remainingParams = False
else:
value = s[middle:end].rstrip()
cur = end + 1
paramDict[name] = value
for param in ('qop', 'cipher'):
if param in paramDict:
paramDict[param] = paramDict[param].split(',')
return paramDict
def _unparse(self, directives):
"""
Create message string from directives.
@param directives: dictionary of directives (names to their values).
For certain directives, extra quotes are added, as
needed.
@type directives: L{dict} of L{str} to L{str}
@return: message string.
@rtype: L{str}.
"""
directive_list = []
for name, value in directives.iteritems():
if name in ('username', 'realm', 'cnonce',
'nonce', 'digest-uri', 'authzid', 'cipher'):
directive = '%s="%s"' % (name, value)
else:
directive = '%s=%s' % (name, value)
directive_list.append(directive)
return ','.join(directive_list)
def _gen_response(self, charset, realm, nonce):
"""
Generate response-value.
Creates a response to a challenge according to section 2.1.2.1 of
RFC 2831 using the L{charset}, L{realm} and L{nonce} directives
from the challenge.
"""
def H(s):
return md5.new(s).digest()
def HEX(n):
return binascii.b2a_hex(n)
def KD(k, s):
return H('%s:%s' % (k, s))
try:
username = self.username.encode(charset)
password = self.password.encode(charset)
except UnicodeError:
# TODO - add error checking
raise
nc = '%08x' % 1 # TODO: support subsequent auth.
cnonce = self._gen_nonce()
qop = 'auth'
# TODO - add support for authzid
a1 = "%s:%s:%s" % (H("%s:%s:%s" % (username, realm, password)),
nonce,
cnonce)
a2 = "AUTHENTICATE:%s" % self.digest_uri
response = HEX( KD ( HEX(H(a1)),
"%s:%s:%s:%s:%s" % (nonce, nc,
cnonce, "auth", HEX(H(a2)))))
directives = {'username': username,
'realm' : realm,
'nonce' : nonce,
'cnonce' : cnonce,
'nc' : nc,
'qop' : qop,
'digest-uri': self.digest_uri,
'response': response,
'charset': charset}
return self._unparse(directives)
def _gen_nonce(self):
return md5.new("%s:%s:%s" % (str(random.random()) , str(time.gmtime()),str(os.getpid()))).hexdigest()
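# Illustrative sketch (not part of Twisted): exercising DigestMD5._parse()
# on a typical server challenge to show the directive splitting described
# above; the service, host and credential values are invented.
if __name__ == '__main__':
    mech = DigestMD5('xmpp', 'example.com', None, 'user', 'secret')
    challenge = ('realm="example.com",nonce="OA6MG9tEQGm2hh",'
                 'qop="auth,auth-int",charset=utf-8,algorithm=md5-sess')
    print mech._parse(challenge)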
| hortonworks/hortonworks-sandbox | desktop/core/ext-py/Twisted/twisted/words/protocols/jabber/sasl_mechanisms.py | Python | apache-2.0 | 6,610 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for CreateDatabase
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-spanner-admin-database
# [START spanner_v1_generated_DatabaseAdmin_CreateDatabase_async]
from google.cloud import spanner_admin_database_v1
async def sample_create_database():
# Create a client
client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
# Initialize request argument(s)
request = spanner_admin_database_v1.CreateDatabaseRequest(
parent="parent_value",
create_statement="create_statement_value",
)
    # Make the request (the async client call is a coroutine, so it must be
    # awaited to obtain the long-running operation)
    operation = await client.create_database(request=request)
print("Waiting for operation to complete...")
response = await operation.result()
# Handle the response
print(response)
# [END spanner_v1_generated_DatabaseAdmin_CreateDatabase_async]
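# Hedged usage note (not part of the generated snippet): the coroutine above
# must be driven by an event loop, for example:
#
#   import asyncio
#   asyncio.run(sample_create_database())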
| googleapis/python-spanner | samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py | Python | apache-2.0 | 1,667 |
"""
django admin page for the course creators table
"""
from course_creators.models import CourseCreator, update_creator_state, send_user_notification, send_admin_notification
from course_creators.views import update_course_creator_group
from ratelimitbackend import admin
from django.conf import settings
from django.dispatch import receiver
from edxmako.shortcuts import render_to_string
from django.core.mail import send_mail
from smtplib import SMTPException
import logging
log = logging.getLogger("studio.coursecreatoradmin")
def get_email(obj):
""" Returns the email address for a user """
return obj.user.email
get_email.short_description = 'email'
class CourseCreatorAdmin(admin.ModelAdmin):
"""
Admin for the course creator table.
"""
# Fields to display on the overview page.
list_display = ['username', get_email, 'state', 'state_changed', 'note']
readonly_fields = ['username', 'state_changed']
# Controls the order on the edit form (without this, read-only fields appear at the end).
fieldsets = (
(None, {
'fields': ['username', 'state', 'state_changed', 'note']
}),
)
    # Fields that support filtering
list_filter = ['state', 'state_changed']
# Fields that search supports.
search_fields = ['user__username', 'user__email', 'state', 'note']
# Turn off the action bar (we have no bulk actions)
actions = None
def username(self, inst):
"""
Returns the username for a given user.
Implemented to make sorting by username instead of by user object.
"""
return inst.user.username
username.admin_order_field = 'user__username'
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
return False
def has_change_permission(self, request, obj=None):
return request.user.is_staff
def save_model(self, request, obj, form, change):
# Store who is making the request.
obj.admin = request.user
obj.save()
admin.site.register(CourseCreator, CourseCreatorAdmin)
@receiver(update_creator_state, sender=CourseCreator)
def update_creator_group_callback(sender, **kwargs):
"""
Callback for when the model's creator status has changed.
"""
user = kwargs['user']
updated_state = kwargs['state']
update_course_creator_group(kwargs['caller'], user, updated_state == CourseCreator.GRANTED)
@receiver(send_user_notification, sender=CourseCreator)
def send_user_notification_callback(sender, **kwargs):
"""
Callback for notifying user about course creator status change.
"""
print "+++++++++++++++++++++"
print "=======================================nicky"
user = kwargs['user']
updated_state = kwargs['state']
studio_request_email = settings.FEATURES.get('STUDIO_REQUEST_EMAIL', '')
context = {'studio_request_email': studio_request_email}
subject = render_to_string('emails/course_creator_subject.txt', context)
subject = ''.join(subject.splitlines())
if updated_state == CourseCreator.GRANTED:
message_template = 'emails/course_creator_granted.txt'
elif updated_state == CourseCreator.DENIED:
message_template = 'emails/course_creator_denied.txt'
else:
# changed to unrequested or pending
message_template = 'emails/course_creator_revoked.txt'
message = render_to_string(message_template, context)
try:
user.email_user(subject, message, studio_request_email)
        except Exception:
log.warning("Unable to send course creator status e-mail to %s", user.email)
@receiver(send_admin_notification, sender=CourseCreator)
def send_admin_notification_callback(sender, **kwargs):
"""
Callback for notifying admin of a user in the 'pending' state.
"""
user = kwargs['user']
studio_request_email = settings.FEATURES.get('STUDIO_REQUEST_EMAIL', '')
context = {'user_name': user.username, 'user_email': user.email}
subject = render_to_string('emails/course_creator_admin_subject.txt', context)
subject = ''.join(subject.splitlines())
message = render_to_string('emails/course_creator_admin_user_pending.txt', context)
try:
send_mail(
subject,
message,
studio_request_email,
[studio_request_email],
fail_silently=False
)
except SMTPException:
log.warning("Failure sending 'pending state' e-mail for %s to %s", user.email, studio_request_email)
| xiandiancloud/edx-platform | cms/djangoapps/course_creators/admin.py | Python | agpl-3.0 | 4,561 |
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django.db.transaction import atomic
from django.http.response import JsonResponse
from django.shortcuts import get_object_or_404
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from django.views.generic import DetailView, ListView, View
from django.views.generic.detail import SingleObjectMixin
from shuup.core.excs import ProductNotOrderableProblem
from shuup.core.models import OrderLineType, Product
from shuup.core.utils.users import real_user_or_none
from shuup.front.models import StoredBasket
class CartViewMixin(object):
model = StoredBasket
def get_queryset(self):
qs = super(CartViewMixin, self).get_queryset()
return qs.filter(persistent=True, deleted=False, customer=self.request.customer, shop=self.request.shop)
class CartListView(CartViewMixin, ListView):
template_name = 'shuup/saved_carts/cart_list.jinja'
context_object_name = 'carts'
class CartDetailView(CartViewMixin, DetailView):
template_name = 'shuup/saved_carts/cart_detail.jinja'
context_object_name = 'cart'
def get_queryset(self):
qs = super(CartDetailView, self).get_queryset()
return qs.prefetch_related("products")
def get_context_data(self, **kwargs):
context = super(CartDetailView, self).get_context_data(**kwargs)
lines = []
product_dict = {}
for product in self.object.products.all():
product_dict[product.id] = product
for line in self.object.data.get("lines", []):
if line.get("type", None) != OrderLineType.PRODUCT:
continue
product = product_dict[line["product_id"]]
quantity = line.get("quantity", 0)
lines.append({
"product": product,
"quantity": quantity,
})
context["lines"] = lines
return context
class CartSaveView(View):
def post(self, request, *args, **kwargs):
title = request.POST.get("title", "")
basket = request.basket
if not request.customer:
return JsonResponse({"ok": False}, status=403)
if not title:
return JsonResponse({"ok": False, "error": force_text(_("Please enter a basket title."))}, status=400)
if basket.product_count == 0:
return JsonResponse({"ok": False, "error": force_text(_("Cannot save an empty basket."))}, status=400)
saved_basket = StoredBasket(
shop=basket.shop,
customer=basket.customer,
orderer=basket.orderer,
creator=real_user_or_none(basket.creator),
currency=basket.currency,
prices_include_tax=basket.prices_include_tax,
persistent=True,
title=title,
data=basket.storage.load(basket=basket),
product_count=basket.product_count)
saved_basket.save()
saved_basket.products = set(basket.product_ids)
return JsonResponse({"ok": True}, status=200)
class CartAddAllProductsView(CartViewMixin, SingleObjectMixin, View):
def get_object(self):
return get_object_or_404(self.get_queryset(), pk=self.kwargs.get("pk"))
def _get_supplier(self, shop_product, supplier_id):
if supplier_id:
supplier = shop_product.suppliers.filter(pk=supplier_id).first()
else:
supplier = shop_product.suppliers.first()
return supplier
@atomic
def post(self, request, *args, **kwargs):
cart = self.get_object()
basket = request.basket
product_ids_to_quantities = basket.get_product_ids_and_quantities()
errors = []
quantity_added = 0
for line in cart.data.get('lines', []):
if line.get("type", None) != OrderLineType.PRODUCT:
continue
product = Product.objects.get(id=line.get("product_id", None))
shop_product = product.get_shop_instance(shop=request.shop)
if not shop_product:
errors.append({"product": line.text, "message": _("Product not available in this shop")})
continue
supplier = self._get_supplier(shop_product, line.get("supplier_id"))
if not supplier:
errors.append({"product": line.text, "message": _("Invalid supplier")})
continue
try:
quantity = line.get("quantity", 0)
quantity_added += quantity
product_quantity = quantity + product_ids_to_quantities.get(line["product_id"], 0)
shop_product.raise_if_not_orderable(
supplier=supplier,
quantity=product_quantity,
customer=request.customer)
basket.add_product(
supplier=supplier,
shop=request.shop,
product=product,
quantity=quantity)
except ProductNotOrderableProblem as e:
errors.append({"product": line["text"], "message": force_text(e.message)})
return JsonResponse({
"errors": errors,
"success": force_text(_("%d product(s) added to cart" % quantity_added))
}, status=200)
class CartDeleteView(CartViewMixin, SingleObjectMixin, View):
def post(self, request, *args, **kwargs):
cart = self.get_object()
cart.deleted = True
cart.save()
return JsonResponse({"status": "success"}, status=200)
| hrayr-artunyan/shuup | shuup/front/apps/saved_carts/views.py | Python | agpl-3.0 | 5,738 |
import decimal
import ipaddr
from django.contrib.auth import models as auth_models
from django.core import exceptions
from django.db.models import *
from nodehub import forms
from nodehub import utils
class IPAddressField(Field):
__metaclass__ = SubfieldBase
description = 'An IP address stored as a decimal'
def _format(self, value):
try:
return unicode(self.to_python(value)) or ''
except ValueError:
return ''
def clean(self, value, *args, **kwargs):
value = super(IPAddressField, self).clean(value, *args, **kwargs)
try:
self.to_python(value)
except ValueError:
raise exceptions.ValidationError('Enter a valid IP address.')
return value
def to_python(self, value):
if not value:
return None
if isinstance(value, (ipaddr.IPv4Network, ipaddr.IPv6Network)):
return value.network
elif isinstance(value, (ipaddr.IPv4Address, ipaddr.IPv6Address)):
return value
elif isinstance(value, decimal.Decimal):
value = int(value)
return ipaddr.IPAddress(value)
def get_internal_type(self):
return 'DecimalField'
def get_db_prep_value(self, value, connection, prepared=False):
return connection.ops.value_to_db_decimal(
decimal.Decimal(int(self.get_prep_value(value))),
39, 0)
def get_prep_value(self, value):
return self.to_python(value)
def formfield(self, **kwargs):
defaults = {'form_class': forms.IPAddressField}
defaults.update(kwargs)
return super(IPAddressField, self).formfield(**defaults)
class MACAddressField(fields.CharField):
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 12
super(MACAddressField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {'form_class': forms.MACAddressField}
defaults.update(kwargs)
return super(MACAddressField, self).formfield(**defaults)
class NameField(fields.CharField):
def __init__(self, hostname=True, *args, **kwargs):
kwargs['max_length'] = 255
self.hostname = hostname
super(NameField, self).__init__(*args, **kwargs)
def clean(self, value, *args, **kwargs):
value = super(NameField, self).clean(value, *args, **kwargs)
if value:
utils.validate_dns_name(value, self.hostname)
return value
class TimestampModel(Model):
created = DateTimeField(auto_now_add=True)
updated = DateTimeField(auto_now=True)
class Meta:
abstract = True
class BaseModel(TimestampModel):
class Meta:
abstract = True
class ModifierModel(BaseModel):
MOD_CHOICES = (
(1, 'set'),
(2, 'unset'),
)
name = CharField(max_length=255)
operator = IntegerField(choices=MOD_CHOICES)
value = TextField(blank=True)
priority = IntegerField(default=0)
def __unicode__(self):
return u'%s %s %s' % (self.name, self.operator, self.value)
class Meta:
abstract = True
ordering = ['name', 'priority']
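# Hedged sketch (not part of nodehub): how the custom fields above might be
# combined in a concrete model; the model and field names are assumptions.
#
#   class Interface(BaseModel):
#       hostname = NameField()
#       mac = MACAddressField()
#       address = IPAddressField()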
| crazed/nodehub | nodehub/db/models.py | Python | mit | 3,164 |
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.settings import WAF_ATTACK_VECTORS
__product__ = "ExpressionEngine (EllisLab)"
def detect(get_page):
retval = False
for vector in WAF_ATTACK_VECTORS:
page, _, _ = get_page(get=vector)
retval = "Invalid GET Data" in page
if retval:
break
return retval
| glaudsonml/kurgan-ai | tools/sqlmap/waf/expressionengine.py | Python | apache-2.0 | 462 |
import os
ROOT_PATH = os.path.dirname(__file__)
TEMPLATE_DEBUG = DEBUG = True
MANAGERS = ADMINS = ()
DATABASE_ENGINE = 'sqlite3'
DATABASE_NAME = os.path.join(ROOT_PATH, 'testdb.sqlite')
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = True
MEDIA_ROOT = ''
MEDIA_URL = ''
ADMIN_MEDIA_PREFIX = '/media/'
SECRET_KEY = '2+@4vnr#v8e273^+a)g$8%dre^dwcn#d&n#8+l6jk7r#$p&3zk'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.core.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.request",
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = (os.path.join(ROOT_PATH, '../../templates'),)
INSTALLED_APPS = (
'django_roa',
'twitter_roa',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
SESSION_ENGINE = "django.contrib.sessions.backends.file"
SERIALIZATION_MODULES = {
'twitter' : 'examples.twitter_roa.serializers',
}
## ROA custom settings
ROA_MODELS = True # set to False if you'd like to develop/test locally
ROA_FORMAT = 'twitter' # json or xml
ROA_DJANGO_ERRORS = True # useful to ease debugging if you use test server
ROA_URL_OVERRIDES_DETAIL = {
'twitter_roa.tweet': lambda o: 'http://api.twitter.com/1/statuses/show/%s.json' % o.id,
'twitter_roa.user': lambda o: 'http://api.twitter.com/1/users/show.json?user_id=%s' % o.id,
}
ROA_ARGS_NAMES_MAPPING = {
'filter_id__exact': 'user_id',
}
ROA_CUSTOM_ARGS = {
'include_entities': 'false',
'skip_status': 'true',
}
## Logging settings
import logging
logging.basicConfig(level=logging.DEBUG, format="%(name)s - %(message)s")
| bjarnoldus/django-roa | examples/twitter_roa/settings.py | Python | bsd-3-clause | 2,042 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import re
from py3status.autodoc import (
create_auto_documentation,
Py3statusLexer,
ScreenshotDirective,
)
from py3status.version import version as py3_version
# py3status documentation build configuration file, created by
# sphinx-quickstart on Mon Mar 6 15:17:30 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ["sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.todo"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "py3status"
copyright = "2017, ultrabug & others"
author = "ultrabug & others"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "py3statusdoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, "py3status.tex", "py3status Documentation", "ultrabug", "manual")
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "py3status", "py3status Documentation", [author], 1)]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"py3status",
"py3status Documentation",
author,
"py3status",
"One line description of project.",
"Miscellaneous",
)
]
# =============================
# py3status specific code
# =============================
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version_info = [s for s in re.split("([0-9]+)", py3_version) if s.isdigit()][:2]
version = ".".join(version_info)
# The full version, including alpha/beta/rc tags.
release = py3_version
def setup(sphinx):
"""
This will be called by sphinx.
"""
create_auto_documentation()
# add the py3status lexer (for code blocks)
from sphinx.highlighting import lexers
lexers["py3status"] = Py3statusLexer()
# enable screenshot directive for dynamic screenshots
sphinx.add_directive("screenshot", ScreenshotDirective)
| Andrwe/py3status | doc/conf.py | Python | bsd-3-clause | 5,565 |
#!/usr/bin/env python
# encoding: utf-8
name = "Solute Descriptors"
shortDesc = u""
longDesc = u"""
From Abraham J. Chem. Soc. 1994
"""
entry(
index = 1,
label = "methane",
molecule = "C",
solute = SoluteData(
S = 0,
B = 0,
E = 0,
L = -0.323,
A = 0,
V = 0.2495,
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 2,
label = "ethane",
molecule = "CC",
solute = SoluteData(
S = 0,
B = 0,
E = 0,
L = 0.492,
A = 0,
V = 0.3904,
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 3,
label = "propane",
molecule = "CCC",
solute = SoluteData(
S = 0,
B = 0,
E = 0,
L = 1.05,
A = 0,
V = 0.5313,
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 4,
label = "n-butane",
molecule = "CCCC",
solute = SoluteData(
S = 0,
B = 0,
E = 0,
L = 1.615,
A = 0,
V = 0.6722,
),
shortDesc = u"""""",
longDesc =
u"""
""",
) | Molecular-Image-Recognition/Molecular-Image-Recognition | code/rmgpy/test_data/testing_database/solvation/libraries/solute.py | Python | mit | 1,145 |
from sqlalchemy.sql import text
from featuring import db
from featuring.entities.ticket.models import Ticket
from featuring.utilities.common import maybe_update
from .persistence import normalize_db_update_err, normalize_db_fetch_err
# --------------------------------------------------------------------------------------------------
# Ticket CRUD
# --------------------------------------------------------------------------------------------------
@normalize_db_update_err
def create_one_ticket(title, product_id, client_id,
description=None,
deadline=None,
priority=None,
url=None):
instance = Ticket(**locals())
db.session.add(instance)
# Priority requires special handling.
update_ticket_priority(instance, priority, allocate=True)
return instance
@normalize_db_fetch_err
def retrieve_ticket_by_ticket_id(ticket_id):
return Ticket.query.filter_by(ticket_id=ticket_id).one()
def retrieve_all_tickets():
return Ticket.query.order_by(Ticket.ticket_id)
@normalize_db_fetch_err
@normalize_db_update_err
def update_ticket_by_ticket_id(ticket_id=None,
title=None,
product_id=None,
client_id=None,
description=None,
deadline=None,
priority=None,
url=None):
ticket = Ticket.query.filter_by(ticket_id=ticket_id).one()
maybe_update(ticket, 'title', title)
maybe_update(ticket, 'product_id', product_id)
maybe_update(ticket, 'client_id', client_id)
maybe_update(ticket, 'description', description)
maybe_update(ticket, 'deadline', deadline)
maybe_update(ticket, 'url', url)
db.session.flush()
# Priority requires special handling.
if priority is not None:
update_ticket_priority(ticket, priority)
return ticket
def update_ticket_priority(ticket, priority, allocate=False):
# Priority requires special handling.
with db.session.no_autoflush:
# Lock table to reliably use table aggregation value on computation.
Ticket.query.with_for_update()
# Adjust new incoming value to range [1, maximum].
maximum = _greatest_priority_number() + (1 if allocate else 0)
priority = priority or (maximum if allocate else 0) # Set dynamic default
priority = min(priority, maximum) # Adjust to maximum
priority = max(1, priority) # Adjust to minimum
previous_priority = ticket.priority
no_change = priority == previous_priority
# Optimized early return in case no change is needed.
if not allocate and no_change:
return
# Forces Shift+ in case there is no previous value.
# Forces Shift+ in case this is an explicit allocation.
if not previous_priority or allocate:
previous_priority = priority + 1
# Update affected rows priorities.
_shift_switch_priority_block(
floor=min(previous_priority, priority),
ceil=max(previous_priority, priority),
            shift=(previous_priority > priority) - (previous_priority < priority))  # cmp-style sign of (previous - new)
ticket.priority = priority
db.session.flush()
def delete_ticket_by_ticket_id(ticket_id):
return Ticket.query.filter_by(ticket_id=ticket_id).delete()
def _greatest_priority_number():
return db.session.query(db.func.max(Ticket.priority)).scalar() or 0
def _shift_switch_priority_block(floor, ceil, shift):
if db.engine.name == 'mysql':
return _shift_switch_priority_block_mysql(floor, ceil, shift)
raise NotImplementedError(f'not supported when using {db.engine.name} as the database')
def _shift_switch_priority_block_mysql(floor, ceil, shift):
"""Shifts the `priority` column of rows respective to the given range according to `shift`.
    The overridden value (either floor or ceil, depending on shift) assumes its counterpart. So
when the block is shifted by -1, floor will become ceil in the end. When the block is shifted by
+1, ceil will become floor in the end.
    NOTE 1: the reason this function uses plain SQL instead of the ORM is that there is no good
    equivalent to the `update ... order by` syntax in SQLAlchemy at the ORM layer.
NOTE 2: a solution to this kind of problem can be implemented in a more robust way using native
database triggers depending on specific database deployment constraints (transaction level) and
if the database supports deferred unique integrity checks. As per MySQL 8.x that type of
    deferred checking is not possible, and bulk updating sequential unique integers is fated to
violate a constraint. The currently accepted solution is to leverage the `order by` support on
update operations to make sure rows are updated in a predictable order. Additionally to that,
to fulfill this specific use case an out-of-range temporary placeholder has to be used to avoid
boundary clashes.
@param floor: int, target range initial value
@param ceil: int, target range final value
@param shift: a positive or negative integer indicating the shift direction
"""
# Placeholder must be a value outside the integers range being shifted.
placeholder = _greatest_priority_number() + 2
params = dict(floor=floor, ceil=ceil, placeholder=placeholder)
if shift < 0:
db.session.execute(text('''
update tickets
set priority = :placeholder
where priority = :floor
'''), params)
db.session.execute(text('''
update tickets
set priority = priority - 1
where priority > :floor and priority <= :ceil
order by priority asc;
'''), params)
        # Restore the parked row: the old floor takes the ceil position.
        db.session.execute(text('''
            update tickets
            set priority = :ceil
            where priority = :placeholder
        '''), params)
elif shift > 0:
db.session.execute(text('''
update tickets
set priority = :placeholder
where priority = :ceil
'''), params)
db.session.execute(text('''
update tickets
set priority = priority + 1
where priority >= :floor and priority < :ceil
order by priority desc;
'''), params)
        # Restore the parked row: the old ceil takes the floor position.
        db.session.execute(text('''
            update tickets
            set priority = :floor
            where priority = :placeholder
        '''), params)
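# Worked example (illustrative only, not from the original module): with
# priorities [1, 2, 3, 4] and a ticket moving from 4 up to 2,
# update_ticket_priority() calls
# _shift_switch_priority_block(floor=2, ceil=4, shift=+1): the row at
# priority 4 is parked on the out-of-range placeholder, the rows at 3 and 2
# are bumped to 4 and 3 in descending order (so the unique index never
# clashes), and the parked row finally takes priority 2.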
| ccortezia/featuring | featuring-flask-api/featuring/controllers/crud_ticket.py | Python | mit | 6,593 |
#!/usr/bin/env python
from __future__ import division, absolute_import, unicode_literals
import re
import sys
from qtpy.QtCore import Qt
from qtpy.QtCore import QEvent
from qtpy.QtCore import Signal
from qtpy.QtGui import QMouseEvent
from qtpy.QtGui import QSyntaxHighlighter
from qtpy.QtGui import QTextCharFormat
from qtpy.QtGui import QTextCursor
from qtpy.QtWidgets import QAction
from qtpy.QtWidgets import QApplication
from .. import qtutils
from .. import gitcfg
from .. import spellcheck
from ..i18n import N_
from .text import HintedTextEdit
class SpellCheckTextEdit(HintedTextEdit):
def __init__(self, hint, parent=None):
HintedTextEdit.__init__(self, hint, parent)
# Default dictionary based on the current locale.
self.spellcheck = spellcheck.NorvigSpellCheck()
self.highlighter = Highlighter(self.document(), self.spellcheck)
def mousePressEvent(self, event):
if event.button() == Qt.RightButton:
# Rewrite the mouse event to a left button event so the cursor is
# moved to the location of the pointer.
event = QMouseEvent(QEvent.MouseButtonPress,
event.pos(),
Qt.LeftButton,
Qt.LeftButton,
Qt.NoModifier)
HintedTextEdit.mousePressEvent(self, event)
def context_menu(self):
popup_menu = HintedTextEdit.createStandardContextMenu(self)
# Select the word under the cursor.
cursor = self.textCursor()
cursor.select(QTextCursor.WordUnderCursor)
self.setTextCursor(cursor)
# Check if the selected word is misspelled and offer spelling
# suggestions if it is.
spell_menu = None
if self.textCursor().hasSelection():
text = self.textCursor().selectedText()
if not self.spellcheck.check(text):
title = N_('Spelling Suggestions')
spell_menu = qtutils.create_menu(title, self)
for word in self.spellcheck.suggest(text):
action = SpellAction(word, spell_menu)
action.result.connect(self.correct)
spell_menu.addAction(action)
# Only add the spelling suggests to the menu if there are
# suggestions.
if len(spell_menu.actions()) > 0:
popup_menu.addSeparator()
popup_menu.addMenu(spell_menu)
return popup_menu, spell_menu
def contextMenuEvent(self, event):
popup_menu, _spell_menu = self.context_menu()
popup_menu.exec_(self.mapToGlobal(event.pos()))
def correct(self, word):
"""Replaces the selected text with word."""
cursor = self.textCursor()
cursor.beginEditBlock()
cursor.removeSelectedText()
cursor.insertText(word)
cursor.endEditBlock()
class Highlighter(QSyntaxHighlighter):
WORDS = r"(?iu)[\w']+"
def __init__(self, doc, spellcheck_widget):
QSyntaxHighlighter.__init__(self, doc)
self.spellcheck = spellcheck_widget
self.enabled = False
def enable(self, enabled):
self.enabled = enabled
self.rehighlight()
def highlightBlock(self, text):
if not self.enabled:
return
fmt = QTextCharFormat()
fmt.setUnderlineColor(Qt.red)
fmt.setUnderlineStyle(QTextCharFormat.SpellCheckUnderline)
for word_object in re.finditer(self.WORDS, text):
if not self.spellcheck.check(word_object.group()):
self.setFormat(word_object.start(),
word_object.end() - word_object.start(), fmt)
class SpellAction(QAction):
"""QAction that returns the text in a signal.
"""
result = Signal(object)
def __init__(self, *args):
QAction.__init__(self, *args)
self.triggered.connect(self.correct)
def correct(self):
self.result.emit(self.text())
def main(args=sys.argv):
app = QApplication(args)
widget = SpellCheckTextEdit('Type here')
widget.show()
widget.raise_()
return app.exec_()
if __name__ == '__main__':
sys.exit(main())
| Vdragon/git-cola | cola/widgets/spellcheck.py | Python | gpl-2.0 | 4,261 |
# coding: utf-8
from django.test import TestCase
from feedhoos.worker.models.entry import EntryModel
from feedhoos.worker.models.entry import MAX_CONTENT_LENGTH
import datetime
from feedparser import FeedParserDict
class EntryModelTestCase(TestCase):
fixtures = [
'feedhoos/fixtures/entry.json',
]
def setUp(self):
super(EntryModelTestCase, self).setUp()
def tearDown(self):
super(EntryModelTestCase, self).tearDown()
def test_add(self):
before_count = EntryModel.objects.all().count()
feed_id = 1
updated_parsed = datetime.date(2013, 11, 23).timetuple()
entry = FeedParserDict({
"link": "http://example.con/1",
"title": "test1 title",
"updated_parsed": updated_parsed,
"content": [{"value": "content"}],
"summary": "summary"
})
EntryModel.add(feed_id, entry)
after_count = EntryModel.objects.all().count()
self.assertEqual(before_count + 1, after_count)
def test_dict(self):
entry_model = EntryModel.objects.get(pk=1)
result = {
'id': 1,
'updated': '2014-02-21 00:03',
'title': 'title1',
'url': 'http://example.com/110001',
'content': 'content1',
'feed_id': 1
}
self.assertEqual(result, entry_model.dict)
def test_updated_stftime(self):
entry_model = EntryModel.objects.get(pk=1)
self.assertEqual('2014-02-21 00:03', entry_model.updated_stftime)
def test_count(self):
feed_id = 1
c = EntryModel.count(feed_id)
self.assertEqual(2, c)
min_updated = 1392942999
c = EntryModel.count(feed_id, min_updated)
self.assertEqual(1, c)
def test_get_entries(self):
empty = EntryModel.get_entries(1000, 1)
self.assertEqual(len([]), len(empty))
entries = EntryModel.get_entries(1, 1)
self.assertEqual(len(entries), 2)
self.assertTrue(isinstance(entries[0], EntryModel))
entries = EntryModel.get_entries(1, 1, 1392942999)
self.assertEqual(len(entries), 1)
def test_get_timeline(self):
empty = EntryModel.get_timeline(1000, 1)
self.assertEqual(len([]), len(empty))
entries = EntryModel.get_timeline(1, 1)
self.assertEqual(len(entries), 2)
self.assertTrue(isinstance(entries[0], EntryModel))
def test_get_folder(self):
empty = EntryModel.get_folder([1000, 1001], 1)
self.assertEqual(len([]), len(empty))
entries = EntryModel.get_folder([1, 2], 1)
self.assertEqual(len(entries), 3)
self.assertTrue(isinstance(entries[0], EntryModel))
def test_get_content(self):
updated_parsed = datetime.date(2013, 11, 23).timetuple()
entry = FeedParserDict({
"link": "http://example.con/1",
"title": "test1 title",
"updated_parsed": updated_parsed,
"content": [{"value": "content"}],
"summary": "summary"
})
content = EntryModel.get_content(entry)
self.assertEqual(content, "content")
entry = FeedParserDict({
"link": "http://example.con/1",
"title": "test1 title",
"updated_parsed": updated_parsed,
"content": [{"value": "t" * (MAX_CONTENT_LENGTH + 1)}],
"summary": "summary"
})
content = EntryModel.get_content(entry)
self.assertEqual(content, "summary")
| 38elements/feedhoos | feedhoos/tests/entry.py | Python | mit | 3,528 |
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
int_or_none,
float_or_none,
unified_timestamp,
url_or_none,
)
class VzaarIE(InfoExtractor):
_VALID_URL = r'https?://(?:(?:www|view)\.)?vzaar\.com/(?:videos/)?(?P<id>\d+)'
_TESTS = [{
# HTTP and HLS
'url': 'https://vzaar.com/videos/1152805',
'md5': 'bde5ddfeb104a6c56a93a06b04901dbf',
'info_dict': {
'id': '1152805',
'ext': 'mp4',
'title': 'sample video (public)',
},
}, {
'url': 'https://view.vzaar.com/27272/player',
'md5': '3b50012ac9bbce7f445550d54e0508f2',
'info_dict': {
'id': '27272',
'ext': 'mp3',
'title': 'MP3',
},
}]
@staticmethod
def _extract_urls(webpage):
return re.findall(
r'<iframe[^>]+src=["\']((?:https?:)?//(?:view\.vzaar\.com)/[0-9]+)',
webpage)
def _real_extract(self, url):
video_id = self._match_id(url)
video_data = self._download_json(
'http://view.vzaar.com/v2/%s/video' % video_id, video_id)
title = video_data['videoTitle']
formats = []
source_url = url_or_none(video_data.get('sourceUrl'))
if source_url:
f = {
'url': source_url,
'format_id': 'http',
}
if 'audio' in source_url:
f.update({
'vcodec': 'none',
'ext': 'mp3',
})
else:
f.update({
'width': int_or_none(video_data.get('width')),
'height': int_or_none(video_data.get('height')),
'ext': 'mp4',
'fps': float_or_none(video_data.get('fps')),
})
formats.append(f)
video_guid = video_data.get('guid')
usp = video_data.get('usp')
if isinstance(video_guid, compat_str) and isinstance(usp, dict):
m3u8_url = ('http://fable.vzaar.com/v4/usp/%s/%s.ism/.m3u8?'
% (video_guid, video_id)) + '&'.join(
'%s=%s' % (k, v) for k, v in usp.items())
formats.extend(self._extract_m3u8_formats(
m3u8_url, video_id, 'mp4', entry_protocol='m3u8_native',
m3u8_id='hls', fatal=False))
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'thumbnail': self._proto_relative_url(video_data.get('poster')),
'duration': float_or_none(video_data.get('videoDuration')),
'timestamp': unified_timestamp(video_data.get('ts')),
'formats': formats,
}
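# Illustrative note (not part of the extractor): _extract_urls() is a plain
# regex scan over page HTML, so it can be exercised standalone; the embed
# markup below is invented.
#   VzaarIE._extract_urls('<iframe src="https://view.vzaar.com/27272/player">')
# returns ['https://view.vzaar.com/27272'].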
| valmynd/MediaFetcher | src/plugins/youtube_dl/youtube_dl/extractor/vzaar.py | Python | gpl-3.0 | 2,278 |
'''
Created on 01/11/2013
@author: Dani
Just some functions used for a basic perceptron
The objectives for this module is to introduce perceptron as matrices
and consequently a little introduction for numpy
'''
import numpy as np
def costFunction(theta, X, y):
    """Squared-error cost J and its gradient for the linear hypothesis h = X.theta."""
    m, n = X.shape
    # Implementation note: np.dot is used throughout so the matrix products
    # are explicit; elementwise * would be wrong for these products.
    h = np.dot(X, theta)  # hypothesis
    err = h - y
    J = (1. / (2. * m)) * np.dot(err.T, err)  # a 1x1 matrix holding the cost
    grad = (1. / m) * np.dot(X.T, err)
    return J, grad
def gradientDescent(theta, X, y, alpha, error, maxIts):
    '''Basic implementation of batch gradient descent.'''
    history = np.zeros((maxIts, 1))
    for i in xrange(maxIts):
        cost, grad = costFunction(theta, X, y)
        theta = theta - (alpha * grad)
        history[i] = cost  # record the cost at every iteration
        if abs(cost) < error:
            break
    return (history, theta)
if __name__ == '__main__':
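    # Hedged demo (not in the original file): fit y = 2*x + 1 on a tiny
    # synthetic dataset; the data and hyperparameters below are assumptions.
    X = np.array([[1., 0.], [1., 1.], [1., 2.], [1., 3.]])  # bias column + feature
    y = np.array([[1.], [3.], [5.], [7.]])
    theta0 = np.zeros((2, 1))
    history, theta = gradientDescent(theta0, X, y, 0.1, 1e-9, 5000)
    print 'learned theta:', theta.ravel()  # should approach [1, 2]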
pass | DaniM/mscthesis | ANN/LinearRegressionPerceptron.py | Python | gpl-3.0 | 1,056 |
'''
-----------------------------------------------------------
BaseHTTPServer - base classes for implementing web servers:
-----------------------------------------------------------
Purpose: BaseHTTPServer includes classes that can form the basis of a web server.
Available In: 1.4 and later
BaseHTTPServer uses classes from SocketServer to create base classes for making
HTTP servers. HTTPServer can be used directly, but the BaseHTTPRequestHandler is
intended to be extended to handle each protocol method (GET, POST, etc.).
To add support for an HTTP method in your request handler class, implement the
method do_METHOD(), replacing METHOD with the name of the HTTP method.
For example, do_GET(), do_POST(), etc. For consistency, the method takes no
arguments. All of the parameters for the request are parsed by
BaseHTTPRequestHandler and stored as instance attributes of the request instance.
https://pymotw.com/2/BaseHTTPServer/index.html#module-BaseHTTPServer
'''
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from SocketServer import ThreadingMixIn
import threading
# EXAMPLE 1: HTTP GET - Threading and Forking:
# ==============================================================================
# HTTPServer is a simple subclass of SocketServer.TCPServer, and does not use
# multiple threads or processes to handle requests. To add threading or forking,
# create a new class using the appropriate mix-in from SocketServer.
class Handler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
self.end_headers()
message = threading.currentThread().getName()
self.wfile.write(message)
self.wfile.write('\n')
return
class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
'''Handle requests in a separate thread.'''
if __name__ == '__main__':
server = ThreadedHTTPServer(('localhost', 8080), Handler)
print 'Starting server, use <Ctrl-C> to stop'
server.serve_forever() | rolandovillca/python_basis | web/http_server_get_threading.py | Python | mit | 1,984 |
"""Data in internal awss structures for testing awss test scripts.
Contains:
ii_noami (dict) = instance dict without AMI name.
ii_all (dict) = instance dict with AMI name.
ami_lookup (dict) = lookup dict with ami-number: name.
ami_user_lu (list) = list of tuples with login-name: ami-name.
tags_list (list) = list of dictionaries, containing tags in
name: value format.
tags_dict (dict) = indexed dict of dictionaries containing
tags in name: value format.
expected_info (dict) = i_info dict from rawdata.
expected_info_nt (dict) = i_info dict from rawdata_nt.
expected_info_term (dict) = i_info dict from rawdata_term.
"""
ii_noami = {
0: {'ami': 'ami-16efb076',
'id': 'i-0df5fd60bfd215ced',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Department': 'Operations',
'Name': 'Ubuntu',
'Owner': '[email protected]',
'Role': 'Regression',
'Stage': 'Archive',
'Team': 'DataAdmins'}},
1: {'ami': 'ami-3e21725e',
'id': 'i-052d2bfaeb676bc86',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Department': 'Marketing',
'Name': 'Ubuntu',
'Owner': '[email protected]',
'Project': 'Reporting',
'Stage': 'Pre-Release'}},
2: {'ami': 'ami-e09acc80',
'id': 'i-0cfd71bb6ec6f0fa5',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Department': 'IT',
'Name': 'Suse',
'Owner': '[email protected]',
'Role': 'Test',
'Stage': 'Alpha',
'Team': 'Dev10a'}},
3: {'ami': 'ami-165a0876',
'id': 'i-017df2336bf679c40',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Department': 'Operations',
'Name': 'Amazon',
'Owner': '[email protected]',
'Role': 'Dev',
'Team': 'TestUsers'}},
4: {'ami': 'ami-af4333cf',
'id': 'i-058f43c9b690e3e5f',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Name': 'CentOS',
'Owner': '[email protected]',
'Project': 'Reporting',
'Role': 'Dev',
'Stage': 'Production',
'Team': 'Dev10a'}},
5: {'ami': 'ami-0343ae47',
'id': 'i-06a88b75aa2cb2e6f',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Name': 'Debian',
'Owner': '[email protected]',
'Project': 'POS-Migration',
'Role': 'Dev',
'Stage': 'Beta'}},
6: {'ami': 'ami-2cade64c',
'id': 'i-054cd4e8d31bd2181',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Department': 'IT',
'Name': 'RHEL',
'Project': 'SysAdmin',
'Role': 'Test',
'Stage': 'Beta'}},
7: {'ami': 'ami-02765547',
'id': 'i-0e0919c061f20ef77',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Name': 'CentOS',
'Owner': '[email protected]',
'Role': 'Community',
'Stage': 'Pre-Alpha'}},
8: {'ami': 'ami-3d3a6b78',
'id': 'i-04a10a9a89f05523d',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Department': 'IT',
'Name': 'Fedora',
'Project': 'SysAdmin',
'Role': 'Regression',
'Team': 'Dev10a'}},
9: {'ami': 'ami-a2346fc2',
'id': 'i-06dc920a34316ea29',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Department': 'IT',
'Name': 'WordPress',
'Project': 'Web UI',
'Stage': 'Alpha',
'Team': 'Dev10a'}}}
ii_all = {
0: {'ami': 'ami-16efb076',
'aminame': 'ubuntu/images/hvm-ssd/ubuntu-xenial-16.04'
'-amd64-server-20170221',
'id': 'i-0df5fd60bfd215ced',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Department': 'Operations',
'Name': 'Ubuntu',
'Owner': '[email protected]',
'Role': 'Regression',
'Stage': 'Archive',
'Team': 'DataAdmins'}},
1: {'ami': 'ami-3e21725e',
'aminame': 'ubuntu/images/hvm-ssd/ubuntu-trusty-14.04'
'-amd64-server-20170110',
'id': 'i-052d2bfaeb676bc86',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Department': 'Marketing',
'Name': 'Ubuntu',
'Owner': '[email protected]',
'Project': 'Reporting',
'Stage': 'Pre-Release'}},
2: {'ami': 'ami-e09acc80',
'aminame': 'suse-sles-12-sp2-v20161214-hvm-ssd-x86_64',
'id': 'i-0cfd71bb6ec6f0fa5',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Department': 'IT',
'Name': 'Suse',
'Owner': '[email protected]',
'Role': 'Test',
'Stage': 'Alpha',
'Team': 'Dev10a'}},
3: {'ami': 'ami-165a0876',
'aminame': 'amzn-ami-hvm-2016.09.1.20170119-x86_64-gp2',
'id': 'i-017df2336bf679c40',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Department': 'Operations',
'Name': 'Amazon',
'Owner': '[email protected]',
'Role': 'Dev',
'Team': 'TestUsers'}},
4: {'ami': 'ami-af4333cf',
'aminame': 'CentOS Linux 7 x86_64 HVM EBS '
'1602-b7ee8a69-ee97-4a49-9e68-afaee216db2e-ami-d7e1d2bd.3',
'id': 'i-058f43c9b690e3e5f',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Name': 'CentOS',
'Owner': '[email protected]',
'Project': 'Reporting',
'Role': 'Dev',
'Stage': 'Production',
'Team': 'Dev10a'}},
5: {'ami': 'ami-0343ae47',
'aminame': 'debian-jessie-amd64-hvm-2015-04-25-23-22-ebs',
'id': 'i-06a88b75aa2cb2e6f',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Name': 'Debian',
'Owner': '[email protected]',
'Project': 'POS-Migration',
'Role': 'Dev',
'Stage': 'Beta'}},
6: {'ami': 'ami-2cade64c',
'aminame': 'RHEL-7.3_HVM_GA-20161026-x86_64-1-Hourly2-GP2',
'id': 'i-054cd4e8d31bd2181',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Department': 'IT',
'Name': 'RHEL',
'Project': 'SysAdmin',
'Role': 'Test',
'Stage': 'Beta'}},
7: {'ami': 'ami-02765547',
'aminame': 'RightImage_CentOS_6.3_x64_v5.8.8.8_EBS',
'id': 'i-0e0919c061f20ef77',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Name': 'CentOS',
'Owner': '[email protected]',
'Role': 'Community',
'Stage': 'Pre-Alpha'}},
8: {'ami': 'ami-3d3a6b78',
'aminame': 'fedora-8-x86_64-v1.14-std',
'id': 'i-04a10a9a89f05523d',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Department': 'IT',
'Name': 'Fedora',
'Project': 'SysAdmin',
'Role': 'Regression',
'Team': 'Dev10a'}},
9: {'ami': 'ami-a2346fc2',
'aminame': 'bitnami-wordpress-4.7.3-0-linux-ubuntu-14.04.3-x86_64'
'-ebs-mp-dff9bfa7-e43e-4c06-bafd-756e9d331d18-ami-6cac0a7a.4',
'id': 'i-06dc920a34316ea29',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'stopped',
'tag': {'Department': 'IT',
'Name': 'WordPress',
'Project': 'Web UI',
'Stage': 'Alpha',
'Team': 'Dev10a'}}}
ami_lookup = {
'ami-16efb076': 'ubuntu/images/hvm-ssd/ubuntu-xenial-16.04'
'-amd64-server-20170221',
'ami-3e21725e': 'ubuntu/images/hvm-ssd/ubuntu-trusty-14.04'
'-amd64-server-20170110',
'ami-e09acc80': 'suse-sles-12-sp2-v20161214-hvm-ssd-x86_64',
'ami-165a0876': 'amzn-ami-hvm-2016.09.1.20170119-x86_64-gp2',
'ami-af4333cf': 'CentOS Linux 7 x86_64 HVM EBS '
'1602-b7ee8a69-ee97-4a49-9e68-afaee216db2e-ami-d7e1d2bd.3',
'ami-0343ae47': 'debian-jessie-amd64-hvm-2015-04-25-23-22-ebs',
'ami-2cade64c': 'RHEL-7.3_HVM_GA-20161026-x86_64-1-Hourly2-GP2',
'ami-02765547': 'RightImage_CentOS_6.3_x64_v5.8.8.8_EBS',
'ami-3d3a6b78': 'fedora-8-x86_64-v1.14-std',
'ami-a2346fc2': 'bitnami-wordpress-4.7.3-0-linux-ubuntu-14.04.3-x86_64'
'-ebs-mp-dff9bfa7-e43e-4c06-bafd-756e9d331d18-ami-6cac0a7a.4'}
# default name for ec2: https://alestic.com/2014/01/ec2-ssh-username/
ami_user_lu = [
('ubuntu', 'ubuntu/images/hvm-ssd/ubuntu-xenial-16.04'
'-amd64-server-20170221'),
('ubuntu', 'ubuntu/images/hvm-ssd/ubuntu-trusty-14.04'
'-amd64-server-20170110'),
('ec2-user', 'suse-sles-12-sp2-v20161214-hvm-ssd-x86_64'),
('ec2-user', 'amzn-ami-hvm-2016.09.1.20170119-x86_64-gp2'),
('centos', 'CentOS Linux 7 x86_64 HVM EBS '
'1602-b7ee8a69-ee97-4a49-9e68-afaee216db2e-ami-d7e1d2bd.3'),
('admin', 'debian-jessie-amd64-hvm-2015-04-25-23-22-ebs'),
('ec2-user', 'RHEL-7.3_HVM_GA-20161026-x86_64-1-Hourly2-GP2'),
('centos', 'RightImage_CentOS_6.3_x64_v5.8.8.8_EBS'),
('root', 'fedora-8-x86_64-v1.14-std'),
('ubuntu', 'bitnami-wordpress-4.7.3-0-linux-ubuntu-14.04.3-x86_64'
'-ebs-mp-dff9bfa7-e43e-4c06-bafd-756e9d331d18-ami-6cac0a7a.4')]
tags_list = [
{'Name': 'Ubuntu', 'Owner': '[email protected]', 'Role': 'Regression',
'Team': 'DataAdmins', 'Department': 'Operations', 'Stage': 'Archive'},
{'Department': 'Marketing', 'Owner': '[email protected]',
'Project': 'Reporting', 'Name': 'Ubuntu', 'Stage': 'Pre-Release'},
{'Name': 'Suse', 'Owner': '[email protected]', 'Role': 'Test',
'Team': 'Dev10a', 'Department': 'IT', 'Stage': 'Alpha'},
{'Department': 'Operations', 'Owner': '[email protected]',
'Role': 'Dev', 'Name': 'Amazon', 'Team': 'TestUsers'},
{'Name': 'CentOS', 'Project': 'Reporting', 'Role': 'Dev',
'Team': 'Dev10a', 'Owner': '[email protected]', 'Stage': 'Production'},
{'Owner': '[email protected]', 'Project': 'POS-Migration',
'Role': 'Dev', 'Name': 'Debian', 'Stage': 'Beta'},
{'Department': 'IT', 'Project': 'SysAdmin', 'Role': 'Test',
'Name': 'RHEL', 'Stage': 'Beta'},
{'Owner': '[email protected]', 'Role': 'Community',
'Name': 'CentOS', 'Stage': 'Pre-Alpha'},
{'Department': 'IT', 'Project': 'SysAdmin', 'Role': 'Regression',
'Name': 'Fedora', 'Team': 'Dev10a'},
{'Department': 'IT', 'Project': 'Web UI', 'Team': 'Dev10a',
'Name': 'WordPress', 'Stage': 'Alpha'}]
tags_dict = {
0: {'Name': 'Ubuntu', 'Owner': '[email protected]', 'Role': 'Regression',
'Team': 'DataAdmins', 'Department': 'Operations', 'Stage': 'Archive'},
1: {'Department': 'Marketing', 'Owner': '[email protected]',
'Project': 'Reporting', 'Name': 'Ubuntu', 'Stage': 'Pre-Release'},
2: {'Name': 'Suse', 'Owner': '[email protected]', 'Role': 'Test',
'Team': 'Dev10a', 'Department': 'IT', 'Stage': 'Alpha'},
3: {'Department': 'Operations', 'Owner': '[email protected]',
'Role': 'Dev', 'Name': 'Amazon', 'Team': 'TestUsers'},
4: {'Name': 'CentOS', 'Project': 'Reporting', 'Role': 'Dev',
'Team': 'Dev10a', 'Owner': '[email protected]', 'Stage': 'Production'},
5: {'Owner': '[email protected]', 'Project': 'POS-Migration',
'Role': 'Dev', 'Name': 'Debian', 'Stage': 'Beta'},
6: {'Department': 'IT', 'Project': 'SysAdmin', 'Role': 'Test',
'Name': 'RHEL', 'Stage': 'Beta'},
7: {'Owner': '[email protected]', 'Role': 'Community',
'Name': 'CentOS', 'Stage': 'Pre-Alpha'},
8: {'Department': 'IT', 'Project': 'SysAdmin', 'Role': 'Regression',
'Name': 'Fedora', 'Team': 'Dev10a'},
9: {'Department': 'IT', 'Project': 'Web UI', 'Team': 'Dev10a',
'Name': 'WordPress', 'Stage': 'Alpha'}}
expected_info = {
0: {'ami': 'ami-3d3a6b78',
'id': 'i-04a10a9a89f05523d',
'pub_dns_name': 'ec2-54-219-64-145.us-west-1.compute.amazonaws.com',
'ssh_key': 'james',
'state': 'running',
'tag': {'Department': 'IT',
'Name': 'Fedora',
'Project': 'SysAdmin',
'Role': 'Regression',
'Team': 'Dev10a'}}}
expected_info_nt = {
0: {'ami': 'ami-3d3a6b78',
'id': 'i-04a10a9a89f05523d',
'pub_dns_name': 'ec2-54-219-64-145.us-west-1.compute.amazonaws.com',
'ssh_key': 'james',
'state': 'running',
'tag': {'Name': ''}}}
expected_info_term = {
0: {'ami': 'ami-05cf2265',
'id': 'i-04a10a9a89f05523d',
'pub_dns_name': '',
'ssh_key': 'james',
'state': 'terminated',
'tag': {'Name': ''}}}
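# Hedged usage sketch (not part of the fixture data): resolving the ssh login
# name for an AMI id by joining ami_lookup with ami_user_lu; the helper name
# is an assumption for illustration.
def _login_for_ami(ami_id):
    ami_name = ami_lookup.get(ami_id, "")
    for user, name in ami_user_lu:
        if name == ami_name:
            return user
    return None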
| robertpeteuil/aws-shortcuts | test/awsstestdata.py | Python | gpl-3.0 | 13,661 |
#!/usr/bin/env python3
import os
import time
import json
import requests
import sys
GAME = "/Users/{}/Library/Application Support/Toontown Rewritten/Toontown Rewritten".format(os.getlogin())
DYLD_LIBRARY_PATH = "/Users/{}/Library/Application Support/Toontown Rewritten/Libraries.bundle".format(os.getlogin())
DYLD_FRAMEWORK_PATH = "/Users/{}/Library/Application Support/Toontown Rewritten/Frameworks".format(os.getlogin())
CONFIG_DIRECTORY = os.path.expanduser("~/.config/ttrlauncher/")
URL = "https://www.toontownrewritten.com/api/login?format=json"
if not os.path.exists(CONFIG_DIRECTORY):
os.makedirs(CONFIG_DIRECTORY)
if not os.path.exists(CONFIG_DIRECTORY + 'config.json'):
with open(CONFIG_DIRECTORY + 'config.json', 'w') as f:
f.write(json.dumps({}))
with open(CONFIG_DIRECTORY + 'config.json.example', 'w') as f:
f.write(json.dumps({"AccountNickName": ['username', 'password']}))
ACCOUNTS = json.load(open(CONFIG_DIRECTORY + 'config.json', 'r'))
def die(reason):
print(reason)
exit(1)
def select_account():
if not len(ACCOUNTS):
die('Error: You need to open {} and add some accounts! See config.json.example for examples.'.format(
CONFIG_DIRECTORY + "config.json"))
if len(sys.argv) > 1 and sys.argv[1] in ACCOUNTS.keys():
return ACCOUNTS[sys.argv[1]]
while True:
print("Available accounts: {}".format(", ".join(ACCOUNTS.keys())))
account = input('Which account? ')
if account in ACCOUNTS.keys():
return ACCOUNTS[account]
print("Invalid account, try again.")
def finish_partial_auth(r):
while True:
print(r['banner'])
code = input("Code: ")
r = requests.post(URL, data={'appToken': code, 'responseToken': r['responseToken']}).json()
        if r['success'] == "true":  # the API reports success as the string "true"; "false" would be truthy
return r
def finish_queue(r):
queueToken = r['queueToken']
while True:
print(r)
print("Currently waiting in queue... Position: {}, ETA: {} seconds".format(r['position'], r['eta']))
time.sleep(1)
r = requests.post(URL, data={'queueToken': queueToken}).json()
if r['success'] == "true":
return r
time.sleep(29)
def login(account):
r = requests.post(URL, data={'username': account[0], 'password': account[1]}).json()
if r['success'] == "false":
die("Unable to login: {}".format(r['banner']))
elif r['success'] == "partial":
r = finish_partial_auth(r)
if r['success'] == "delayed":
r = finish_queue(r)
print(r)
if r['success'] == "true":
os.environ['TTR_GAMESERVER'] = r['gameserver']
os.environ['TTR_PLAYCOOKIE'] = r['cookie']
os.system('cd "{}" && DYLD_LIBRARY_PATH="{}" DYLD_FRAMEWORK_PATH="{}" "{}"'.format(
os.path.dirname(GAME), DYLD_LIBRARY_PATH, DYLD_FRAMEWORK_PATH, GAME
))
exit(0)
else:
die('Somehow we got here, not sure how ...')
login(select_account())
| Omeryl/TTR-Launcher | launcher.py | Python | mit | 2,997 |
"""Some utilities specific to modifying config files.
"""
import os
import os.path
import sys
import logging
import tempfile
import shutil
import engage_utils.process as processutils
import file as fileutils
from log_setup import setup_engage_logger
logger = setup_engage_logger(__name__)
from regexp import *
def _get_config_line_regexps(line):
"""
>>> (p1, p2) = _get_config_line_regexps("this test")
>>> r1 = p1.compile()
>>> r2 = p2.compile()
>>> r1.match("this test") is not None
True
>>> r1.match("this testx") is not None
False
>>> r1.match("this test ") is not None
True
>>> r1.match("this test # sdkdj") is not None
True
>>> r1.match(" this test") is not None
True
>>> r1.match("# this test") is not None
False
>>> r2.match("# this test") is not None
True
>>> r2.match("## this test") is not None
True
>>> r2.match("# this test # comment") is not None
True
>>> r2.match("this test") is not None
False
>>> r2.match("# this test xyz") is not None
False
>>> r2.match(" # this test") is not None
True
"""
opt_whitespace = zero_or_more(whitespace_char())
line_pat = line_ends_with(concat(opt_whitespace,
lit(line),
opt_whitespace,
zero_or_one(concat(lit('#'),
zero_or_more(any_except_newline())))))
commented_line_pat = concat(opt_whitespace, one_or_more(lit('#')), opt_whitespace, line_pat)
return (line_pat, commented_line_pat)
def _add_config_file_line(config_file, line):
"""We want to itempotently add a config file entry to the config file.
There are three cases we handle:
* If it is not there, we just append it to the end of the file along with a comment
* If it is there, but is commented out, we uncomment the line
* If it is there, and is not commented out, we leave the file alone
"""
(line_pat, cmt_pat) = _get_config_line_regexps(line)
line_re = line_pat.compile()
cmt_re = cmt_pat.compile()
line_match_count = 0
comment_match_count = 0
with open(config_file, "r") as f:
for l in f:
if line_re.match(l):
line_match_count += 1
elif cmt_re.match(l):
comment_match_count += 1
if line_match_count > 0:
logger.debug("Config line '%s' already in config file '%s'" %
(line, config_file))
return
# if there already is .orig file, we leave it alone and create a temp
# file for our work.
if os.path.exists(config_file + ".orig"):
tf = tempfile.NamedTemporaryFile(delete=False)
backup_file_name = tf.name
tf.close()
delete_backup_file_when_done = True
else:
backup_file_name = config_file + ".orig"
delete_backup_file_when_done = False
conf_file_perms = fileutils.get_file_permissions(config_file)
os.rename(config_file, backup_file_name)
try:
if line_match_count==0 and comment_match_count==0:
shutil.copy(backup_file_name, config_file)
with open(config_file, "a") as f:
import datetime
f.write("# Added by Engage %s\n" % datetime.datetime.now().strftime("%Y-%m-%d"))
f.write(line+"\n")
logger.debug("Added config line '%s' to config file '%s'" %
(line, config_file))
else:
# find the commented version and uncomment
found = False
with open(backup_file_name, "r") as rf:
with open(config_file, "w") as wf:
for l in rf:
if (not found) and cmt_re.match(l):
# we take the line starting at the
# beginning of the actual config
# string.
m = lit(line).compile().search(l)
wf.write(l[m.start():])
found = True
else:
wf.write(l)
assert found
logger.debug("Uncommented config line '%s' in config file '%s'" %
(line, config_file))
except:
# if we got an error, move back the original config file
os.rename(backup_file_name, config_file)
raise
fileutils.set_file_permissions(config_file, conf_file_perms)
if delete_backup_file_when_done:
os.remove(backup_file_name)
def add_config_file_line(config_file, line, sudo_password):
"""Add line to config file (see above for detailed description).
This is just a wrapper over _add_config_file_line(). Unless we are
running as root, we need to spawn a subprocess and run it under sudo
"""
if processutils.is_running_as_root():
_add_config_file_line(config_file, line)
else:
processutils.run_sudo_program([sys.executable, __file__, config_file, line],
sudo_password, logger)
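# Example usage (the file path and config line are hypothetical, shown for
# illustration only):
#   add_config_file_line('/etc/sysctl.conf', 'vm.swappiness = 10', sudo_pw)
# This appends the line, or uncomments an existing commented copy of it.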
def is_config_line_present(config_file, line):
"""Return True if line is present in the file and not commented out,
False otherwise.
"""
(line_pat, cmt_line_pat) = _get_config_line_regexps(line)
line_re = line_pat.compile()
with open(config_file, "r") as f:
for l in f:
if line_re.match(l):
return True
return False
if __name__ == "__main__":
"""For the add_config_file() functionality, we need to run this same program
as an sudo subprocess.
"""
args = sys.argv[1:]
if len(args)!=2:
print "%s <config_file> <line>" % sys.argv[0]
sys.exit(1)
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setLevel(logging.DEBUG)
root_logger.addHandler(console_handler)
_add_config_file_line(args[0], args[1])
print "_add_config_file_line was successful"
sys.exit(0)
| quaddra/engage | python_pkg/engage/utils/cfg_file.py | Python | apache-2.0 | 6,149 |
import py
from rpython.flowspace.model import SpaceOperation, Constant, Variable
from rpython.rtyper.lltypesystem import lltype, llmemory, rffi
from rpython.translator.unsimplify import varoftype
from rpython.rlib import jit
from rpython.jit.codewriter import support, call
from rpython.jit.codewriter.call import CallControl
from rpython.jit.codewriter.effectinfo import EffectInfo
class FakePolicy:
def look_inside_graph(self, graph):
return True
def test_graphs_from_direct_call():
cc = CallControl()
F = lltype.FuncType([], lltype.Signed)
f = lltype.functionptr(F, 'f', graph='fgraph')
v = varoftype(lltype.Signed)
op = SpaceOperation('direct_call', [Constant(f, lltype.Ptr(F))], v)
#
lst = cc.graphs_from(op, {}.__contains__)
assert lst is None # residual call
#
lst = cc.graphs_from(op, {'fgraph': True}.__contains__)
assert lst == ['fgraph'] # normal call
def test_graphs_from_indirect_call():
cc = CallControl()
F = lltype.FuncType([], lltype.Signed)
v = varoftype(lltype.Signed)
graphlst = ['f1graph', 'f2graph']
op = SpaceOperation('indirect_call', [varoftype(lltype.Ptr(F)),
Constant(graphlst, lltype.Void)], v)
#
lst = cc.graphs_from(op, {'f1graph': True, 'f2graph': True}.__contains__)
assert lst == ['f1graph', 'f2graph'] # normal indirect call
#
lst = cc.graphs_from(op, {'f1graph': True}.__contains__)
assert lst == ['f1graph'] # indirect call, look only inside some graphs
#
lst = cc.graphs_from(op, {}.__contains__)
assert lst is None # indirect call, don't look inside any graph
def test_graphs_from_no_target():
cc = CallControl()
F = lltype.FuncType([], lltype.Signed)
v = varoftype(lltype.Signed)
op = SpaceOperation('indirect_call', [varoftype(lltype.Ptr(F)),
Constant(None, lltype.Void)], v)
lst = cc.graphs_from(op, {}.__contains__)
assert lst is None
# ____________________________________________________________
class FakeJitDriverSD:
def __init__(self, portal_graph):
self.portal_graph = portal_graph
self.portal_runner_ptr = "???"
def test_find_all_graphs():
def g(x):
return x + 2
def f(x):
return g(x) + 1
rtyper = support.annotate(f, [7])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(jitdrivers_sd=[jitdriver_sd])
res = cc.find_all_graphs(FakePolicy())
funcs = set([graph.func for graph in res])
assert funcs == set([f, g])
def test_find_all_graphs_without_g():
def g(x):
return x + 2
def f(x):
return g(x) + 1
rtyper = support.annotate(f, [7])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(jitdrivers_sd=[jitdriver_sd])
class CustomFakePolicy:
def look_inside_graph(self, graph):
assert graph.name == 'g'
return False
res = cc.find_all_graphs(CustomFakePolicy())
funcs = [graph.func for graph in res]
assert funcs == [f]
# ____________________________________________________________
def test_guess_call_kind_and_calls_from_graphs():
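    # Call kinds checked below: 'recursive' (a call to the portal runner),
    # 'builtin' (a function carrying an oopspec), 'regular' (a graph the JIT
    # will trace into), and 'residual' (anything opaque, left as a real call).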
class portal_runner_obj:
graph = object()
class FakeJitDriverSD:
portal_runner_ptr = portal_runner_obj
g = object()
g1 = object()
cc = CallControl(jitdrivers_sd=[FakeJitDriverSD()])
cc.candidate_graphs = [g, g1]
op = SpaceOperation('direct_call', [Constant(portal_runner_obj)],
Variable())
assert cc.guess_call_kind(op) == 'recursive'
class fakeresidual:
_obj = object()
op = SpaceOperation('direct_call', [Constant(fakeresidual)],
Variable())
assert cc.guess_call_kind(op) == 'residual'
class funcptr:
class _obj:
class graph:
class func:
oopspec = "spec"
op = SpaceOperation('direct_call', [Constant(funcptr)],
Variable())
assert cc.guess_call_kind(op) == 'builtin'
class funcptr:
class _obj:
graph = g
op = SpaceOperation('direct_call', [Constant(funcptr)],
Variable())
res = cc.graphs_from(op)
assert res == [g]
assert cc.guess_call_kind(op) == 'regular'
class funcptr:
class _obj:
graph = object()
op = SpaceOperation('direct_call', [Constant(funcptr)],
Variable())
res = cc.graphs_from(op)
assert res is None
assert cc.guess_call_kind(op) == 'residual'
h = object()
op = SpaceOperation('indirect_call', [Variable(),
Constant([g, g1, h])],
Variable())
res = cc.graphs_from(op)
assert res == [g, g1]
assert cc.guess_call_kind(op) == 'regular'
op = SpaceOperation('indirect_call', [Variable(),
Constant([h])],
Variable())
res = cc.graphs_from(op)
assert res is None
assert cc.guess_call_kind(op) == 'residual'
# ____________________________________________________________
def test_get_jitcode(monkeypatch):
from rpython.jit.codewriter.test.test_flatten import FakeCPU
class FakeRTyper:
class annotator:
translator = None
def getfunctionptr(graph):
F = lltype.FuncType([], lltype.Signed)
return lltype.functionptr(F, 'bar')
monkeypatch.setattr(call, 'getfunctionptr', getfunctionptr)
cc = CallControl(FakeCPU(FakeRTyper()))
class somegraph:
name = "foo"
jitcode = cc.get_jitcode(somegraph)
assert jitcode is cc.get_jitcode(somegraph) # caching
assert jitcode.name == "foo"
pending = list(cc.enum_pending_graphs())
assert pending == [(somegraph, jitcode)]
# ____________________________________________________________
def test_jit_force_virtualizable_effectinfo():
py.test.skip("XXX add a test for CallControl.getcalldescr() -> EF_xxx")
def test_releases_gil_analyzer():
from rpython.jit.backend.llgraph.runner import LLGraphCPU
T = rffi.CArrayPtr(rffi.TIME_T)
external = rffi.llexternal("time", [T], rffi.TIME_T, releasegil=True)
@jit.dont_look_inside
def f():
return external(lltype.nullptr(T.TO))
rtyper = support.annotate(f, [])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(LLGraphCPU(rtyper), jitdrivers_sd=[jitdriver_sd])
res = cc.find_all_graphs(FakePolicy())
[f_graph] = [x for x in res if x.func is f]
[block, _] = list(f_graph.iterblocks())
[op] = block.operations
call_descr = cc.getcalldescr(op)
assert call_descr.extrainfo.has_random_effects()
assert call_descr.extrainfo.is_call_release_gil() is False
def test_call_release_gil():
from rpython.jit.backend.llgraph.runner import LLGraphCPU
T = rffi.CArrayPtr(rffi.TIME_T)
external = rffi.llexternal("time", [T], rffi.TIME_T, releasegil=True,
save_err=rffi.RFFI_SAVE_ERRNO)
# no jit.dont_look_inside in this test
def f():
return external(lltype.nullptr(T.TO))
rtyper = support.annotate(f, [])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(LLGraphCPU(rtyper), jitdrivers_sd=[jitdriver_sd])
res = cc.find_all_graphs(FakePolicy())
[llext_graph] = [x for x in res if x.func is external]
[block, _] = list(llext_graph.iterblocks())
[op] = block.operations
tgt_tuple = op.args[0].value._obj.graph.func._call_aroundstate_target_
assert type(tgt_tuple) is tuple and len(tgt_tuple) == 2
call_target, saveerr = tgt_tuple
assert saveerr == rffi.RFFI_SAVE_ERRNO
call_target = llmemory.cast_ptr_to_adr(call_target)
call_descr = cc.getcalldescr(op)
assert call_descr.extrainfo.has_random_effects()
assert call_descr.extrainfo.is_call_release_gil() is True
assert call_descr.extrainfo.call_release_gil_target == (
call_target, rffi.RFFI_SAVE_ERRNO)
def test_random_effects_on_stacklet_switch():
from rpython.jit.backend.llgraph.runner import LLGraphCPU
from rpython.translator.platform import CompilationError
try:
from rpython.rlib._rffi_stacklet import switch, handle
except CompilationError as e:
if "Unsupported platform!" in e.out:
py.test.skip("Unsupported platform!")
else:
raise e
@jit.dont_look_inside
def f():
switch(rffi.cast(handle, 0))
rtyper = support.annotate(f, [])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(LLGraphCPU(rtyper), jitdrivers_sd=[jitdriver_sd])
res = cc.find_all_graphs(FakePolicy())
[f_graph] = [x for x in res if x.func is f]
[block, _] = list(f_graph.iterblocks())
op = block.operations[-1]
call_descr = cc.getcalldescr(op)
assert call_descr.extrainfo.has_random_effects()
def test_no_random_effects_for_rotateLeft():
from rpython.jit.backend.llgraph.runner import LLGraphCPU
from rpython.rlib.rarithmetic import r_uint
if r_uint.BITS == 32:
py.test.skip("64-bit only")
from rpython.rlib.rmd5 import _rotateLeft
def f(n, m):
return _rotateLeft(r_uint(n), m)
rtyper = support.annotate(f, [7, 9])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(LLGraphCPU(rtyper), jitdrivers_sd=[jitdriver_sd])
res = cc.find_all_graphs(FakePolicy())
[f_graph] = [x for x in res if x.func is f]
[block, _] = list(f_graph.iterblocks())
op = block.operations[-1]
call_descr = cc.getcalldescr(op)
assert not call_descr.extrainfo.has_random_effects()
assert call_descr.extrainfo.check_is_elidable()
def test_elidable_kinds():
from rpython.jit.backend.llgraph.runner import LLGraphCPU
@jit.elidable
def f1(n, m):
return n + m
@jit.elidable
def f2(n, m):
return [n, m] # may raise MemoryError
@jit.elidable
def f3(n, m):
if n > m:
raise ValueError
return n + m
def f(n, m):
a = f1(n, m)
b = f2(n, m)
c = f3(n, m)
return a + len(b) + c
rtyper = support.annotate(f, [7, 9])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(LLGraphCPU(rtyper), jitdrivers_sd=[jitdriver_sd])
res = cc.find_all_graphs(FakePolicy())
[f_graph] = [x for x in res if x.func is f]
for index, expected in [
(0, EffectInfo.EF_ELIDABLE_CANNOT_RAISE),
(1, EffectInfo.EF_ELIDABLE_OR_MEMORYERROR),
(2, EffectInfo.EF_ELIDABLE_CAN_RAISE)]:
call_op = f_graph.startblock.operations[index]
assert call_op.opname == 'direct_call'
call_descr = cc.getcalldescr(call_op)
assert call_descr.extrainfo.extraeffect == expected
def test_raise_elidable_no_result():
from rpython.jit.backend.llgraph.runner import LLGraphCPU
l = []
@jit.elidable
def f1(n, m):
l.append(n)
def f(n, m):
f1(n, m)
return n + m
rtyper = support.annotate(f, [7, 9])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(LLGraphCPU(rtyper), jitdrivers_sd=[jitdriver_sd])
res = cc.find_all_graphs(FakePolicy())
[f_graph] = [x for x in res if x.func is f]
call_op = f_graph.startblock.operations[0]
assert call_op.opname == 'direct_call'
with py.test.raises(Exception):
call_descr = cc.getcalldescr(call_op)
| jptomo/rpython-lang-scheme | rpython/jit/codewriter/test/test_call.py | Python | mit | 11,707 |
"""
Unittests for gst-switch Python-API
"""
| timvideos/gst-switch | python-api/tests/unittests/__init__.py | Python | gpl-3.0 | 44 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import re
import sys
from stgit.run import Run, RunException
class VersionUnavailable(Exception):
pass
def git_describe_version():
root = sys.path[0] if sys.path[0] else None
try:
v = (
Run('git', 'describe', '--tags', '--abbrev=4')
.cwd(root)
.discard_stderr()
.output_one_line()
)
except RunException as e:
raise VersionUnavailable(str(e))
m = re.match(r'^v([0-9].*)', v)
if m:
v = m.group(1)
else:
raise VersionUnavailable('bad version: %s' % v)
try:
dirty = (
Run('git', 'diff-index', '--name-only', 'HEAD')
.cwd(root)
.discard_stderr()
.raw_output()
)
except RunException as e:
raise VersionUnavailable(str(e))
if dirty:
v += '-dirty'
return v
def git_archival_version():
archival_path = os.path.join(sys.path[0], '.git_archival.txt')
if not os.path.isfile(archival_path):
# The archival file will not be present in sdist archives.
raise VersionUnavailable('%s does not exist' % archival_path)
tag_re = re.compile(r'(?<=\btag: )([^,]+)\b')
with open(archival_path) as f:
for line in f:
if line.startswith('ref-names:'):
for tag in tag_re.findall(line):
if tag.startswith('v'):
return tag[1:]
else:
raise VersionUnavailable('no tags found in %s' % archival_path)
def get_builtin_version():
try:
import stgit.builtin_version
except ImportError:
raise VersionUnavailable('could not import stgit.builtin_version')
else:
return stgit.builtin_version.version
def get_version():
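    # Try sources from most to least authoritative: the version baked in at
    # build time, then `git describe` in a checkout, then the archival file
    # that `git archive` substitutes into exported tarballs.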
for v in [get_builtin_version, git_describe_version, git_archival_version]:
try:
return v()
except VersionUnavailable:
pass
return 'unknown-version'
# minimum version requirements
git_min_ver = '2.2.0'
python_min_ver = '2.6'
| ctmarinas/stgit | stgit/version.py | Python | gpl-2.0 | 2,161 |
import pytest
from registry.utils import pass_spec_parser, pass_spec_builder
PASS_SPECS = {
    '': [],
    ' ': [],
    'a': [],
    '111': [111],
    '111,112': [111, 112],
    '111, 112': [111, 112],
    '111-113': [111, 112, 113],
    '111 - 113': [111, 112, 113],
    '111,113-115': [111, 113, 114, 115],
    '113-115, 111': [111, 113, 114, 115],
}
PASS_SPECS_REVERSE = {
    '': [],
    '111': [111],
    '111-112': [111, 112],
    '111-113': [111, 112, 113],
    # The builder must also collapse unsorted ids into the same spec.
    '111,113-115': [111, 115, 114, 113],
}
@pytest.fixture(params=PASS_SPECS.items())
def spec(request):
return request.param
@pytest.fixture(params=PASS_SPECS_REVERSE.items())
def reverse_spec(request):
return request.param
def test_pass_spec_parser(spec):
spec, expected = spec
assert pass_spec_parser(spec) == expected
def test_pass_spec_builder(reverse_spec):
expected, spec = reverse_spec
assert pass_spec_builder(spec) == expected
| arsgeografica/kinderstadt-registry | test/unit/test_pass_spec.py | Python | gpl-3.0 | 1,112 |
import pytest
import mock
from datetime import timedelta
from awx.main.scheduler import TaskManager
from awx.main.models import InstanceGroup, WorkflowJob
from awx.main.tasks import apply_cluster_membership_policies
@pytest.mark.django_db
def test_multi_group_basic_job_launch(instance_factory, default_instance_group, mocker,
instance_group_factory, job_template_factory):
i1 = instance_factory("i1")
i2 = instance_factory("i2")
ig1 = instance_group_factory("ig1", instances=[i1])
ig2 = instance_group_factory("ig2", instances=[i2])
objects1 = job_template_factory('jt1', organization='org1', project='proj1',
inventory='inv1', credential='cred1',
jobs=["job_should_start"])
objects1.job_template.instance_groups.add(ig1)
j1 = objects1.jobs['job_should_start']
j1.status = 'pending'
j1.save()
objects2 = job_template_factory('jt2', organization='org2', project='proj2',
inventory='inv2', credential='cred2',
jobs=["job_should_still_start"])
objects2.job_template.instance_groups.add(ig2)
j2 = objects2.jobs['job_should_still_start']
j2.status = 'pending'
j2.save()
with mock.patch('awx.main.models.Job.task_impact', new_callable=mock.PropertyMock) as mock_task_impact:
mock_task_impact.return_value = 500
with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
TaskManager().schedule()
TaskManager.start_task.assert_has_calls([mock.call(j1, ig1, [], i1), mock.call(j2, ig2, [], i2)])
@pytest.mark.django_db
def test_multi_group_with_shared_dependency(instance_factory, default_instance_group, mocker,
instance_group_factory, job_template_factory):
i1 = instance_factory("i1")
i2 = instance_factory("i2")
ig1 = instance_group_factory("ig1", instances=[i1])
ig2 = instance_group_factory("ig2", instances=[i2])
objects1 = job_template_factory('jt1', organization='org1', project='proj1',
inventory='inv1', credential='cred1',
jobs=["job_should_start"])
objects1.job_template.instance_groups.add(ig1)
p = objects1.project
p.scm_update_on_launch = True
p.scm_update_cache_timeout = 0
p.scm_type = "git"
p.scm_url = "http://github.com/ansible/ansible.git"
p.save()
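    # Both job templates in this test share project p, so its SCM update is
    # a common dependency that must complete before either job can start.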
j1 = objects1.jobs['job_should_start']
j1.status = 'pending'
j1.save()
objects2 = job_template_factory('jt2', organization=objects1.organization, project=p,
inventory='inv2', credential='cred2',
jobs=["job_should_still_start"])
objects2.job_template.instance_groups.add(ig2)
j2 = objects2.jobs['job_should_still_start']
j2.status = 'pending'
j2.save()
with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
TaskManager().schedule()
pu = p.project_updates.first()
TaskManager.start_task.assert_called_once_with(pu,
default_instance_group,
[j1],
default_instance_group.instances.all()[0])
pu.finished = pu.created + timedelta(seconds=1)
pu.status = "successful"
pu.save()
with mock.patch("awx.main.scheduler.TaskManager.start_task"):
TaskManager().schedule()
TaskManager.start_task.assert_any_call(j1, ig1, [], i1)
TaskManager.start_task.assert_any_call(j2, ig2, [], i2)
assert TaskManager.start_task.call_count == 2
@pytest.mark.django_db
def test_workflow_job_no_instancegroup(workflow_job_template_factory, default_instance_group, mocker):
wfjt = workflow_job_template_factory('anicedayforawalk').workflow_job_template
wfj = WorkflowJob.objects.create(workflow_job_template=wfjt)
wfj.status = "pending"
wfj.save()
with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
TaskManager().schedule()
TaskManager.start_task.assert_called_once_with(wfj, None, [], None)
assert wfj.instance_group is None
@pytest.mark.django_db
def test_overcapacity_blocking_other_groups_unaffected(instance_factory, default_instance_group, mocker,
instance_group_factory, job_template_factory):
i1 = instance_factory("i1")
i1.capacity = 1000
i1.save()
i2 = instance_factory("i2")
ig1 = instance_group_factory("ig1", instances=[i1])
ig2 = instance_group_factory("ig2", instances=[i2])
objects1 = job_template_factory('jt1', organization='org1', project='proj1',
inventory='inv1', credential='cred1',
jobs=["job_should_start"])
objects1.job_template.instance_groups.add(ig1)
j1 = objects1.jobs['job_should_start']
j1.status = 'pending'
j1.save()
objects2 = job_template_factory('jt2', organization=objects1.organization, project='proj2',
inventory='inv2', credential='cred2',
jobs=["job_should_start", "job_should_also_start"])
objects2.job_template.instance_groups.add(ig1)
j1_1 = objects2.jobs['job_should_also_start']
j1_1.status = 'pending'
j1_1.save()
objects3 = job_template_factory('jt3', organization='org2', project='proj3',
inventory='inv3', credential='cred3',
jobs=["job_should_still_start"])
objects3.job_template.instance_groups.add(ig2)
j2 = objects3.jobs['job_should_still_start']
j2.status = 'pending'
j2.save()
objects4 = job_template_factory('jt4', organization=objects3.organization, project='proj4',
inventory='inv4', credential='cred4',
jobs=["job_should_not_start"])
objects4.job_template.instance_groups.add(ig2)
j2_1 = objects4.jobs['job_should_not_start']
j2_1.status = 'pending'
j2_1.save()
tm = TaskManager()
with mock.patch('awx.main.models.Job.task_impact', new_callable=mock.PropertyMock) as mock_task_impact:
mock_task_impact.return_value = 500
with mock.patch.object(TaskManager, "start_task", wraps=tm.start_task) as mock_job:
tm.schedule()
mock_job.assert_has_calls([mock.call(j1, ig1, [], i1),
mock.call(j1_1, ig1, [], i1),
mock.call(j2, ig2, [], i2)])
assert mock_job.call_count == 3
@pytest.mark.django_db
def test_failover_group_run(instance_factory, default_instance_group, mocker,
instance_group_factory, job_template_factory):
i1 = instance_factory("i1")
i2 = instance_factory("i2")
ig1 = instance_group_factory("ig1", instances=[i1])
ig2 = instance_group_factory("ig2", instances=[i2])
objects1 = job_template_factory('jt1', organization='org1', project='proj1',
inventory='inv1', credential='cred1',
jobs=["job_should_start"])
objects1.job_template.instance_groups.add(ig1)
j1 = objects1.jobs['job_should_start']
j1.status = 'pending'
j1.save()
objects2 = job_template_factory('jt2', organization=objects1.organization, project='proj2',
inventory='inv2', credential='cred2',
jobs=["job_should_start", "job_should_also_start"])
objects2.job_template.instance_groups.add(ig1)
objects2.job_template.instance_groups.add(ig2)
j1_1 = objects2.jobs['job_should_also_start']
j1_1.status = 'pending'
j1_1.save()
tm = TaskManager()
with mock.patch('awx.main.models.Job.task_impact', new_callable=mock.PropertyMock) as mock_task_impact:
mock_task_impact.return_value = 500
with mock.patch.object(TaskManager, "start_task", wraps=tm.start_task) as mock_job:
tm.schedule()
mock_job.assert_has_calls([mock.call(j1, ig1, [], i1),
mock.call(j1_1, ig2, [], i2)])
assert mock_job.call_count == 2
@pytest.mark.django_db
def test_instance_group_basic_policies(instance_factory, instance_group_factory):
i0 = instance_factory("i0")
i0.managed_by_policy = False
i0.save()
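    # i0 opts out of policy management, leaving four managed instances:
    # ig1's minimum=2 claims two of them, and each percentage=50 group is
    # entitled to half (two), so groups may share instances.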
i1 = instance_factory("i1")
i2 = instance_factory("i2")
i3 = instance_factory("i3")
i4 = instance_factory("i4")
ig0 = instance_group_factory("ig0")
ig1 = instance_group_factory("ig1", minimum=2)
ig2 = instance_group_factory("ig2", percentage=50)
ig3 = instance_group_factory("ig3", percentage=50)
ig0.policy_instance_list.append(i0.hostname)
ig0.save()
apply_cluster_membership_policies()
ig0 = InstanceGroup.objects.get(id=ig0.id)
ig1 = InstanceGroup.objects.get(id=ig1.id)
ig2 = InstanceGroup.objects.get(id=ig2.id)
ig3 = InstanceGroup.objects.get(id=ig3.id)
assert len(ig0.instances.all()) == 1
assert i0 in ig0.instances.all()
assert len(InstanceGroup.objects.get(id=ig1.id).instances.all()) == 2
assert i1 in ig1.instances.all()
assert i2 in ig1.instances.all()
assert len(InstanceGroup.objects.get(id=ig2.id).instances.all()) == 2
assert i3 in ig2.instances.all()
assert i4 in ig2.instances.all()
assert len(InstanceGroup.objects.get(id=ig3.id).instances.all()) == 2
assert i1 in ig3.instances.all()
assert i2 in ig3.instances.all()
| wwitzel3/awx | awx/main/tests/functional/task_management/test_rampart_groups.py | Python | apache-2.0 | 9,820 |
# -*- coding: utf-8 -*-
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions for Cloud API implementations."""
from __future__ import absolute_import
import json
import re
from gslib.cloud_api import ArgumentException
from gslib.util import AddQueryParamToUrl
def ValidateDstObjectMetadata(dst_obj_metadata):
"""Ensures dst_obj_metadata supplies the needed fields for copy and insert.
Args:
dst_obj_metadata: Metadata to validate.
Raises:
ArgumentException if metadata is invalid.
"""
if not dst_obj_metadata:
raise ArgumentException(
'No object metadata supplied for destination object.')
if not dst_obj_metadata.name:
raise ArgumentException(
'Object metadata supplied for destination object had no object name.')
if not dst_obj_metadata.bucket:
raise ArgumentException(
'Object metadata supplied for destination object had no bucket name.')
def GetDownloadSerializationData(
src_obj_metadata, progress=0, user_project=None):
"""Returns download serialization data.
There are five entries:
auto_transfer: JSON-specific field, always False.
progress: How much of the download has already been completed.
total_size: Total object size.
url: Implementation-specific field used for saving a metadata get call.
For JSON, this the download URL of the object.
For XML, this is a pickled boto key.
user_project: Project to be billed to, added as query param.
Args:
src_obj_metadata: Object to be downloaded.
progress: See above.
user_project: User project to add to query string.
Returns:
Serialization data for use with Cloud API GetObjectMedia.
"""
url = src_obj_metadata.mediaLink
if user_project:
url = AddQueryParamToUrl(url, 'userProject', user_project)
serialization_dict = {
'auto_transfer': 'False',
'progress': progress,
'total_size': src_obj_metadata.size,
'url': url
}
return json.dumps(serialization_dict)
def ListToGetFields(list_fields=None):
"""Removes 'items/' from the input fields and converts it to a set.
Args:
list_fields: Iterable fields usable in ListBuckets/ListObjects calls.
Returns:
Set of fields usable in GetBucket/GetObjectMetadata calls (None implies
all fields should be returned).
"""
if list_fields:
get_fields = set()
for field in list_fields:
if field in ('kind', 'nextPageToken', 'prefixes'):
# These are not actually object / bucket metadata fields.
# They are fields specific to listing, so we don't consider them.
continue
get_fields.add(re.sub(r'items/', '', field))
return get_fields
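# For example (illustrative): ListToGetFields(['items/name', 'items/size',
# 'prefixes']) returns set(['name', 'size']), since 'prefixes' is a
# listing-only field.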
| fishjord/gsutil | gslib/cloud_api_helper.py | Python | apache-2.0 | 3,237 |
# Generated by Django 2.0.3 on 2018-03-21 09:17
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("page", "0001_initial")]
operations = [
migrations.AddField(
model_name="page",
name="seo_description",
field=models.CharField(
blank=True,
max_length=300,
null=True,
validators=[django.core.validators.MaxLengthValidator(300)],
),
),
migrations.AddField(
model_name="page",
name="seo_title",
field=models.CharField(
blank=True,
max_length=70,
null=True,
validators=[django.core.validators.MaxLengthValidator(70)],
),
),
]
| maferelo/saleor | saleor/page/migrations/0002_auto_20180321_0417.py | Python | bsd-3-clause | 870 |
#!/usr/bin/env python
##############################################################################
# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
# Unittest for
# yardstick.benchmark.scenarios.networking.netperf_node.NetperfNode
import mock
import unittest
import os
import json
from yardstick.benchmark.scenarios.networking import netperf_node
@mock.patch('yardstick.benchmark.scenarios.networking.netperf_node.ssh')
class NetperfNodeTestCase(unittest.TestCase):
def setUp(self):
self.ctx = {
'host': {
'ip': '192.168.10.10',
'user': 'root',
'password': 'root'
},
'target': {
'ip': '192.168.10.11',
'user': 'root',
'password': 'root'
}
}
def test_netperf_node_successful_setup(self, mock_ssh):
p = netperf_node.NetperfNode({}, self.ctx)
mock_ssh.SSH().execute.return_value = (0, '', '')
p.setup()
self.assertIsNotNone(p.server)
self.assertIsNotNone(p.client)
self.assertEqual(p.setup_done, True)
def test_netperf_node_successful_no_sla(self, mock_ssh):
options = {}
args = {'options': options}
result = {}
p = netperf_node.NetperfNode(args, self.ctx)
mock_ssh.SSH().execute.return_value = (0, '', '')
p.host = mock_ssh.SSH()
sample_output = self._read_sample_output()
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
expected_result = json.loads(sample_output)
p.run(result)
self.assertEqual(result, expected_result)
def test_netperf_node_successful_sla(self, mock_ssh):
options = {}
args = {
'options': options,
'sla': {'mean_latency': 100}
}
result = {}
p = netperf_node.NetperfNode(args, self.ctx)
mock_ssh.SSH().execute.return_value = (0, '', '')
p.host = mock_ssh.SSH()
sample_output = self._read_sample_output()
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
expected_result = json.loads(sample_output)
p.run(result)
self.assertEqual(result, expected_result)
def test_netperf_node_unsuccessful_sla(self, mock_ssh):
options = {}
args = {
'options': options,
'sla': {'mean_latency': 5}
}
result = {}
p = netperf_node.NetperfNode(args, self.ctx)
mock_ssh.SSH().execute.return_value = (0, '', '')
p.host = mock_ssh.SSH()
sample_output = self._read_sample_output()
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
self.assertRaises(AssertionError, p.run, result)
def test_netperf_node_unsuccessful_script_error(self, mock_ssh):
options = {}
args = {'options': options}
result = {}
p = netperf_node.NetperfNode(args, self.ctx)
mock_ssh.SSH().execute.return_value = (0, '', '')
p.host = mock_ssh.SSH()
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
self.assertRaises(RuntimeError, p.run, result)
def _read_sample_output(self):
curr_path = os.path.dirname(os.path.abspath(__file__))
output = os.path.join(curr_path, 'netperf_sample_output.json')
with open(output) as f:
sample_output = f.read()
return sample_output
def main():
unittest.main()
if __name__ == '__main__':
main()
| dtudares/hello-world | yardstick/tests/unit/benchmark/scenarios/networking/test_netperf_node.py | Python | apache-2.0 | 3,858 |
from numpy import pi
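# "Circunferencia" is Portuguese for circumference; "raio" is radius and
# "perimetro" is perimeter.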
class Circunferencia(object):
def __init__(self, raio):
self.raio = raio
def area(self):
return pi*self.raio**2
def perimetro(self):
return 2*self.raio*pi
def main():
a = Circunferencia(3)
b = Circunferencia(10)
print(a.perimetro())
print(b.perimetro())
if __name__ == "__main__":
main()
| moisesoliv/ProgAvanc | classe.py | Python | mit | 372 |
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This script will migrate the database of an openvswitch, linuxbridge or
Hyper-V plugin so that it can be used with the ml2 plugin.
Known Limitations:
- THIS SCRIPT IS DESTRUCTIVE! Make sure to backup your
Neutron database before running this script, in case anything goes
wrong.
- It will be necessary to upgrade the database to the target release
via neutron-db-manage before attempting to migrate to ml2.
Initially, only the icehouse release is supported.
- This script does not automate configuration migration.
Example usage:
python -m neutron.db.migration.migrate_to_ml2 openvswitch \
mysql://login:[email protected]/neutron
Note that migration of tunneling state will only be attempted if the
--tunnel-type parameter is provided.
To manually test migration from ovs to ml2 with devstack:
- stack with Q_PLUGIN=openvswitch
- boot an instance and validate connectivity
- stop the neutron service and all agents
- run the neutron-migrate-to-ml2 script
- update /etc/neutron/neutron.conf as follows:
core_plugin = neutron.plugins.ml2.plugin.Ml2Plugin
- Create /etc/neutron/plugins/ml2/ml2_conf.ini and ensure that:
- ml2.mechanism_drivers includes 'openvswitch'
- ovs.local_ip is set correctly
- database.connection is set correctly
- Start the neutron service with the ml2 config file created in
the previous step in place of the openvswitch config file
- Start all the agents
- verify that the booted instance still has connectivity
- boot a second instance and validate connectivity
"""
import argparse
from oslo_db.sqlalchemy import session
import sqlalchemy as sa
from neutron.extensions import portbindings
from neutron.openstack.common import uuidutils
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2.drivers import type_vxlan
# Migration targets
LINUXBRIDGE = 'linuxbridge'
OPENVSWITCH = 'openvswitch'
HYPERV = 'hyperv'
# Releases
ICEHOUSE = 'icehouse'
JUNO = 'juno'
SUPPORTED_SCHEMA_VERSIONS = [ICEHOUSE, JUNO]
def check_db_schema_version(engine, metadata):
"""Check that current version of the db schema is supported."""
version_table = sa.Table(
'alembic_version', metadata, autoload=True, autoload_with=engine)
versions = [v[0] for v in engine.execute(version_table.select())]
if not versions:
raise ValueError(_("Missing version in alembic_versions table"))
elif len(versions) > 1:
raise ValueError(_("Multiple versions in alembic_versions table: %s")
% versions)
current_version = versions[0]
if current_version not in SUPPORTED_SCHEMA_VERSIONS:
raise SystemError(_("Unsupported database schema %(current)s. "
"Please migrate your database to one of following "
"versions: %(supported)s")
% {'current': current_version,
'supported': ', '.join(SUPPORTED_SCHEMA_VERSIONS)}
)
# Duplicated from neutron.plugins.linuxbridge.common.constants to
# avoid having any dependency on the linuxbridge plugin being
# installed.
def interpret_vlan_id(vlan_id):
"""Return (network_type, segmentation_id) tuple for encoded vlan_id."""
FLAT_VLAN_ID = -1
LOCAL_VLAN_ID = -2
if vlan_id == LOCAL_VLAN_ID:
return (p_const.TYPE_LOCAL, None)
elif vlan_id == FLAT_VLAN_ID:
return (p_const.TYPE_FLAT, None)
else:
return (p_const.TYPE_VLAN, vlan_id)
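# For example, assuming the usual p_const string values:
#   interpret_vlan_id(-2) -> ('local', None)
#   interpret_vlan_id(-1) -> ('flat', None)
#   interpret_vlan_id(5)  -> ('vlan', 5)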
class BaseMigrateToMl2(object):
def __init__(self, vif_type, driver_type, segment_table_name,
vlan_allocation_table_name, old_tables):
self.vif_type = vif_type
self.driver_type = driver_type
self.segment_table_name = segment_table_name
self.vlan_allocation_table_name = vlan_allocation_table_name
self.old_tables = old_tables
def __call__(self, connection_url, save_tables=False, tunnel_type=None,
vxlan_udp_port=None):
engine = session.create_engine(connection_url)
metadata = sa.MetaData()
check_db_schema_version(engine, metadata)
if hasattr(self, 'define_ml2_tables'):
self.define_ml2_tables(metadata)
# Autoload the ports table to ensure that foreign keys to it and
# the network table can be created for the new tables.
sa.Table('ports', metadata, autoload=True, autoload_with=engine)
metadata.create_all(engine)
self.migrate_network_segments(engine, metadata)
if tunnel_type:
self.migrate_tunnels(engine, tunnel_type, vxlan_udp_port)
self.migrate_vlan_allocations(engine)
self.migrate_port_bindings(engine, metadata)
if hasattr(self, 'drop_old_tables'):
self.drop_old_tables(engine, save_tables)
def migrate_segment_dict(self, binding):
binding['id'] = uuidutils.generate_uuid()
def migrate_network_segments(self, engine, metadata):
# Migrating network segments requires loading the data to python
# so that a uuid can be generated for each segment.
source_table = sa.Table(self.segment_table_name, metadata,
autoload=True, autoload_with=engine)
source_segments = engine.execute(source_table.select())
ml2_segments = [dict(x) for x in source_segments]
for segment in ml2_segments:
self.migrate_segment_dict(segment)
if ml2_segments:
ml2_network_segments = metadata.tables['ml2_network_segments']
engine.execute(ml2_network_segments.insert(), ml2_segments)
def migrate_tunnels(self, engine, tunnel_type, vxlan_udp_port=None):
"""Override this method to perform plugin-specific tunnel migration."""
pass
def migrate_vlan_allocations(self, engine):
engine.execute(("""
INSERT INTO ml2_vlan_allocations
SELECT physical_network, vlan_id, allocated
FROM %(source_table)s
WHERE allocated = TRUE
""") % {'source_table': self.vlan_allocation_table_name})
def get_port_segment_map(self, engine):
"""Retrieve a mapping of port id to segment id.
The monolithic plugins only support a single segment per
network, so the segment id can be uniquely identified by
the network associated with a given port.
"""
port_segments = engine.execute("""
SELECT ports_network.port_id, ml2_network_segments.id AS segment_id
FROM ml2_network_segments, (
SELECT portbindingports.port_id, ports.network_id
FROM portbindingports, ports
WHERE portbindingports.port_id = ports.id
) AS ports_network
WHERE ml2_network_segments.network_id = ports_network.network_id
""")
return dict(x for x in port_segments)
def migrate_port_bindings(self, engine, metadata):
port_segment_map = self.get_port_segment_map(engine)
port_binding_ports = sa.Table('portbindingports', metadata,
autoload=True, autoload_with=engine)
source_bindings = engine.execute(port_binding_ports.select())
ml2_bindings = [dict(x) for x in source_bindings]
for binding in ml2_bindings:
binding['vif_type'] = self.vif_type
binding['driver'] = self.driver_type
segment = port_segment_map.get(binding['port_id'])
if segment:
binding['segment'] = segment
if ml2_bindings:
ml2_port_bindings = metadata.tables['ml2_port_bindings']
engine.execute(ml2_port_bindings.insert(), ml2_bindings)
class BaseMigrateToMl2_IcehouseMixin(object):
"""A mixin to ensure ml2 database schema state for Icehouse.
    This class defines the missing tables for Icehouse schema revisions. In
    Juno, the schema state has been healed, so we do not need to run these.
"""
def drop_old_tables(self, engine, save_tables=False):
if save_tables:
return
old_tables = self.old_tables + [self.vlan_allocation_table_name,
self.segment_table_name]
for table_name in old_tables:
engine.execute('DROP TABLE %s' % table_name)
def define_ml2_tables(self, metadata):
sa.Table(
'arista_provisioned_nets', metadata,
sa.Column('tenant_id', sa.String(length=255), nullable=True),
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('network_id', sa.String(length=36), nullable=True),
sa.Column('segmentation_id', sa.Integer(),
autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id'),
)
sa.Table(
'arista_provisioned_vms', metadata,
sa.Column('tenant_id', sa.String(length=255), nullable=True),
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('vm_id', sa.String(length=255), nullable=True),
sa.Column('host_id', sa.String(length=255), nullable=True),
sa.Column('port_id', sa.String(length=36), nullable=True),
sa.Column('network_id', sa.String(length=36), nullable=True),
sa.PrimaryKeyConstraint('id'),
)
sa.Table(
'arista_provisioned_tenants', metadata,
sa.Column('tenant_id', sa.String(length=255), nullable=True),
sa.Column('id', sa.String(length=36), nullable=False),
sa.PrimaryKeyConstraint('id'),
)
sa.Table(
'cisco_ml2_nexusport_bindings', metadata,
sa.Column('binding_id', sa.Integer(), nullable=False),
sa.Column('port_id', sa.String(length=255), nullable=True),
sa.Column('vlan_id', sa.Integer(), autoincrement=False,
nullable=False),
sa.Column('switch_ip', sa.String(length=255), nullable=True),
sa.Column('instance_id', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('binding_id'),
)
sa.Table(
'cisco_ml2_credentials', metadata,
sa.Column('credential_id', sa.String(length=255), nullable=True),
sa.Column('tenant_id', sa.String(length=255), nullable=False),
sa.Column('credential_name', sa.String(length=255),
nullable=False),
sa.Column('user_name', sa.String(length=255), nullable=True),
sa.Column('password', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('tenant_id', 'credential_name'),
)
sa.Table(
'ml2_flat_allocations', metadata,
sa.Column('physical_network', sa.String(length=64),
nullable=False),
sa.PrimaryKeyConstraint('physical_network'),
)
sa.Table(
'ml2_gre_allocations', metadata,
sa.Column('gre_id', sa.Integer, nullable=False,
autoincrement=False),
sa.Column('allocated', sa.Boolean, nullable=False),
sa.PrimaryKeyConstraint('gre_id'),
)
sa.Table(
'ml2_gre_endpoints', metadata,
sa.Column('ip_address', sa.String(length=64)),
sa.PrimaryKeyConstraint('ip_address'),
)
sa.Table(
'ml2_network_segments', metadata,
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('network_id', sa.String(length=36), nullable=False),
sa.Column('network_type', sa.String(length=32), nullable=False),
sa.Column('physical_network', sa.String(length=64), nullable=True),
sa.Column('segmentation_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['network_id'], ['networks.id'],
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
)
sa.Table(
'ml2_port_bindings', metadata,
sa.Column('port_id', sa.String(length=36), nullable=False),
sa.Column('host', sa.String(length=255), nullable=False),
sa.Column('vif_type', sa.String(length=64), nullable=False),
sa.Column('driver', sa.String(length=64), nullable=True),
sa.Column('segment', sa.String(length=36), nullable=True),
sa.Column('vnic_type', sa.String(length=64), nullable=False,
server_default='normal'),
sa.Column('vif_details', sa.String(4095), nullable=False,
server_default=''),
sa.Column('profile', sa.String(4095), nullable=False,
server_default=''),
sa.ForeignKeyConstraint(['port_id'], ['ports.id'],
ondelete='CASCADE'),
sa.ForeignKeyConstraint(['segment'], ['ml2_network_segments.id'],
ondelete='SET NULL'),
sa.PrimaryKeyConstraint('port_id'),
)
sa.Table(
'ml2_vlan_allocations', metadata,
sa.Column('physical_network', sa.String(length=64),
nullable=False),
sa.Column('vlan_id', sa.Integer(), autoincrement=False,
nullable=False),
sa.Column('allocated', sa.Boolean(), autoincrement=False,
nullable=False),
sa.PrimaryKeyConstraint('physical_network', 'vlan_id'),
)
sa.Table(
'ml2_vxlan_allocations', metadata,
sa.Column('vxlan_vni', sa.Integer, nullable=False,
autoincrement=False),
sa.Column('allocated', sa.Boolean, nullable=False),
sa.PrimaryKeyConstraint('vxlan_vni'),
)
sa.Table(
'ml2_vxlan_endpoints', metadata,
sa.Column('ip_address', sa.String(length=64)),
sa.Column('udp_port', sa.Integer(), nullable=False,
autoincrement=False),
sa.PrimaryKeyConstraint('ip_address', 'udp_port'),
)
class MigrateLinuxBridgeToMl2_Juno(BaseMigrateToMl2):
def __init__(self):
super(MigrateLinuxBridgeToMl2_Juno, self).__init__(
vif_type=portbindings.VIF_TYPE_BRIDGE,
driver_type=LINUXBRIDGE,
segment_table_name='network_bindings',
vlan_allocation_table_name='network_states',
old_tables=['portbindingports'])
def migrate_segment_dict(self, binding):
super(MigrateLinuxBridgeToMl2_Juno, self).migrate_segment_dict(
binding)
vlan_id = binding.pop('vlan_id')
network_type, segmentation_id = interpret_vlan_id(vlan_id)
binding['network_type'] = network_type
binding['segmentation_id'] = segmentation_id
class MigrateHyperVPluginToMl2_Juno(BaseMigrateToMl2):
def __init__(self):
super(MigrateHyperVPluginToMl2_Juno, self).__init__(
vif_type=portbindings.VIF_TYPE_HYPERV,
driver_type=HYPERV,
segment_table_name='hyperv_network_bindings',
vlan_allocation_table_name='hyperv_vlan_allocations',
old_tables=['portbindingports'])
def migrate_segment_dict(self, binding):
super(MigrateHyperVPluginToMl2_Juno, self).migrate_segment_dict(
binding)
# the 'hyperv_network_bindings' table has the column
# 'segmentation_id' instead of 'vlan_id'.
vlan_id = binding.pop('segmentation_id')
network_type, segmentation_id = interpret_vlan_id(vlan_id)
binding['network_type'] = network_type
binding['segmentation_id'] = segmentation_id
class MigrateOpenvswitchToMl2_Juno(BaseMigrateToMl2):
def __init__(self):
super(MigrateOpenvswitchToMl2_Juno, self).__init__(
vif_type=portbindings.VIF_TYPE_OVS,
driver_type=OPENVSWITCH,
segment_table_name='ovs_network_bindings',
vlan_allocation_table_name='ovs_vlan_allocations',
old_tables=[
'ovs_tunnel_allocations',
'ovs_tunnel_endpoints',
'portbindingports',
])
def migrate_tunnels(self, engine, tunnel_type, vxlan_udp_port=None):
if tunnel_type == p_const.TYPE_GRE:
engine.execute("""
INSERT INTO ml2_gre_allocations
SELECT tunnel_id as gre_id, allocated
FROM ovs_tunnel_allocations
WHERE allocated = TRUE
""")
engine.execute("""
INSERT INTO ml2_gre_endpoints
SELECT ip_address
FROM ovs_tunnel_endpoints
""")
elif tunnel_type == p_const.TYPE_VXLAN:
if not vxlan_udp_port:
vxlan_udp_port = type_vxlan.VXLAN_UDP_PORT
engine.execute("""
INSERT INTO ml2_vxlan_allocations
SELECT tunnel_id as vxlan_vni, allocated
FROM ovs_tunnel_allocations
WHERE allocated = TRUE
""")
engine.execute(sa.text("""
INSERT INTO ml2_vxlan_endpoints
SELECT ip_address, :udp_port as udp_port
FROM ovs_tunnel_endpoints
"""), udp_port=vxlan_udp_port)
else:
raise ValueError(_('Unknown tunnel type: %s') % tunnel_type)
class MigrateLinuxBridgeToMl2_Icehouse(MigrateLinuxBridgeToMl2_Juno,
BaseMigrateToMl2_IcehouseMixin):
pass
class MigrateOpenvswitchToMl2_Icehouse(MigrateOpenvswitchToMl2_Juno,
BaseMigrateToMl2_IcehouseMixin):
pass
class MigrateHyperVPluginToMl2_Icehouse(MigrateHyperVPluginToMl2_Juno,
BaseMigrateToMl2_IcehouseMixin):
pass
migrate_map = {
ICEHOUSE: {
OPENVSWITCH: MigrateOpenvswitchToMl2_Icehouse,
LINUXBRIDGE: MigrateLinuxBridgeToMl2_Icehouse,
HYPERV: MigrateHyperVPluginToMl2_Icehouse,
},
JUNO: {
OPENVSWITCH: MigrateOpenvswitchToMl2_Juno,
LINUXBRIDGE: MigrateLinuxBridgeToMl2_Juno,
HYPERV: MigrateHyperVPluginToMl2_Juno,
},
}
def main():
parser = argparse.ArgumentParser()
parser.add_argument('plugin', choices=[OPENVSWITCH, LINUXBRIDGE, HYPERV],
help=_('The plugin type whose database will be '
'migrated'))
parser.add_argument('connection',
help=_('The connection url for the target db'))
parser.add_argument('--tunnel-type', choices=[p_const.TYPE_GRE,
p_const.TYPE_VXLAN],
help=_('The %s tunnel type to migrate from') %
OPENVSWITCH)
parser.add_argument('--vxlan-udp-port', default=None, type=int,
help=_('The UDP port to use for VXLAN tunnels.'))
parser.add_argument('--release', default=JUNO, choices=[ICEHOUSE, JUNO])
parser.add_argument('--save-tables', default=False, action='store_true',
help=_("Retain the old plugin's tables"))
#TODO(marun) Provide a verbose option
args = parser.parse_args()
if args.plugin in [LINUXBRIDGE, HYPERV] and (args.tunnel_type or
args.vxlan_udp_port):
msg = _('Tunnel args (tunnel-type and vxlan-udp-port) are not valid '
'for the %s plugin')
parser.error(msg % args.plugin)
try:
migrate_func = migrate_map[args.release][args.plugin]()
except KeyError:
msg = _('Support for migrating %(plugin)s for release '
'%(release)s is not yet implemented')
parser.error(msg % {'plugin': args.plugin, 'release': args.release})
else:
migrate_func(args.connection, args.save_tables, args.tunnel_type,
args.vxlan_udp_port)
if __name__ == '__main__':
main()
| rdo-management/neutron | neutron/db/migration/migrate_to_ml2.py | Python | apache-2.0 | 20,776 |
""" Testing array writer objects
Array writers have init signature::
def __init__(self, array, out_dtype=None, order='F')
and methods
* to_fileobj(fileobj, offset=None)
They do have attributes:
* array
* out_dtype
* order
They may have attributes:
* slope
* inter
They are designed to write arrays to a fileobj with reasonable memory
efficiency.
Subclasses of array writers may be able to scale the array or apply an
intercept, or do something else to make sense of conversions between float and
int, or between larger ints and smaller.
"""
from platform import python_compiler, machine
import numpy as np
from ..py3k import BytesIO
from ..arraywriters import (SlopeInterArrayWriter, SlopeArrayWriter,
WriterError, ScalingError, ArrayWriter,
make_array_writer, get_slope_inter)
from ..casting import int_abs, type_info
from ..volumeutils import array_from_file, apply_read_scaling
from numpy.testing import (assert_array_almost_equal,
assert_array_equal)
from nose.tools import (assert_true, assert_false,
assert_equal, assert_not_equal,
assert_raises)
FLOAT_TYPES = np.sctypes['float']
COMPLEX_TYPES = np.sctypes['complex']
INT_TYPES = np.sctypes['int']
UINT_TYPES = np.sctypes['uint']
CFLOAT_TYPES = FLOAT_TYPES + COMPLEX_TYPES
IUINT_TYPES = INT_TYPES + UINT_TYPES
NUMERIC_TYPES = CFLOAT_TYPES + IUINT_TYPES
def round_trip(writer, order='F', nan2zero=True, apply_scale=True):
sio = BytesIO()
arr = writer.array
writer.to_fileobj(sio, order, nan2zero=nan2zero)
data_back = array_from_file(arr.shape, writer.out_dtype, sio, order=order)
slope, inter = get_slope_inter(writer)
if apply_scale:
data_back = apply_read_scaling(data_back, slope, inter)
return data_back
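# round_trip pushes the array through the writer into a buffer, reads it back
# with array_from_file, and (optionally) applies the slope/intercept so tests
# can compare directly against the original values.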
def test_arraywriters():
# Test initialize
# Simple cases
if machine() == 'sparc64' and python_compiler().startswith('GCC'):
# bus errors on at least np 1.4.1 through 1.6.1 for complex
test_types = FLOAT_TYPES + IUINT_TYPES
else:
test_types = NUMERIC_TYPES
for klass in (SlopeInterArrayWriter, SlopeArrayWriter, ArrayWriter):
for type in test_types:
arr = np.arange(10, dtype=type)
aw = klass(arr)
assert_true(aw.array is arr)
assert_equal(aw.out_dtype, arr.dtype)
assert_array_equal(arr, round_trip(aw))
# Byteswapped is OK
bs_arr = arr.byteswap().newbyteorder('S')
bs_aw = klass(bs_arr)
# assert against original array because POWER7 was running into
# trouble using the byteswapped array (bs_arr)
assert_array_equal(arr, round_trip(bs_aw))
bs_aw2 = klass(bs_arr, arr.dtype)
assert_array_equal(arr, round_trip(bs_aw2))
# 2D array
arr2 = np.reshape(arr, (2, 5))
a2w = klass(arr2)
# Default out - in order is Fortran
arr_back = round_trip(a2w)
assert_array_equal(arr2, arr_back)
arr_back = round_trip(a2w, 'F')
assert_array_equal(arr2, arr_back)
# C order works as well
arr_back = round_trip(a2w, 'C')
assert_array_equal(arr2, arr_back)
assert_true(arr_back.flags.c_contiguous)
def test_scaling_needed():
# Structured types return True if dtypes same, raise error otherwise
dt_def = [('f', 'i4')]
arr = np.ones(10, dt_def)
for t in NUMERIC_TYPES:
assert_raises(WriterError, ArrayWriter, arr, t)
narr = np.ones(10, t)
assert_raises(WriterError, ArrayWriter, narr, dt_def)
assert_false(ArrayWriter(arr).scaling_needed())
assert_false(ArrayWriter(arr, dt_def).scaling_needed())
# Any numeric type that can cast, needs no scaling
for in_t in NUMERIC_TYPES:
for out_t in NUMERIC_TYPES:
if np.can_cast(in_t, out_t):
aw = ArrayWriter(np.ones(10, in_t), out_t)
assert_false(aw.scaling_needed())
for in_t in NUMERIC_TYPES:
# Numeric types to complex never need scaling
arr = np.ones(10, in_t)
for out_t in COMPLEX_TYPES:
assert_false(ArrayWriter(arr, out_t).scaling_needed())
# Attempts to scale from complex to anything else fails
for in_t in COMPLEX_TYPES:
for out_t in FLOAT_TYPES + IUINT_TYPES:
arr = np.ones(10, in_t)
assert_raises(WriterError, ArrayWriter, arr, out_t)
# Scaling from anything but complex to floats is OK
for in_t in FLOAT_TYPES + IUINT_TYPES:
arr = np.ones(10, in_t)
for out_t in FLOAT_TYPES:
assert_false(ArrayWriter(arr, out_t).scaling_needed())
# For any other output type, arrays with no data don't need scaling
for in_t in FLOAT_TYPES + IUINT_TYPES:
arr_0 = np.zeros(10, in_t)
arr_e = []
for out_t in IUINT_TYPES:
assert_false(ArrayWriter(arr_0, out_t).scaling_needed())
assert_false(ArrayWriter(arr_e, out_t).scaling_needed())
# Going to (u)ints, non-finite arrays don't need scaling
for in_t in FLOAT_TYPES:
arr_nan = np.zeros(10, in_t) + np.nan
arr_inf = np.zeros(10, in_t) + np.inf
arr_minf = np.zeros(10, in_t) - np.inf
arr_mix = np.array([np.nan, np.inf, -np.inf], dtype=in_t)
for out_t in IUINT_TYPES:
for arr in (arr_nan, arr_inf, arr_minf, arr_mix):
assert_false(ArrayWriter(arr, out_t).scaling_needed())
# Floats as input always need scaling
for in_t in FLOAT_TYPES:
arr = np.ones(10, in_t)
for out_t in IUINT_TYPES:
# We need an arraywriter that will tolerate construction when
# scaling is needed
assert_true(SlopeArrayWriter(arr, out_t).scaling_needed())
# in-range (u)ints don't need scaling
for in_t in IUINT_TYPES:
in_info = np.iinfo(in_t)
in_min, in_max = in_info.min, in_info.max
for out_t in IUINT_TYPES:
out_info = np.iinfo(out_t)
out_min, out_max = out_info.min, out_info.max
if in_min >= out_min and in_max <= out_max:
arr = np.array([in_min, in_max], in_t)
assert_true(np.can_cast(arr.dtype, out_t))
# We've already tested this with can_cast above, but...
assert_false(ArrayWriter(arr, out_t).scaling_needed())
continue
# The output data type does not include the input data range
max_min = max(in_min, out_min) # 0 for input or output uint
min_max = min(in_max, out_max)
arr = np.array([max_min, min_max], in_t)
assert_false(ArrayWriter(arr, out_t).scaling_needed())
assert_true(SlopeInterArrayWriter(arr + 1, out_t).scaling_needed())
if in_t in INT_TYPES:
assert_true(SlopeInterArrayWriter(arr - 1, out_t).scaling_needed())
def test_special_rt():
# Test that zeros; none finite - round trip to zeros
for arr in (np.array([np.inf, np.nan, -np.inf]),
np.zeros((3,))):
for in_dtt in FLOAT_TYPES:
for out_dtt in IUINT_TYPES:
for klass in (ArrayWriter, SlopeArrayWriter,
SlopeInterArrayWriter):
aw = klass(arr.astype(in_dtt), out_dtt)
assert_equal(get_slope_inter(aw), (1, 0))
assert_array_equal(round_trip(aw), 0)
def test_high_int2uint():
# Need to take care of high values when testing whether values are already
    # in range. There was a bug here where the comparison was in floating point,
# and therefore not exact, and 2**63 appeared to be in range for np.int64
arr = np.array([2**63], dtype=np.uint64)
out_type = np.int64
aw = SlopeInterArrayWriter(arr, out_type)
assert_equal(aw.inter, 2**63)
def test_slope_inter_castable():
# Test scaling for arraywriter instances
# Test special case of all zeros
for in_dtt in FLOAT_TYPES + IUINT_TYPES:
for out_dtt in NUMERIC_TYPES:
for klass in (ArrayWriter, SlopeArrayWriter, SlopeInterArrayWriter):
arr = np.zeros((5,), dtype=in_dtt)
aw = klass(arr, out_dtt) # no error
# Test special case of none finite
arr = np.array([np.inf, np.nan, -np.inf])
for in_dtt in FLOAT_TYPES:
for out_dtt in FLOAT_TYPES + IUINT_TYPES:
for klass in (ArrayWriter, SlopeArrayWriter, SlopeInterArrayWriter):
aw = klass(arr.astype(in_dtt), out_dtt) # no error
for in_dtt, out_dtt, arr, slope_only, slope_inter, neither in (
(np.float32, np.float32, 1, True, True, True),
(np.float64, np.float32, 1, True, True, True),
(np.float32, np.complex128, 1, True, True, True),
(np.uint32, np.complex128, 1, True, True, True),
(np.int64, np.float32, 1, True, True, True),
(np.float32, np.int16, 1, True, True, False),
(np.complex128, np.float32, 1, False, False, False),
(np.complex128, np.int16, 1, False, False, False),
(np.uint8, np.int16, 1, True, True, True),
# The following tests depend on the input data
(np.uint16, np.int16, 1, True, True, True), # 1 is in range
        (np.uint16, np.int16, 2**16-1, True, True, False), # This one is not in range
(np.uint16, np.int16, (0, 2**16-1), True, True, False),
(np.uint16, np.uint8, 1, True, True, True),
(np.int16, np.uint16, 1, True, True, True), # in range
(np.int16, np.uint16, -1, True, True, False), # flip works for scaling
(np.int16, np.uint16, (-1, 1), False, True, False), # not with +-
(np.int8, np.uint16, 1, True, True, True), # in range
(np.int8, np.uint16, -1, True, True, False), # flip works for scaling
(np.int8, np.uint16, (-1, 1), False, True, False), # not with +-
):
# data for casting
data = np.array(arr, dtype=in_dtt)
# With scaling but no intercept
if slope_only:
aw = SlopeArrayWriter(data, out_dtt)
else:
assert_raises(WriterError, SlopeArrayWriter, data, out_dtt)
# With scaling and intercept
if slope_inter:
aw = SlopeInterArrayWriter(data, out_dtt)
else:
assert_raises(WriterError, SlopeInterArrayWriter, data, out_dtt)
# With neither
if neither:
aw = ArrayWriter(data, out_dtt)
else:
assert_raises(WriterError, ArrayWriter, data, out_dtt)
def test_calculate_scale():
# Test for special cases in scale calculation
npa = np.array
SIAW = SlopeInterArrayWriter
SAW = SlopeArrayWriter
# Offset handles scaling when it can
aw = SIAW(npa([-2, -1], dtype=np.int8), np.uint8)
assert_equal(get_slope_inter(aw), (1.0, -2.0))
# Sign flip handles these cases
aw = SAW(npa([-2, -1], dtype=np.int8), np.uint8)
assert_equal(get_slope_inter(aw), (-1.0, 0.0))
aw = SAW(npa([-2, 0], dtype=np.int8), np.uint8)
assert_equal(get_slope_inter(aw), (-1.0, 0.0))
# But not when min magnitude is too large (scaling mechanism kicks in)
aw = SAW(npa([-510, 0], dtype=np.int16), np.uint8)
assert_equal(get_slope_inter(aw), (-2.0, 0.0))
# Or for floats (attempts to expand across range)
aw = SAW(npa([-2, 0], dtype=np.float32), np.uint8)
assert_not_equal(get_slope_inter(aw), (-1.0, 0.0))
# Case where offset handles scaling
aw = SIAW(npa([-1, 1], dtype=np.int8), np.uint8)
assert_equal(get_slope_inter(aw), (1.0, -1.0))
# Can't work for no offset case
assert_raises(WriterError, SAW, npa([-1, 1], dtype=np.int8), np.uint8)
# Offset trick can't work when max is out of range
aw = SIAW(npa([-1, 255], dtype=np.int16), np.uint8)
slope_inter = get_slope_inter(aw)
assert_not_equal(slope_inter, (1.0, -1.0))
def test_resets():
# Test reset of values, caching of scales
for klass, inp, outp in ((SlopeInterArrayWriter, (1, 511), (2.0, 1.0)),
(SlopeArrayWriter, (0, 510), (2.0, 0.0))):
arr = np.array(inp)
outp = np.array(outp)
aw = klass(arr, np.uint8)
assert_array_equal(get_slope_inter(aw), outp)
aw.calc_scale() # cached no change
assert_array_equal(get_slope_inter(aw), outp)
aw.calc_scale(force=True) # same data, no change
assert_array_equal(get_slope_inter(aw), outp)
# Change underlying array
aw.array[:] = aw.array * 2
aw.calc_scale() # cached still
assert_array_equal(get_slope_inter(aw), outp)
aw.calc_scale(force=True) # new data, change
assert_array_equal(get_slope_inter(aw), outp * 2)
# Test reset
aw.reset()
assert_array_equal(get_slope_inter(aw), (1.0, 0.0))
def test_no_offset_scale():
# Specific tests of no-offset scaling
SAW = SlopeArrayWriter
# Floating point
for data in ((-128, 127),
(-128, 126),
(-128, -127),
(-128, 0),
(-128, -1),
(126, 127),
(-127, 127)):
aw = SAW(np.array(data, dtype=np.float32), np.int8)
assert_equal(aw.slope, 1.0)
aw = SAW(np.array([-126, 127 * 2.0], dtype=np.float32), np.int8)
assert_equal(aw.slope, 2)
aw = SAW(np.array([-128 * 2.0, 127], dtype=np.float32), np.int8)
assert_equal(aw.slope, 2)
    # Test that the nasty behavior of abs on the most negative int does not upset us
n = -2**15
aw = SAW(np.array([n, n], dtype=np.int16), np.uint8)
assert_array_almost_equal(aw.slope, n / 255.0, 5)
def test_with_offset_scale():
# Tests of specific cases in slope, inter
SIAW = SlopeInterArrayWriter
aw = SIAW(np.array([0, 127], dtype=np.int8), np.uint8)
assert_equal((aw.slope, aw.inter), (1, 0)) # in range
aw = SIAW(np.array([-1, 126], dtype=np.int8), np.uint8)
assert_equal((aw.slope, aw.inter), (1, -1)) # offset only
aw = SIAW(np.array([-1, 254], dtype=np.int16), np.uint8)
assert_equal((aw.slope, aw.inter), (1, -1)) # offset only
aw = SIAW(np.array([-1, 255], dtype=np.int16), np.uint8)
assert_not_equal((aw.slope, aw.inter), (1, -1)) # Too big for offset only
aw = SIAW(np.array([-256, -2], dtype=np.int16), np.uint8)
assert_equal((aw.slope, aw.inter), (1, -256)) # offset only
aw = SIAW(np.array([-256, -2], dtype=np.int16), np.int8)
assert_equal((aw.slope, aw.inter), (1, -129)) # offset only
def test_io_scaling():
# Test scaling works for max, min when going from larger to smaller type,
# and from float to integer.
bio = BytesIO()
for in_type, out_type, err in ((np.int16, np.int16, None),
(np.int16, np.int8, None),
(np.uint16, np.uint8, None),
(np.int32, np.int8, None),
(np.float32, np.uint8, None),
(np.float32, np.int16, None)):
out_dtype = np.dtype(out_type)
arr = np.zeros((3,), dtype=in_type)
info = type_info(in_type)
arr[0], arr[1] = info['min'], info['max']
aw = SlopeInterArrayWriter(arr, out_dtype, calc_scale=False)
        if err is not None:
assert_raises(err, aw.calc_scale)
continue
aw.calc_scale()
aw.to_fileobj(bio)
bio.seek(0)
arr2 = array_from_file(arr.shape, out_dtype, bio)
arr3 = apply_read_scaling(arr2, aw.slope, aw.inter)
# Max rounding error for integer type
max_miss = aw.slope / 2.
assert_true(np.all(np.abs(arr - arr3) <= max_miss))
bio.truncate(0)
bio.seek(0)
def test_nan2zero():
# Test conditions under which nans written to zero
arr = np.array([np.nan, 99.], dtype=np.float32)
aw = SlopeInterArrayWriter(arr, np.float32)
data_back = round_trip(aw)
assert_array_equal(np.isnan(data_back), [True, False])
# nan2zero ignored for floats
data_back = round_trip(aw, nan2zero=True)
assert_array_equal(np.isnan(data_back), [True, False])
# Integer output with nan2zero gives zero
aw = SlopeInterArrayWriter(arr, np.int32)
data_back = round_trip(aw, nan2zero=True)
assert_array_equal(data_back, [0, 99])
# Integer output with nan2zero=False gives whatever astype gives
data_back = round_trip(aw, nan2zero=False)
astype_res = np.array(np.nan).astype(np.int32) * aw.slope + aw.inter
assert_array_equal(data_back, [astype_res, 99])
def test_byte_orders():
arr = np.arange(10, dtype=np.int32)
# Test endian read/write of types not requiring scaling
for tp in (np.uint64, np.float, np.complex):
dt = np.dtype(tp)
for code in '<>':
ndt = dt.newbyteorder(code)
for klass in (SlopeInterArrayWriter, SlopeArrayWriter,
ArrayWriter):
aw = klass(arr, ndt)
data_back = round_trip(aw)
assert_array_almost_equal(arr, data_back)
def test_writers_roundtrip():
ndt = np.dtype(np.float)
arr = np.arange(3, dtype=ndt)
# intercept
aw = SlopeInterArrayWriter(arr, ndt, calc_scale=False)
aw.inter = 1.0
data_back = round_trip(aw)
assert_array_equal(data_back, arr)
# scaling
aw.slope = 2.0
data_back = round_trip(aw)
assert_array_equal(data_back, arr)
# if there is no valid data, we get zeros
aw = SlopeInterArrayWriter(arr + np.nan, np.int32)
data_back = round_trip(aw)
assert_array_equal(data_back, np.zeros(arr.shape))
# infs generate ints at same value as max
arr[0] = np.inf
aw = SlopeInterArrayWriter(arr, np.int32)
data_back = round_trip(aw)
assert_array_almost_equal(data_back, [2, 1, 2])
def test_to_float():
start, stop = 0, 100
for in_type in NUMERIC_TYPES:
step = 1 if in_type in IUINT_TYPES else 0.5
info = type_info(in_type)
mn, mx = info['min'], info['max']
arr = np.arange(start, stop, step, dtype=in_type)
arr[0] = mn
arr[-1] = mx
for out_type in CFLOAT_TYPES:
out_info = type_info(out_type)
for klass in (SlopeInterArrayWriter, SlopeArrayWriter,
ArrayWriter):
if in_type in COMPLEX_TYPES and out_type in FLOAT_TYPES:
assert_raises(WriterError, klass, arr, out_type)
continue
aw = klass(arr, out_type)
assert_true(aw.array is arr)
assert_equal(aw.out_dtype, out_type)
arr_back = round_trip(aw)
assert_array_equal(arr.astype(out_type), arr_back)
# Check too-big values overflowed correctly
out_min, out_max = out_info['min'], out_info['max']
assert_true(np.all(arr_back[arr > out_max] == np.inf))
assert_true(np.all(arr_back[arr < out_min] == -np.inf))
def test_dumber_writers():
arr = np.arange(10, dtype=np.float64)
aw = SlopeArrayWriter(arr)
aw.slope = 2.0
assert_equal(aw.slope, 2.0)
assert_raises(AttributeError, getattr, aw, 'inter')
aw = ArrayWriter(arr)
assert_raises(AttributeError, getattr, aw, 'slope')
assert_raises(AttributeError, getattr, aw, 'inter')
# Attempt at scaling should raise error for dumb type
assert_raises(WriterError, ArrayWriter, arr, np.int16)
def test_writer_maker():
arr = np.arange(10, dtype=np.float64)
aw = make_array_writer(arr, np.float64)
assert_true(isinstance(aw, SlopeInterArrayWriter))
aw = make_array_writer(arr, np.float64, True, True)
assert_true(isinstance(aw, SlopeInterArrayWriter))
aw = make_array_writer(arr, np.float64, True, False)
assert_true(isinstance(aw, SlopeArrayWriter))
aw = make_array_writer(arr, np.float64, False, False)
assert_true(isinstance(aw, ArrayWriter))
assert_raises(ValueError, make_array_writer, arr, np.float64, False)
assert_raises(ValueError, make_array_writer, arr, np.float64, False, True)
# Does calc_scale get run by default?
aw = make_array_writer(arr, np.int16, calc_scale=False)
assert_equal((aw.slope, aw.inter), (1, 0))
aw.calc_scale()
slope, inter = aw.slope, aw.inter
assert_false((slope, inter) == (1, 0))
# Should run by default
aw = make_array_writer(arr, np.int16)
assert_equal((aw.slope, aw.inter), (slope, inter))
aw = make_array_writer(arr, np.int16, calc_scale=True)
assert_equal((aw.slope, aw.inter), (slope, inter))
def test_float_int_min_max():
# Conversion between float and int
for in_dt in FLOAT_TYPES:
finf = type_info(in_dt)
arr = np.array([finf['min'], finf['max']], dtype=in_dt)
# Bug in numpy 1.6.2 on PPC leading to infs - abort
if not np.all(np.isfinite(arr)):
print 'Hit PPC max -> inf bug; skip in_type %s' % in_dt
continue
for out_dt in IUINT_TYPES:
try:
aw = SlopeInterArrayWriter(arr, out_dt)
except ScalingError:
continue
arr_back_sc = round_trip(aw)
assert_true(np.allclose(arr, arr_back_sc))
def test_int_int_min_max():
# Conversion between (u)int and (u)int
eps = np.finfo(np.float64).eps
rtol = 1e-6
for in_dt in IUINT_TYPES:
iinf = np.iinfo(in_dt)
arr = np.array([iinf.min, iinf.max], dtype=in_dt)
for out_dt in IUINT_TYPES:
try:
aw = SlopeInterArrayWriter(arr, out_dt)
except ScalingError:
continue
arr_back_sc = round_trip(aw)
# integer allclose
adiff = int_abs(arr - arr_back_sc)
rdiff = adiff / (arr + eps)
assert_true(np.all(rdiff < rtol))
def test_int_int_slope():
# Conversion between (u)int and (u)int for slopes only
eps = np.finfo(np.float64).eps
rtol = 1e-7
for in_dt in IUINT_TYPES:
iinf = np.iinfo(in_dt)
for out_dt in IUINT_TYPES:
kinds = np.dtype(in_dt).kind + np.dtype(out_dt).kind
if kinds in ('ii', 'uu', 'ui'):
arrs = (np.array([iinf.min, iinf.max], dtype=in_dt),)
elif kinds == 'iu':
arrs = (np.array([iinf.min, 0], dtype=in_dt),
np.array([0, iinf.max], dtype=in_dt))
for arr in arrs:
try:
aw = SlopeArrayWriter(arr, out_dt)
except ScalingError:
continue
assert_false(aw.slope == 0)
arr_back_sc = round_trip(aw)
# integer allclose
adiff = int_abs(arr - arr_back_sc)
rdiff = adiff / (arr + eps)
assert_true(np.all(rdiff < rtol))
def test_float_int_spread():
# Test rounding error for spread of values
powers = np.arange(-10, 10, 0.5)
arr = np.concatenate((-10**powers, 10**powers))
for in_dt in (np.float32, np.float64):
arr_t = arr.astype(in_dt)
for out_dt in IUINT_TYPES:
aw = SlopeInterArrayWriter(arr_t, out_dt)
arr_back_sc = round_trip(aw)
# Get estimate for error
max_miss = rt_err_estimate(arr_t,
arr_back_sc.dtype,
aw.slope,
aw.inter)
# Simulate allclose test with large atol
diff = np.abs(arr_t - arr_back_sc)
rdiff = diff / np.abs(arr_t)
assert_true(np.all((diff <= max_miss) | (rdiff <= 1e-5)))
def rt_err_estimate(arr_t, out_dtype, slope, inter):
# Error attributable to rounding
max_int_miss = slope / 2.
# Estimate error attributable to floating point slope / inter;
# Remove inter / slope, put in a float type to simulate the type
# promotion for the multiplication, apply slope / inter
flt_there = (arr_t - inter) / slope
flt_back = flt_there.astype(out_dtype) * slope + inter
max_flt_miss = np.abs(arr_t - flt_back).max()
# Max error is sum of rounding and fp error
return max_int_miss + max_flt_miss
def test_rt_bias():
# Check for bias in round trip
rng = np.random.RandomState(20111214)
mu, std, count = 100, 10, 100
arr = rng.normal(mu, std, size=(count,))
eps = np.finfo(np.float32).eps
for in_dt in (np.float32, np.float64):
arr_t = arr.astype(in_dt)
for out_dt in IUINT_TYPES:
aw = SlopeInterArrayWriter(arr_t, out_dt)
arr_back_sc = round_trip(aw)
bias = np.mean(arr_t - arr_back_sc)
# Get estimate for error
max_miss = rt_err_estimate(arr_t,
arr_back_sc.dtype,
aw.slope,
aw.inter)
# Hokey use of max_miss as a std estimate
bias_thresh = np.max([max_miss / np.sqrt(count), eps])
assert_true(np.abs(bias) < bias_thresh)
| ME-ICA/me-ica | meica.libs/nibabel/tests/test_arraywriters.py | Python | lgpl-2.1 | 25,258 |
#from distutils.core import setup
from setuptools import setup
setup(name='F_UNCLE',
version='1.0',
description='Functional UNcertainty Constrained by Law and Experiment',
author='Stephen A Andrews and Andrew M Fraser',
author_email='[email protected]',
packages=['F_UNCLE'],
)
| fraserphysics/F_UNCLE | setup.py | Python | gpl-2.0 | 314 |
#! /usr/bin/python
# -*- coding: utf-8 -*-
print "Content-Type: text/plain ; charset=utf-8\n"
print "Une page de texte brut en CGI-Python"
| remilap/Raspberry_dev | www/cgi-bin/testcgi.py | Python | gpl-3.0 | 143 |
"""
Test the about xblock
"""
import datetime
import pytz
from ccx_keys.locator import CCXLocator
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from mock import patch
from nose.plugins.attrib import attr
from course_modes.models import CourseMode
from track.tests import EventTrackingTestCase
from xmodule.modulestore.tests.django_utils import TEST_DATA_MIXED_MODULESTORE
from xmodule.modulestore.tests.utils import TEST_DATA_DIR
from xmodule.modulestore.xml_importer import import_course_from_xml
from student.models import CourseEnrollment
from student.tests.factories import AdminFactory, CourseEnrollmentAllowedFactory, UserFactory
from shoppingcart.models import Order, PaidCourseRegistration
from xmodule.course_module import CATALOG_VISIBILITY_ABOUT, CATALOG_VISIBILITY_NONE
from xmodule.modulestore.tests.django_utils import (
ModuleStoreTestCase,
SharedModuleStoreTestCase,
TEST_DATA_SPLIT_MODULESTORE
)
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from util.milestones_helpers import (
set_prerequisite_courses,
get_prerequisite_courses_display,
)
from milestones.tests.utils import MilestonesTestCaseMixin
from lms.djangoapps.ccx.tests.factories import CcxFactory
from .helpers import LoginEnrollmentTestCase
# HTML for registration button
REG_STR = "<form id=\"class_enroll_form\" method=\"post\" data-remote=\"true\" action=\"/change_enrollment\">"
SHIB_ERROR_STR = "The currently logged-in user account does not have permission to enroll in this course."
@attr(shard=1)
class AboutTestCase(LoginEnrollmentTestCase, SharedModuleStoreTestCase, EventTrackingTestCase, MilestonesTestCaseMixin):
"""
Tests about xblock.
"""
@classmethod
def setUpClass(cls):
super(AboutTestCase, cls).setUpClass()
cls.course = CourseFactory.create()
cls.course_without_about = CourseFactory.create(catalog_visibility=CATALOG_VISIBILITY_NONE)
cls.course_with_about = CourseFactory.create(catalog_visibility=CATALOG_VISIBILITY_ABOUT)
cls.purchase_course = CourseFactory.create(org='MITx', number='buyme', display_name='Course To Buy')
cls.about = ItemFactory.create(
category="about", parent_location=cls.course.location,
data="OOGIE BLOOGIE", display_name="overview"
)
cls.about = ItemFactory.create(
category="about", parent_location=cls.course_without_about.location,
data="WITHOUT ABOUT", display_name="overview"
)
cls.about = ItemFactory.create(
category="about", parent_location=cls.course_with_about.location,
data="WITH ABOUT", display_name="overview"
)
def setUp(self):
super(AboutTestCase, self).setUp()
self.course_mode = CourseMode(
course_id=self.purchase_course.id,
mode_slug=CourseMode.DEFAULT_MODE_SLUG,
mode_display_name=CourseMode.DEFAULT_MODE_SLUG,
min_price=10
)
self.course_mode.save()
def test_anonymous_user(self):
"""
This test asserts that a non-logged in user can visit the course about page
"""
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("OOGIE BLOOGIE", resp.content)
# Check that registration button is present
self.assertIn(REG_STR, resp.content)
def test_logged_in(self):
"""
This test asserts that a logged-in user can visit the course about page
"""
self.setup_user()
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("OOGIE BLOOGIE", resp.content)
def test_already_enrolled(self):
"""
Asserts that the end user sees the appropriate messaging
when he/she visits the course about page, but is already enrolled
"""
self.setup_user()
self.enroll(self.course, True)
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("You are enrolled in this course", resp.content)
self.assertIn("View Course", resp.content)
@override_settings(COURSE_ABOUT_VISIBILITY_PERMISSION="see_about_page")
def test_visible_about_page_settings(self):
"""
Verify that the About Page honors the permission settings in the course module
"""
url = reverse('about_course', args=[self.course_with_about.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("WITH ABOUT", resp.content)
url = reverse('about_course', args=[self.course_without_about.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 404)
@patch.dict(settings.FEATURES, {'ENABLE_MKTG_SITE': True})
def test_logged_in_marketing(self):
self.setup_user()
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
# should be redirected
self.assertEqual(resp.status_code, 302)
# follow this time, and check we're redirected to the course info page
resp = self.client.get(url, follow=True)
target_url = resp.redirect_chain[-1][0]
info_url = reverse('info', args=[self.course.id.to_deprecated_string()])
self.assertTrue(target_url.endswith(info_url))
@patch.dict(settings.FEATURES, {'ENABLE_PREREQUISITE_COURSES': True})
def test_pre_requisite_course(self):
pre_requisite_course = CourseFactory.create(org='edX', course='900', display_name='pre requisite course')
course = CourseFactory.create(pre_requisite_courses=[unicode(pre_requisite_course.id)])
self.setup_user()
url = reverse('about_course', args=[unicode(course.id)])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
pre_requisite_courses = get_prerequisite_courses_display(course)
pre_requisite_course_about_url = reverse('about_course', args=[unicode(pre_requisite_courses[0]['key'])])
self.assertIn("<span class=\"important-dates-item-text pre-requisite\"><a href=\"{}\">{}</a></span>"
.format(pre_requisite_course_about_url, pre_requisite_courses[0]['display']),
resp.content.strip('\n'))
@patch.dict(settings.FEATURES, {'ENABLE_PREREQUISITE_COURSES': True})
def test_about_page_unfulfilled_prereqs(self):
pre_requisite_course = CourseFactory.create(
org='edX',
course='901',
display_name='pre requisite course',
)
pre_requisite_courses = [unicode(pre_requisite_course.id)]
# for this failure to occur, the enrollment window needs to be in the past
course = CourseFactory.create(
org='edX',
course='1000',
# closed enrollment
enrollment_start=datetime.datetime(2013, 1, 1),
enrollment_end=datetime.datetime(2014, 1, 1),
start=datetime.datetime(2013, 1, 1),
end=datetime.datetime(2030, 1, 1),
pre_requisite_courses=pre_requisite_courses,
)
set_prerequisite_courses(course.id, pre_requisite_courses)
self.setup_user()
self.enroll(self.course, True)
self.enroll(pre_requisite_course, True)
url = reverse('about_course', args=[unicode(course.id)])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
pre_requisite_courses = get_prerequisite_courses_display(course)
pre_requisite_course_about_url = reverse('about_course', args=[unicode(pre_requisite_courses[0]['key'])])
self.assertIn("<span class=\"important-dates-item-text pre-requisite\"><a href=\"{}\">{}</a></span>"
.format(pre_requisite_course_about_url, pre_requisite_courses[0]['display']),
resp.content.strip('\n'))
url = reverse('about_course', args=[unicode(pre_requisite_course.id)])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
@attr(shard=1)
class AboutTestCaseXML(LoginEnrollmentTestCase, ModuleStoreTestCase):
"""
Tests for the course about page
"""
MODULESTORE = TEST_DATA_MIXED_MODULESTORE
def setUp(self):
"""
Set up the tests
"""
super(AboutTestCaseXML, self).setUp()
# The following test course (which lives at common/test/data/2014)
# is closed; we're testing that an about page still appears when
# the course is already closed
self.xml_course_id = self.store.make_course_key('edX', 'detached_pages', '2014')
import_course_from_xml(
self.store,
'test_user',
TEST_DATA_DIR,
source_dirs=['2014'],
static_content_store=None,
target_id=self.xml_course_id,
raise_on_failure=True,
create_if_not_present=True,
)
# this text appears in that course's about page
# common/test/data/2014/about/overview.html
self.xml_data = "about page 463139"
@patch.dict('django.conf.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_logged_in_xml(self):
self.setup_user()
url = reverse('about_course', args=[self.xml_course_id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn(self.xml_data, resp.content)
@patch.dict('django.conf.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_anonymous_user_xml(self):
url = reverse('about_course', args=[self.xml_course_id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn(self.xml_data, resp.content)
@attr(shard=1)
class AboutWithCappedEnrollmentsTestCase(LoginEnrollmentTestCase, SharedModuleStoreTestCase):
"""
This test case will check the About page when a course has a capped enrollment
"""
@classmethod
def setUpClass(cls):
super(AboutWithCappedEnrollmentsTestCase, cls).setUpClass()
cls.course = CourseFactory.create(metadata={"max_student_enrollments_allowed": 1})
cls.about = ItemFactory.create(
category="about", parent_location=cls.course.location,
data="OOGIE BLOOGIE", display_name="overview"
)
def test_enrollment_cap(self):
"""
This test will make sure that enrollment caps are enforced
"""
self.setup_user()
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn('<a href="#" class="register">', resp.content)
self.enroll(self.course, verify=True)
# create a new account since the first account is already enrolled in the course
self.email = '[email protected]'
self.password = 'bar'
self.username = 'test_second'
self.create_account(self.username, self.email, self.password)
self.activate_user(self.email)
self.login(self.email, self.password)
# Get the about page again and make sure that the page says that the course is full
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Course is full", resp.content)
# Try to enroll as well
result = self.enroll(self.course)
self.assertFalse(result)
# Check that registration button is not present
self.assertNotIn(REG_STR, resp.content)
@attr(shard=1)
class AboutWithInvitationOnly(SharedModuleStoreTestCase):
"""
This test case will check the About page when a course is invitation only.
"""
@classmethod
def setUpClass(cls):
super(AboutWithInvitationOnly, cls).setUpClass()
cls.course = CourseFactory.create(metadata={"invitation_only": True})
cls.about = ItemFactory.create(
category="about", parent_location=cls.course.location,
display_name="overview"
)
def test_invitation_only(self):
"""
Test for user not logged in, invitation only course.
"""
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Enrollment in this course is by invitation only", resp.content)
# Check that registration button is not present
self.assertNotIn(REG_STR, resp.content)
def test_invitation_only_but_allowed(self):
"""
Test for user logged in and allowed to enroll in invitation only course.
"""
# Course is invitation only, student is allowed to enroll and logged in
user = UserFactory.create(username='allowed_student', password='test', email='[email protected]')
CourseEnrollmentAllowedFactory(email=user.email, course_id=self.course.id)
self.client.login(username=user.username, password='test')
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn(u"Enroll in {}".format(self.course.id.course), resp.content.decode('utf-8'))
# Check that registration button is present
self.assertIn(REG_STR, resp.content)
@attr(shard=1)
@patch.dict(settings.FEATURES, {'RESTRICT_ENROLL_BY_REG_METHOD': True})
class AboutTestCaseShibCourse(LoginEnrollmentTestCase, SharedModuleStoreTestCase):
"""
Test cases covering about page behavior for courses that use shib enrollment domain ("shib courses")
"""
@classmethod
def setUpClass(cls):
super(AboutTestCaseShibCourse, cls).setUpClass()
cls.course = CourseFactory.create(enrollment_domain="shib:https://idp.stanford.edu/")
cls.about = ItemFactory.create(
category="about", parent_location=cls.course.location,
data="OOGIE BLOOGIE", display_name="overview"
)
def test_logged_in_shib_course(self):
"""
        For shib courses, logged-in users will see the enroll button, but are rejected once they click it
"""
self.setup_user()
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("OOGIE BLOOGIE", resp.content)
self.assertIn(u"Enroll in {}".format(self.course.id.course), resp.content.decode('utf-8'))
self.assertIn(SHIB_ERROR_STR, resp.content)
self.assertIn(REG_STR, resp.content)
def test_anonymous_user_shib_course(self):
"""
For shib courses, anonymous users will also see the enroll button
"""
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("OOGIE BLOOGIE", resp.content)
self.assertIn(u"Enroll in {}".format(self.course.id.course), resp.content.decode('utf-8'))
self.assertIn(SHIB_ERROR_STR, resp.content)
self.assertIn(REG_STR, resp.content)
@attr(shard=1)
class AboutWithClosedEnrollment(ModuleStoreTestCase):
"""
This test case will check the About page for a course that has enrollment start/end
set but it is currently outside of that period.
"""
def setUp(self):
super(AboutWithClosedEnrollment, self).setUp()
self.course = CourseFactory.create(metadata={"invitation_only": False})
# Setup enrollment period to be in future
now = datetime.datetime.now(pytz.UTC)
tomorrow = now + datetime.timedelta(days=1)
nextday = tomorrow + datetime.timedelta(days=1)
self.course.enrollment_start = tomorrow
self.course.enrollment_end = nextday
self.course = self.update_course(self.course, self.user.id)
self.about = ItemFactory.create(
category="about", parent_location=self.course.location,
display_name="overview"
)
    def test_closed_enrollment(self):
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Enrollment is Closed", resp.content)
# Check that registration button is not present
self.assertNotIn(REG_STR, resp.content)
def test_course_price_is_not_visble_in_sidebar(self):
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
        # course price is not visible on the course_about page when the course
        # mode is not set to honor
self.assertNotIn('<span class="important-dates-item-text">$10</span>', resp.content)
@attr(shard=1)
@patch.dict(settings.FEATURES, {'ENABLE_SHOPPING_CART': True})
@patch.dict(settings.FEATURES, {'ENABLE_PAID_COURSE_REGISTRATION': True})
class AboutPurchaseCourseTestCase(LoginEnrollmentTestCase, SharedModuleStoreTestCase):
"""
This test class runs through a suite of verifications regarding
purchaseable courses
"""
@classmethod
def setUpClass(cls):
super(AboutPurchaseCourseTestCase, cls).setUpClass()
cls.course = CourseFactory.create(org='MITx', number='buyme', display_name='Course To Buy')
now = datetime.datetime.now(pytz.UTC)
tomorrow = now + datetime.timedelta(days=1)
nextday = tomorrow + datetime.timedelta(days=1)
cls.closed_course = CourseFactory.create(
org='MITx',
number='closed',
display_name='Closed Course To Buy',
enrollment_start=tomorrow,
enrollment_end=nextday
)
def setUp(self):
super(AboutPurchaseCourseTestCase, self).setUp()
self._set_ecomm(self.course)
self._set_ecomm(self.closed_course)
def _set_ecomm(self, course):
"""
Helper method to turn on ecommerce on the course
"""
course_mode = CourseMode(
course_id=course.id,
mode_slug=CourseMode.DEFAULT_MODE_SLUG,
mode_display_name=CourseMode.DEFAULT_MODE_SLUG,
min_price=10,
)
course_mode.save()
def test_anonymous_user(self):
"""
Make sure an anonymous user sees the purchase button
"""
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Add buyme to Cart <span>($10 USD)</span>", resp.content)
def test_logged_in(self):
"""
Make sure a logged in user sees the purchase button
"""
self.setup_user()
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Add buyme to Cart <span>($10 USD)</span>", resp.content)
def test_already_in_cart(self):
"""
        This makes sure that if a user has this course in the cart, the expected
        message appears
"""
self.setup_user()
cart = Order.get_cart_for_user(self.user)
PaidCourseRegistration.add_to_order(cart, self.course.id)
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("This course is in your", resp.content)
self.assertNotIn("Add buyme to Cart <span>($10 USD)</span>", resp.content)
def test_already_enrolled(self):
"""
This makes sure that the already enrolled message appears for paywalled courses
"""
self.setup_user()
# note that we can't call self.enroll here since that goes through
# the Django student views, which doesn't allow for enrollments
# for paywalled courses
CourseEnrollment.enroll(self.user, self.course.id)
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("You are enrolled in this course", resp.content)
self.assertIn("View Course", resp.content)
self.assertNotIn("Add buyme to Cart <span>($10 USD)</span>", resp.content)
def test_closed_enrollment(self):
"""
This makes sure that paywalled courses also honor the registration
window
"""
self.setup_user()
url = reverse('about_course', args=[self.closed_course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Enrollment is Closed", resp.content)
self.assertNotIn("Add closed to Cart <span>($10 USD)</span>", resp.content)
        # course price is visible on the course_about page when the course
        # mode is set to honor and its price is set
self.assertIn('<span class="important-dates-item-text">$10</span>', resp.content)
def test_invitation_only(self):
"""
        This makes sure that the invitation-only restriction takes precedence over
        any purchase enablements
"""
course = CourseFactory.create(metadata={"invitation_only": True})
self._set_ecomm(course)
self.setup_user()
url = reverse('about_course', args=[course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Enrollment in this course is by invitation only", resp.content)
def test_enrollment_cap(self):
"""
Make sure that capped enrollments work even with
paywalled courses
"""
course = CourseFactory.create(
metadata={
"max_student_enrollments_allowed": 1,
"display_coursenumber": "buyme",
}
)
self._set_ecomm(course)
self.setup_user()
url = reverse('about_course', args=[course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Add buyme to Cart <span>($10 USD)</span>", resp.content)
# note that we can't call self.enroll here since that goes through
# the Django student views, which doesn't allow for enrollments
# for paywalled courses
CourseEnrollment.enroll(self.user, course.id)
# create a new account since the first account is already enrolled in the course
email = '[email protected]'
password = 'bar'
username = 'test_second'
        self.create_account(username, email, password)
self.activate_user(email)
self.login(email, password)
# Get the about page again and make sure that the page says that the course is full
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Course is full", resp.content)
self.assertNotIn("Add buyme to Cart ($10)", resp.content)
def test_free_course_display(self):
"""
Make sure other courses that don't have shopping cart enabled don't display the add-to-cart button
and don't display the course_price field if Cosmetic Price is disabled.
"""
course = CourseFactory.create(org='MITx', number='free', display_name='Course For Free')
self.setup_user()
url = reverse('about_course', args=[course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertNotIn("Add free to Cart (Free)", resp.content)
self.assertNotIn('<p class="important-dates-item-title">Price</p>', resp.content)
class CourseAboutTestCaseCCX(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
    Test for an unenrolled student trying to access a CCX.
    Note: Only a CCX coach can enroll a student in a CCX; self-registration is not allowed.
"""
MODULESTORE = TEST_DATA_SPLIT_MODULESTORE
@classmethod
def setUpClass(cls):
super(CourseAboutTestCaseCCX, cls).setUpClass()
cls.course = CourseFactory.create()
def setUp(self):
super(CourseAboutTestCaseCCX, self).setUp()
# Create ccx coach account
self.coach = coach = AdminFactory.create(password="test")
self.client.login(username=coach.username, password="test")
def test_redirect_to_dashboard_unenrolled_ccx(self):
"""
        Assert that an unenrolled user who tries to access a CCX is not allowed
        to self-register and is instead redirected to the student dashboard.
"""
# create ccx
ccx = CcxFactory(course_id=self.course.id, coach=self.coach)
ccx_locator = CCXLocator.from_course_locator(self.course.id, unicode(ccx.id))
self.setup_user()
url = reverse('info', args=[ccx_locator])
response = self.client.get(url)
expected = reverse('dashboard')
self.assertRedirects(response, expected, status_code=302, target_status_code=200)
| fintech-circle/edx-platform | lms/djangoapps/courseware/tests/test_about.py | Python | agpl-3.0 | 25,930 |
import torch as t
from torch.autograd import Variable as V
from torch import FloatTensor as FT
import numpy as np
from bayestorch.hmc import HMCSampler
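# Bayesian trainer sketch: roll out the current policy, fit a critic and a
# state "hallucinator" to the observed rewards via HMC sampling, then optimize
# the policy against rewards the critic predicts on hallucinated states.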
class SimpleTrainer:
def __init__(self, env,critic,hallucinator,policy_buffer,policy_c, noise_dim):
self.env = env
self.hallucinator = hallucinator
self.critic = critic
self.policy_buffer = policy_buffer
self.policy_c = policy_c
self.noise_dim = noise_dim
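    # One iteration = rollout -> buffer update -> HMC resampling -> policy opt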
def train(self, train_steps,sample_steps,opt_steps):
in_dim=self.env.obs_size
out_dim=self.env.action_size
cur_policy = self.policy_c(in_dim,out_dim)
for i in range(train_steps):
reward = self.sample_episode(cur_policy)
self.policy_buffer.put(cur_policy.state_dict(),reward)
self.train_critic_hallucinator(sample_steps)
            # Adopt the optimized weights so the next rollout uses them
            cur_policy.load_state_dict(self.train_policy(opt_steps))
    def sample_episode(self, policy,n=1,skip = 3):
        total_reward = 0
        for i in range(n):
            # Reset the termination flag for every episode, not just the first
            done = False
            cur_obs = self.env.new_episode()
            # Use `step` rather than `t`, which would shadow the torch alias
            step = 0
            while not done:
                cur_obs = V(FT(cur_obs)).unsqueeze(0)
                display = (step % skip == 0)
                cur_action = policy.forward(cur_obs).data.cpu().numpy()
                cur_obs,cur_reward,done = self.env.next_obs(cur_action.squeeze(0), render = display)
                total_reward += cur_reward
                step += 1
        avg_episode_reward = total_reward / n
        return avg_episode_reward
def train_critic_hallucinator(self,sample_steps):
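        # Yield one log-probability closure per buffered policy; the posterior
        # ties the critic/hallucinator parameters to each observed reward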
def closure_gen():
yield (lambda: self.critic.get_prior_llh())
for state_dict,reward in self.policy_buffer:
policy = self.policy_c(self.env.obs_size, self.env.action_size)
policy.load_state_dict(state_dict)
                def closure():
                    noise=V(FT(np.random.randn(self.noise_dim)))
                    states = self.hallucinator.forward(noise.unsqueeze(0))
                    # Flatten the batch dimension (currently 1) together with
                    # the hallucinator's per-sample state dimension
                    states = states.view(states.size(0)*self.hallucinator.n, -1)
                    actions = policy.forward(states)
                    actions = actions.view(1,-1)
                    states = states.view(1,-1)
                    # The critic is taken to output a predicted-reward mean and
                    # a log standard deviation; return the log-likelihood
                    mean = self.critic(states,actions)[0]
                    lsd = self.critic(states,actions)[1]
                    llh = gaussian_llh(mean,lsd,reward)
                    return llh
yield closure
params = self.critic.parameter_list() \
+ self.hallucinator.parameter_list()
sampler = HMCSampler(params)
for i in range(sample_steps):
sampler.step(closure_gen)
def train_policy(self,opt_steps):
state_dict, _ = self.policy_buffer.peek()
policy = self.policy_c(self.env.obs_size, self.env.action_size)
policy.load_state_dict(state_dict)
opt = t.optim.SGD(policy.parameters(), lr=0.001)
        # TODO: this closure largely duplicates the one in train_critic_hallucinator
        def closure():
            noise=V(FT(np.random.randn(self.noise_dim)))
            states = self.hallucinator.forward(noise.unsqueeze(0))
            # Flatten the batch dimension (currently 1) together with the
            # hallucinator's per-sample state dimension
            states = states.view(states.size(0)*self.hallucinator.n, -1)
            actions = policy.forward(states)
            actions = actions.view(1,-1)
            states = states.view(1,-1)
            reward = self.critic(states,actions)[0]
            # SGD minimizes, so descend on the negative predicted reward and
            # backpropagate; without backward() the step would be a no-op
            loss = -reward
            loss.backward()
            return loss
for i in range(opt_steps):
opt.zero_grad()
opt.step(closure)
return policy.state_dict()
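# Unnormalized Gaussian-style log-likelihood: a squared-error term between the
# predicted mean and the observed reward, plus a log-variance penalty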
def gaussian_llh(mean,log_std_dev,reward):
llh = -(mean-reward)**2 - 2*log_std_dev
return llh
| fizz-ml/policybandit | trainer.py | Python | mit | 3,859 |
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.facts.virtual import linux
def mock_os_path_is_file_docker(filename):
if filename in ('/.dockerenv', '/.dockerinit'):
return True
return False
def test_get_virtual_facts_docker(mocker):
mocker.patch('os.path.exists', mock_os_path_is_file_docker)
module = mocker.Mock()
module.run_command.return_value = (0, '', '')
inst = linux.LinuxVirtual(module)
facts = inst.get_virtual_facts()
expected = {
'virtualization_role': 'guest',
'virtualization_tech_host': set(),
'virtualization_type': 'docker',
'virtualization_tech_guest': set(['docker', 'container']),
}
assert facts == expected
def test_get_virtual_facts_bhyve(mocker):
mocker.patch('os.path.exists', return_value=False)
mocker.patch('ansible.module_utils.facts.virtual.linux.get_file_content', return_value='')
mocker.patch('ansible.module_utils.facts.virtual.linux.get_file_lines', return_value=[])
module = mocker.Mock()
module.run_command.return_value = (0, 'BHYVE\n', '')
inst = linux.LinuxVirtual(module)
facts = inst.get_virtual_facts()
expected = {
'virtualization_role': 'guest',
'virtualization_tech_host': set(),
'virtualization_type': 'bhyve',
'virtualization_tech_guest': set(['bhyve']),
}
assert facts == expected
| ansible/ansible | test/units/module_utils/facts/virtual/test_linux.py | Python | gpl-3.0 | 1,613 |
# -*- coding: utf-8 -*-
#
# Copyright © 2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| rbramwell/pulp | client_consumer/pulp/client/__init__.py | Python | gpl-2.0 | 650 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of the Shiboken Python Bindings Generator project.
#
# Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
#
# Contact: PySide team <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# version 2.1 as published by the Free Software Foundation. Please
# review the following information to ensure the GNU Lesser General
# Public License version 2.1 requirements will be met:
# http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
# #
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
'''Test cases for a class with a private destructor.'''
import gc
import sys
import unittest
import shiboken
from sample import PrivateDtor
class PrivateDtorTest(unittest.TestCase):
'''Test case for PrivateDtor class'''
    def testPrivateDtorInstantiation(self):
        '''Test if instantiation of a class with a private destructor raises an exception.'''
self.assertRaises(TypeError, PrivateDtor)
def testPrivateDtorInheritance(self):
'''Test if inheriting from PrivateDtor raises an exception.'''
def inherit():
class Foo(PrivateDtor):
pass
self.assertRaises(TypeError, inherit)
def testPrivateDtorInstanceMethod(self):
'''Test if PrivateDtor.instance() method return the proper singleton.'''
pd1 = PrivateDtor.instance()
calls = pd1.instanceCalls()
self.assertEqual(type(pd1), PrivateDtor)
pd2 = PrivateDtor.instance()
self.assertEqual(pd2, pd1)
self.assertEqual(pd2.instanceCalls(), calls + 1)
def testPrivateDtorRefCounting(self):
'''Test refcounting of the singleton returned by PrivateDtor.instance().'''
pd1 = PrivateDtor.instance()
calls = pd1.instanceCalls()
refcnt = sys.getrefcount(pd1)
pd2 = PrivateDtor.instance()
self.assertEqual(pd2.instanceCalls(), calls + 1)
self.assertEqual(sys.getrefcount(pd2), sys.getrefcount(pd1))
self.assertEqual(sys.getrefcount(pd2), refcnt + 1)
del pd1
self.assertEqual(sys.getrefcount(pd2), refcnt)
del pd2
gc.collect()
pd3 = PrivateDtor.instance()
self.assertEqual(type(pd3), PrivateDtor)
self.assertEqual(pd3.instanceCalls(), calls + 2)
self.assertEqual(sys.getrefcount(pd3), refcnt)
def testClassDecref(self):
# Bug was that class PyTypeObject wasn't decrefed when instance
# was invalidated
before = sys.getrefcount(PrivateDtor)
for i in range(1000):
obj = PrivateDtor.instance()
shiboken.invalidate(obj)
after = sys.getrefcount(PrivateDtor)
self.assertLess(abs(before - after), 5)
if __name__ == '__main__':
unittest.main()
| o11c/shiboken2 | tests/samplebinding/privatedtor_test.py | Python | gpl-2.0 | 3,309 |
# ===============================================================================
# Copyright 2016 ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= standard library imports ========================
# ============= local library imports ==========================
class Exit(BaseException):
pass
HELP = {'help': 'help <command>: Display additional information about <command>',
'exit': 'exit: Quit the application',
'quit': 'quit: Quit the application',
'commands': 'List all available commands'}
class App(object):
"""
    Consider using curses for user interaction.
"""
def run(self, command_line_args):
self._welcome()
if command_line_args:
self._execute_commandline(command_line_args)
while 1:
try:
cmd = self._get_command()
except Exit:
self._exit()
break
self._execute_command(cmd)
def _get_command(self):
cmd = raw_input('>>> ')
cmd = cmd.lower()
if cmd in ('exit', 'quit'):
raise Exit
return cmd
def _execute_commandline(self, args):
pass
def _execute_command(self, cmd):
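        # Split "cmd arg1 arg2 ..." and dispatch to the matching _cmd handler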
cs = cmd.split(' ')
cmd, args = cs[0], cs[1:]
cmd = '_{}'.format(cmd)
if hasattr(self, cmd):
func = getattr(self, cmd)
func(*args)
else:
print 'Invalid command {}'.format(cmd)
# commands
def _help(self, *args):
cmd = args[0]
try:
msg = HELP[cmd]
except KeyError:
msg = '"{}" is not a valid command'.format(cmd)
print msg
def _commands(self, *args):
print '''************ Available Commands ************
commands: List all available commands
exit: Exit the program
quit: Same as exit
help <command>: Display additional information about <command>
'''
def _welcome(self):
print '''====================================================================================
_______ _______ ______ __ __
| || || _ | | |_| |
| ___||_ _|| | || | |
| |___ | | | |_||_ | |
| ___| | | | __ || |
| |___ | | | | | || ||_|| |
|_______| |___| |___| |_||_| |_|
====================================================================================
Developed by David Ketchum, Jake Ross 2016
New Mexico Tech/New Mexico Bureau of Geology
Available commands are enumerated using "commands"
For more information regarding a specific command use "help <command>". Replace <command> with the command of interest
'''
def _exit(self):
print 'Good Bye'
if __name__ == '__main__':
a = App()
args = None
a.run(args)
# ============= EOF =============================================
| NMTHydro/Recharge | zobs/ross/etrm_app.py | Python | apache-2.0 | 3,451 |
"""rules
Revision ID: 35088e32c557
Revises: d759891ddbf4
Create Date: 2019-11-18 14:20:23.661254
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
import datetime
import logging
log = logging.getLogger(__name__)
# revision identifiers, used by Alembic.
revision = '35088e32c557'
down_revision = 'd759891ddbf4'
branch_labels = None
depends_on = None
def upgrade():
if op.get_bind().dialect.name == 'mysql':
try:
op.add_column('tRule', sa.Column('format', sa.String(80), nullable=True, server_default='text'))
op.add_column('tRule', sa.Column('created_by', sa.Integer(), sa.ForeignKey('tUser.id'), nullable=True))
op.add_column('tRule', sa.Column('updated_by', sa.Integer(), sa.ForeignKey('tUser.id'), nullable=True))
            # Pass the callables, not their results, so timestamps are computed per row
            op.add_column('tRule', sa.Column('updated_at', sa.DateTime(), default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow, nullable=True))
except Exception as e:
log.exception(e)
else: ## sqlite
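        # SQLite has very limited ALTER TABLE support, so follow the usual
        # rebuild recipe: create tRule_new, copy the rows, then swap the names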
try:
op.drop_table('tRule_new')
except:
log.info('tRule_new does not exist')
try:
op.create_table(
'tRule_new',
sa.Column('id', sa.Integer(), primary_key=True, nullable=False),
sa.Column('name', sa.String(200), nullable=False),
sa.Column('description', sa.String(1000), nullable=True),
sa.Column('format', sa.String(80), nullable=False, server_default='text'),
sa.Column('ref_key', sa.String(60), nullable=False, index=True),
                sa.Column('value', sa.Text().with_variant(mysql.LONGTEXT, 'mysql'), nullable=True),
sa.Column('status', sa.String(1), nullable=False, server_default=sa.text(u"'A'")),
sa.Column('scenario_id', sa.Integer(), sa.ForeignKey('tScenario.id'), nullable=True),
sa.Column('network_id', sa.Integer(), sa.ForeignKey('tNetwork.id'), index=True, nullable=True),
sa.Column('node_id', sa.Integer(), sa.ForeignKey('tNode.id'), index=True, nullable=True),
sa.Column('link_id', sa.Integer(), sa.ForeignKey('tLink.id'), index=True, nullable=True),
sa.Column('group_id', sa.Integer(), sa.ForeignKey('tResourceGroup.id'), index=True, nullable=True),
sa.Column('created_by', sa.Integer(), sa.ForeignKey('tUser.id')),
sa.Column('updated_by', sa.Integer(), sa.ForeignKey('tUser.id')),
                sa.Column('updated_at', sa.DateTime(), nullable=True),
)
op.execute("insert into tRule_new (id, name, description, ref_key, value, status, scenario_id, network_id, node_id, link_id, group_id, created_by, updated_by, updated_at) select id, name, description, ref_key, value, status, scenario_id, network_id, node_id, link_id, group_id, null, null, null from tRule")
op.rename_table('tRule','tRule_old')
op.rename_table('tRule_new', 'tRule')
op.drop_table('tRule_old')
except Exception as e:
log.exception(e)
def downgrade():
if op.get_bind().dialect.name == 'mysql':
try:
op.drop_column('tRule', 'format')
op.drop_column('tRule', 'created_by')
op.drop_column('tRule', 'updated_by')
op.drop_column('tRule', 'updated_at')
except Exception as e:
log.exception(e)
else: ## sqlite
try:
op.drop_table('tRule_new')
except:
log.info('tRule_new does not exist')
try:
op.create_table(
'tRule_new',
sa.Column('id', sa.Integer(), primary_key=True, nullable=False),
sa.Column('name', sa.String(200), nullable=False),
sa.Column('description', sa.String(1000), nullable=True),
sa.Column('ref_key', sa.String(60), nullable=False, index=True),
                sa.Column('value', sa.Text().with_variant(mysql.LONGTEXT, 'mysql'), nullable=True),
sa.Column('status', sa.String(1), nullable=False, server_default=sa.text(u"'A'")),
sa.Column('scenario_id', sa.Integer(), sa.ForeignKey('tScenario.id'), nullable=True),
sa.Column('network_id', sa.Integer(), sa.ForeignKey('tNetwork.id'), index=True, nullable=True),
sa.Column('node_id', sa.Integer(), sa.ForeignKey('tNode.id'), index=True, nullable=True),
sa.Column('link_id', sa.Integer(), sa.ForeignKey('tLink.id'), index=True, nullable=True),
sa.Column('group_id', sa.Integer(), sa.ForeignKey('tResourceGroup.id'), index=True, nullable=True),
)
op.execute("insert into tRule_new (id, name, description, ref_key, value, status, scenario_id, network_id, node_id, link_id, group_id) select id, name, description, ref_key, value, status, scenario_id, network_id, node_id, link_id, group_id from tRule")
op.rename_table('tRule','tRule_old')
op.rename_table('tRule_new', 'tRule')
op.drop_table('tRule_old')
except Exception as e:
log.exception(e)
| hydraplatform/hydra-base | hydra_base/db/alembic/versions/35088e32c557_rules.py | Python | lgpl-3.0 | 5,229 |
bl_info = {
"name": "AC3D v2 (.ac)",
"category": "Import-Export",
"author": "ZLSA",
"description": "Import and export AC3D files",
"location": "File > Import/Export > AC3D v2",
"version": (0, 1),
"blender": (2, 72, 0),
"warning": "Alpha",
}
import bpy
from bpy.props import *
import math
from bpy_extras.io_utils import ExportHelper
import os
# Export
def export(forwards = "+Z",
up = "+Y",
limit_render_layers = True,
limit_selection = False):
out = ""
out += "AC3Db\n"
scene = bpy.context.scene
objects = []
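  # Convert a vector from Blender's Z-up axes to AC3D's Y-up axes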
def transform_xyz(v):
ix, iy, iz = v
return ix, iz, -iy
def should_export(obj):
if limit_render_layers:
on_render_layer = False
for layer in range(len(scene.layers)):
if scene.layers[layer] and obj.layers[layer]:
on_render_layer = True
break
if not on_render_layer: return False
if limit_selection and not obj.select: return False
return True
# first, get a list of objects that will be exported
for obj in scene.objects:
if not obj.parent and should_export(obj):
objects.append(obj)
# then, get all materials
materials = []
for obj in scene.objects:
slots = obj.material_slots
# only go ahead if there are any materials
if len(slots) == 0 or not should_export(obj): continue
for slot in slots:
if slot.material not in materials:
materials.append(slot.material)
# Default material
out += """MATERIAL "DefaultWhite" rgb 1.0 1.0 1.0 amb 0.2 0.2 0.2 emis 0.0 0.0 0.0 spec 0.2 0.2 0.2 shi 0.6 trans 0\n"""
# now print out all of them
for material in materials:
diffuse = material.diffuse_color
ambient = material.ambient * material.diffuse_color
emission = material.emit * material.diffuse_color
specular = material.specular_color
shininess = material.specular_intensity
transparency = 1 - material.alpha
mat = {
"name": material.name,
"rgb": " ".join([str(round(x, 4)) for x in diffuse]),
"amb": " ".join([str(round(x, 4)) for x in ambient]),
"emission": " ".join([str(round(x, 4)) for x in emission]),
"specular": " ".join([str(round(x, 4)) for x in specular]),
"shininess": str(round(shininess, 4)),
"transparency": str(round(transparency, 4))
}
out += """MATERIAL "{name}" rgb {rgb} amb {amb} emis {emission} spec {specular} shi {shininess} trans {transparency}\n""".format(**mat)
material_indexes = {}
i = 1
for material in materials:
material_indexes[material.name] = i
i += 1
out += "OBJECT world\n"
out += "name \"{name}\"\n".format(name = scene.world.name)
out += "kids {children}\n".format(children = str(len(objects)))
def export_object(obj):
obj_out = ""
obj_type = "group"
if obj.type == "MESH": obj_type = "poly"
obj_out += "OBJECT {obj_type}\n".format(obj_type = obj_type)
obj_out += "name \"{name}\"\n".format(name = obj.name)
# get child number
children = 0
for o in obj.children:
if should_export(o):
children += 1
# location
x, y, z = transform_xyz(obj.matrix_local.to_translation())
obj_out += "loc {:5.6f} {:5.6f} {:5.6f}\n".format(x, y, z)
# rotation
matrix = obj.matrix_local.to_quaternion().to_matrix()
if matrix[0] != [1, 0, 0] or \
matrix[1] != [0, 1, 0] or \
matrix[2] != [0, 0, 1]:
obj_out += "rot"
for x in range(0, 3):
x, y, z = matrix[x]
obj_out += " {:5.6f} {:5.6f} {:5.6f}".format(x, y, z)
obj_out += "\n"
# export mesh data
if obj.type == "MESH":
mesh = obj.to_mesh(scene, True, "RENDER")
slots = obj.material_slots
vertex_number = len(mesh.vertices)
face_number = len(mesh.tessfaces)
if len(mesh.uv_textures) > 0 and vertex_number > 0:
filepath = mesh.uv_textures[0].data[0].image.filepath
if filepath.startswith("//"): filepath = filepath[2:]
obj_out += "texture \"{filepath}\"\n".format(filepath = filepath)
# handle vertices
obj_out += "numvert {vertex_number}\n".format(vertex_number = str(vertex_number))
for vertex in mesh.vertices:
x, y, z = transform_xyz(vertex.co)
obj_out += "{:5.6f} {:5.6f} {:5.6f}\n".format(x, y, z)
has_uv = False
if len(mesh.uv_textures) and mesh.tessface_uv_textures.active: has_uv = True
# handle faces
obj_out += "numsurf {face_number}\n".format(face_number = str(face_number))
poly_index = 0
for poly in mesh.tessfaces:
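        # AC3D SURF flags: the shift below sets bit 4 (smooth shaded) and
        # bit 5 (two-sided); the low nibble (surface type 0 = polygon) stays 0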
shading = 0
if poly.use_smooth: shading |= 1<<0
if mesh.show_double_sided: shading |= 1<<1
obj_out += "SURF 0X{shading}\n".format(shading = str(hex(shading << 4))[2:])
if len(slots) == 0:
obj_out += "mat 0\n"
else:
obj_out += "mat {index}\n".format(index = str(material_indexes[slots[poly.material_index].name]))
obj_out += "refs {vertices}\n".format(vertices = str(len(poly.vertices)))
vertex_index = 0
for vertex in poly.vertices:
if has_uv:
uv = mesh.tessface_uv_textures.active.data[poly_index].uv[vertex_index]
else:
uv = [0, 0]
obj_out += "{vertex} {uvx:5.6f} {uvy:5.6f}\n".format(vertex = str(vertex), uvx = uv[0], uvy = uv[1])
vertex_index += 1
poly_index += 1
# handle children
obj_out += "kids {children}\n".format(children = str(children))
for o in obj.children:
if should_export(o):
obj_out += export_object(o)
return obj_out
for obj in objects:
out += export_object(obj)
return out
# Export operator
class ExportAC3D(bpy.types.Operator,ExportHelper):
"""Exports the file as an AC3D model (v2)"""
bl_idname = "export_scene.ac3d_v2"
bl_label = "Export AC3D v2"
filename_ext = ".ac"
filter_glob = StringProperty(
default = "*.ac",
options = {"HIDDEN"},
)
# Forwards = bpy.props.EnumProperty(
# name = "Forward",
# description = "Transforms Blender's native +Y",
# items = [
# ("+X", "+X", "Positive X"),
# ("+Y", "+Y", "Positive Y"),
# ("+Z", "+Z", "Positive Z"),
# ("-X", "-X", "Negative X"),
# ("-Y", "-Y", "Negative Y"),
# ("-Z", "-Z", "Negative Z"),
# ],
# default = "+Z")
# Up = bpy.props.EnumProperty(
# name = "Up",
# description = "Transforms Blender's native +Z",
# items = [
# ("+X", "+X", "Positive X"),
# ("+Y", "+Y", "Positive Y"),
# ("+Z", "+Z", "Positive Z"),
# ("-X", "-X", "Negative X"),
# ("-Y", "-Y", "Negative Y"),
# ("-Z", "-Z", "Negative Z"),
# ],
# default = "+Y")
LimitRenderLayers = bpy.props.BoolProperty(
name = "Limit to render layers",
description = "Limits export to objects on render layers",
default = True)
LimitSelection = bpy.props.BoolProperty(
name = "Limit to selection",
description = "Limits export to selected objects",
default = False)
FaceMaterials = bpy.props.BoolProperty(
name = "Face materials",
description = "Use face materials when exporting",
default = False)
def execute(self,context):
filepath = self.filepath
text = export(
# forwards = self.Forwards,
# up = self.Up,
limit_render_layers = self.LimitRenderLayers,
limit_selection = self.LimitSelection
)
open(filepath,"w").write(text)
return {"FINISHED"}
def invoke(self,context,event):
wm = context.window_manager
wm.fileselect_add(self)
return {"RUNNING_MODAL"}
def menu_func(self,context):
self.layout.operator(ExportAC3D.bl_idname, text = "AC3D (.ac) v2")
def register():
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_export.append(menu_func)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.INFO_MT_file_export.remove(menu_func)
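# assumed convenience entry point when running the script directly from
# Blender's text editor
if __name__ == "__main__":
    register()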
| zlsa/io_scene_ac3d_v2 | __init__.py | Python | mit | 8,159 |
#!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Michael A.G. Aivazis
# California Institute of Technology
# (C) 1998-2005 All Rights Reserved
#
# <LicenseText>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
from time import hour
from length import nautical_mile
#
# Definitions of common speed units
# Data taken from Appendix F of Halliday, Resnick, Walker, "Fundamentals of Physics",
# fourth edition, John Wiley and Sons, 1993
knot = nautical_mile/hour
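# usage sketch (assuming pyre's unit algebra):
#   v = 15 * knot      # a speed quantity; dividing v by knot recovers 15.0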
# version
__id__ = "$Id: speed.py,v 1.1.1.1 2005/03/08 16:13:41 aivazis Exp $"
#
# End of file
| bmi-forum/bmi-pyre | pythia-0.8/packages/pyre/pyre/units/speed.py | Python | gpl-2.0 | 742 |
from pandac.PandaModules import *
from toontown.toonbase.ToonBaseGlobal import *
from direct.interval.IntervalGlobal import *
from BattleBase import *
from direct.distributed.ClockDelta import *
from toontown.toonbase import ToontownBattleGlobals
from direct.distributed import DistributedNode
from direct.fsm import ClassicFSM
from direct.fsm import State
from direct.task.Task import Task
from direct.directnotify import DirectNotifyGlobal
import Movie
import MovieUtil
from toontown.suit import Suit
from direct.actor import Actor
import BattleProps
from direct.particles import ParticleEffect
import BattleParticles
from toontown.hood import ZoneUtil
from toontown.distributed import DelayDelete
from toontown.toon import TTEmote
from otp.avatar import Emote
from toontown.nametag import NametagGlobals
class DistributedBattleBase(DistributedNode.DistributedNode, BattleBase):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedBattleBase')
camPos = ToontownBattleGlobals.BattleCamDefaultPos
camHpr = ToontownBattleGlobals.BattleCamDefaultHpr
camFov = ToontownBattleGlobals.BattleCamDefaultFov
camMenuFov = ToontownBattleGlobals.BattleCamMenuFov
camJoinPos = ToontownBattleGlobals.BattleCamJoinPos
camJoinHpr = ToontownBattleGlobals.BattleCamJoinHpr
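    # class-level counter used by uniqueBattleName to keep interval and task
    # names unique across successive battles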
id = 0
def __init__(self, cr, townBattle):
DistributedNode.DistributedNode.__init__(self, cr)
NodePath.__init__(self)
self.assign(render.attachNewNode(self.uniqueBattleName('distributed-battle')))
BattleBase.__init__(self)
self.bossBattle = 0
self.townBattle = townBattle
self.__battleCleanedUp = 0
self.activeIntervals = {}
self.localToonJustJoined = 0
self.choseAttackAlready = 0
self.toons = []
self.exitedToons = []
self.suitTraps = ''
self.membersKeep = None
self.faceOffName = self.uniqueBattleName('faceoff')
self.localToonBattleEvent = self.uniqueBattleName('localtoon-battle-event')
self.adjustName = self.uniqueBattleName('adjust')
self.timerCountdownTaskName = self.uniqueBattleName('timer-countdown')
self.movie = Movie.Movie(self)
self.timer = Timer()
self.needAdjustTownBattle = 0
self.streetBattle = 1
self.levelBattle = 0
self.localToonFsm = ClassicFSM.ClassicFSM('LocalToon', [State.State('HasLocalToon', self.enterHasLocalToon, self.exitHasLocalToon, ['NoLocalToon', 'WaitForServer']), State.State('NoLocalToon', self.enterNoLocalToon, self.exitNoLocalToon, ['HasLocalToon', 'WaitForServer']), State.State('WaitForServer', self.enterWaitForServer, self.exitWaitForServer, ['HasLocalToon', 'NoLocalToon'])], 'WaitForServer', 'WaitForServer')
self.localToonFsm.enterInitialState()
self.fsm = ClassicFSM.ClassicFSM('DistributedBattle', [State.State('Off', self.enterOff, self.exitOff, ['FaceOff',
'WaitForInput',
'WaitForJoin',
'MakeMovie',
'PlayMovie',
'Reward',
'Resume']),
State.State('FaceOff', self.enterFaceOff, self.exitFaceOff, ['WaitForInput']),
State.State('WaitForJoin', self.enterWaitForJoin, self.exitWaitForJoin, ['WaitForInput', 'Resume']),
State.State('WaitForInput', self.enterWaitForInput, self.exitWaitForInput, ['WaitForInput', 'PlayMovie', 'Resume']),
State.State('MakeMovie', self.enterMakeMovie, self.exitMakeMovie, ['PlayMovie', 'Resume']),
State.State('PlayMovie', self.enterPlayMovie, self.exitPlayMovie, ['WaitForInput',
'WaitForJoin',
'Reward',
'Resume']),
State.State('Reward', self.enterReward, self.exitReward, ['Resume']),
State.State('Resume', self.enterResume, self.exitResume, [])], 'Off', 'Off')
self.fsm.enterInitialState()
self.adjustFsm = ClassicFSM.ClassicFSM('Adjust', [State.State('Adjusting', self.enterAdjusting, self.exitAdjusting, ['NotAdjusting']), State.State('NotAdjusting', self.enterNotAdjusting, self.exitNotAdjusting, ['Adjusting'])], 'NotAdjusting', 'NotAdjusting')
self.adjustFsm.enterInitialState()
self.interactiveProp = None
return
def uniqueBattleName(self, name):
DistributedBattleBase.id += 1
return name + '-%d' % DistributedBattleBase.id
def generate(self):
self.notify.debug('generate(%s)' % self.doId)
DistributedNode.DistributedNode.generate(self)
self.__battleCleanedUp = 0
self.reparentTo(render)
self._skippingRewardMovie = False
def storeInterval(self, interval, name):
if name in self.activeIntervals:
ival = self.activeIntervals[name]
if hasattr(ival, 'delayDelete') or hasattr(ival, 'delayDeletes'):
self.clearInterval(name, finish=1)
self.activeIntervals[name] = interval
def __cleanupIntervals(self):
for interval in self.activeIntervals.values():
interval.finish()
DelayDelete.cleanupDelayDeletes(interval)
self.activeIntervals = {}
def clearInterval(self, name, finish = 0):
if name in self.activeIntervals:
ival = self.activeIntervals[name]
if finish:
ival.finish()
else:
ival.pause()
if name in self.activeIntervals:
DelayDelete.cleanupDelayDeletes(ival)
if name in self.activeIntervals:
del self.activeIntervals[name]
else:
self.notify.debug('interval: %s already cleared' % name)
def finishInterval(self, name):
if name in self.activeIntervals:
interval = self.activeIntervals[name]
interval.finish()
def disable(self):
self.notify.debug('disable(%s)' % self.doId)
self.cleanupBattle()
DistributedNode.DistributedNode.disable(self)
def battleCleanedUp(self):
return self.__battleCleanedUp
def cleanupBattle(self):
if self.__battleCleanedUp:
return
self.notify.debug('cleanupBattle(%s)' % self.doId)
self.__battleCleanedUp = 1
self.__cleanupIntervals()
self.fsm.requestFinalState()
if self.hasLocalToon():
self.removeLocalToon()
base.camLens.setMinFov(ToontownGlobals.DefaultCameraFov/(4./3.))
self.localToonFsm.request('WaitForServer')
self.ignoreAll()
for suit in self.suits:
if suit.battleTrap != NO_TRAP:
self.notify.debug('250 calling self.removeTrap, suit=%d' % suit.doId)
self.removeTrap(suit)
suit.battleTrap = NO_TRAP
suit.battleTrapProp = None
self.notify.debug('253 suit.battleTrapProp = None')
suit.battleTrapIsFresh = 0
self.suits = []
self.pendingSuits = []
self.joiningSuits = []
self.activeSuits = []
self.suitTraps = ''
self.toons = []
self.joiningToons = []
self.pendingToons = []
self.activeToons = []
self.runningToons = []
self.__stopTimer()
self.__cleanupIntervals()
self._removeMembersKeep()
return
def delete(self):
self.notify.debug('delete(%s)' % self.doId)
self.__cleanupIntervals()
self._removeMembersKeep()
self.movie.cleanup()
del self.townBattle
self.removeNode()
self.fsm = None
self.localToonFsm = None
self.adjustFsm = None
self.__stopTimer()
self.timer = None
DistributedNode.DistributedNode.delete(self)
return
def loadTrap(self, suit, trapid):
self.notify.debug('loadTrap() trap: %d suit: %d' % (trapid, suit.doId))
trapName = AvProps[TRAP][trapid]
trap = BattleProps.globalPropPool.getProp(trapName)
suit.battleTrap = trapid
suit.battleTrapIsFresh = 0
suit.battleTrapProp = trap
self.notify.debug('suit.battleTrapProp = trap %s' % trap)
        if trap.getName() != 'traintrack':
            trap.wrtReparentTo(suit)
distance = MovieUtil.SUIT_TRAP_DISTANCE
if trapName == 'rake':
distance = MovieUtil.SUIT_TRAP_RAKE_DISTANCE
distance += MovieUtil.getSuitRakeOffset(suit)
trap.setH(180)
trap.setScale(0.7)
elif trapName == 'trapdoor' or trapName == 'quicksand':
trap.setScale(1.7)
elif trapName == 'marbles':
distance = MovieUtil.SUIT_TRAP_MARBLES_DISTANCE
trap.setH(94)
elif trapName == 'tnt':
trap.setP(90)
tip = trap.find('**/joint_attachEmitter')
sparks = BattleParticles.createParticleEffect(file='tnt')
trap.sparksEffect = sparks
sparks.start(tip)
trap.setPos(0, distance, 0)
if isinstance(trap, Actor.Actor):
frame = trap.getNumFrames(trapName) - 1
trap.pose(trapName, frame)
def removeTrap(self, suit, removeTrainTrack = False):
self.notify.debug('removeTrap() from suit: %d, removeTrainTrack=%s' % (suit.doId, removeTrainTrack))
if suit.battleTrapProp is None or suit.battleTrapProp.isEmpty():
self.notify.debug('suit.battleTrapProp == None, suit.battleTrap=%s setting to NO_TRAP, returning' % suit.battleTrap)
suit.battleTrap = NO_TRAP
return
if suit.battleTrap == UBER_GAG_LEVEL_INDEX:
if removeTrainTrack:
self.notify.debug('doing removeProp on traintrack')
MovieUtil.removeProp(suit.battleTrapProp)
for otherSuit in self.suits:
if not otherSuit == suit:
otherSuit.battleTrapProp = None
self.notify.debug('351 otherSuit=%d otherSuit.battleTrapProp = None' % otherSuit.doId)
otherSuit.battleTrap = NO_TRAP
otherSuit.battleTrapIsFresh = 0
else:
self.notify.debug('deliberately not doing removeProp on traintrack')
else:
self.notify.debug('suit.battleTrap != UBER_GAG_LEVEL_INDEX')
MovieUtil.removeProp(suit.battleTrapProp)
suit.battleTrapProp = None
self.notify.debug('360 suit.battleTrapProp = None')
suit.battleTrap = NO_TRAP
suit.battleTrapIsFresh = 0
return
def pause(self):
self.timer.stop()
def unpause(self):
self.timer.resume()
def findSuit(self, id):
for s in self.suits:
if s.doId == id:
return s
return None
def findToon(self, id):
toon = self.getToon(id)
if toon == None:
return
for t in self.toons:
if t == toon:
return t
return
def isSuitLured(self, suit):
if self.luredSuits.count(suit) != 0:
return 1
return 0
def unlureSuit(self, suit):
self.notify.debug('movie unluring suit %s' % suit.doId)
if self.luredSuits.count(suit) != 0:
self.luredSuits.remove(suit)
self.needAdjustTownBattle = 1
return None
def lureSuit(self, suit):
self.notify.debug('movie luring suit %s' % suit.doId)
if self.luredSuits.count(suit) == 0:
self.luredSuits.append(suit)
self.needAdjustTownBattle = 1
return None
    def getActorPosHpr(self, actor, actorList=None):
        if isinstance(actor, Suit.Suit):
            if not actorList:
                actorList = self.activeSuits
if actorList.count(actor) != 0:
numSuits = len(actorList) - 1
index = actorList.index(actor)
point = self.suitPoints[numSuits][index]
return (Point3(point[0]), VBase3(point[1], 0.0, 0.0))
else:
self.notify.warning('getActorPosHpr() - suit not active')
        else:
            if not actorList:
                actorList = self.activeToons
if actorList.count(actor) != 0:
numToons = len(actorList) - 1
index = actorList.index(actor)
point = self.toonPoints[numToons][index]
return (Point3(point[0]), VBase3(point[1], 0.0, 0.0))
else:
self.notify.warning('getActorPosHpr() - toon not active')
def setLevelDoId(self, levelDoId):
pass
def setBattleCellId(self, battleCellId):
pass
def setInteractivePropTrackBonus(self, trackBonus):
self.interactivePropTrackBonus = trackBonus
def getInteractivePropTrackBonus(self):
return self.interactivePropTrackBonus
def setPosition(self, x, y, z):
self.notify.debug('setPosition() - %d %d %d' % (x, y, z))
pos = Point3(x, y, z)
self.setPos(pos)
def setInitialSuitPos(self, x, y, z):
self.initialSuitPos = Point3(x, y, z)
self.headsUp(self.initialSuitPos)
def setZoneId(self, zoneId):
self.zoneId = zoneId
def setBossBattle(self, value):
self.bossBattle = value
def setState(self, state, timestamp):
if self.__battleCleanedUp:
return
self.notify.debug('setState(%s)' % state)
self.fsm.request(state, [globalClockDelta.localElapsedTime(timestamp)])
def setMembers(self, suits, suitsJoining, suitsPending, suitsActive, suitsLured, suitTraps, toons, toonsJoining, toonsPending, toonsActive, toonsRunning, timestamp):
if self.__battleCleanedUp:
return
self.notify.debug('setMembers() - suits: %s suitsJoining: %s suitsPending: %s suitsActive: %s suitsLured: %s suitTraps: %s toons: %s toonsJoining: %s toonsPending: %s toonsActive: %s toonsRunning: %s' % (suits,
suitsJoining,
suitsPending,
suitsActive,
suitsLured,
suitTraps,
toons,
toonsJoining,
toonsPending,
toonsActive,
toonsRunning))
ts = globalClockDelta.localElapsedTime(timestamp)
oldsuits = self.suits
self.suits = []
suitGone = 0
for s in suits:
if s in self.cr.doId2do:
suit = self.cr.doId2do[s]
suit.setState('Battle')
self.suits.append(suit)
suit.interactivePropTrackBonus = self.interactivePropTrackBonus
                if not hasattr(suit, 'battleTrap'):
                    suit.battleTrap = NO_TRAP
                    suit.battleTrapProp = None
                    self.notify.debug('496 suit.battleTrapProp = None')
                    suit.battleTrapIsFresh = 0
else:
self.notify.warning('setMembers() - no suit in repository: %d' % s)
self.suits.append(None)
suitGone = 1
numSuitsThatDied = 0
for s in oldsuits:
if self.suits.count(s) == 0:
self.__removeSuit(s)
numSuitsThatDied += 1
self.notify.debug('suit %d dies, numSuitsThatDied=%d' % (s.doId, numSuitsThatDied))
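        # a battle holds at most 4 suits; if the entire lineup died, also
        # clear any train-track trap still parented to the battle node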
if numSuitsThatDied == 4:
trainTrap = self.find('**/traintrack')
if not trainTrap.isEmpty():
self.notify.debug('removing old train trap when 4 suits died')
trainTrap.removeNode()
for s in suitsJoining:
suit = self.suits[int(s)]
if suit != None and self.joiningSuits.count(suit) == 0:
self.makeSuitJoin(suit, ts)
for s in suitsPending:
suit = self.suits[int(s)]
if suit != None and self.pendingSuits.count(suit) == 0:
self.__makeSuitPending(suit)
activeSuits = []
for s in suitsActive:
suit = self.suits[int(s)]
if suit != None and self.activeSuits.count(suit) == 0:
activeSuits.append(suit)
oldLuredSuits = self.luredSuits
self.luredSuits = []
for s in suitsLured:
suit = self.suits[int(s)]
if suit != None:
self.luredSuits.append(suit)
if oldLuredSuits.count(suit) == 0:
self.needAdjustTownBattle = 1
if self.needAdjustTownBattle == 0:
for s in oldLuredSuits:
if self.luredSuits.count(s) == 0:
self.needAdjustTownBattle = 1
index = 0
oldSuitTraps = self.suitTraps
self.suitTraps = suitTraps
for s in suitTraps:
trapid = int(s)
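            # '9' appears to be the wire-format sentinel for "no trap here"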
if trapid == 9:
trapid = -1
suit = self.suits[index]
index += 1
if suit != None:
if (trapid == NO_TRAP or trapid != suit.battleTrap) and suit.battleTrapProp != None:
self.notify.debug('569 calling self.removeTrap, suit=%d' % suit.doId)
self.removeTrap(suit)
if trapid != NO_TRAP and suit.battleTrapProp == None:
if self.fsm.getCurrentState().getName() != 'PlayMovie':
self.loadTrap(suit, trapid)
if len(oldSuitTraps) != len(self.suitTraps):
self.needAdjustTownBattle = 1
else:
for i in xrange(len(oldSuitTraps)):
if oldSuitTraps[i] == '9' and self.suitTraps[i] != '9' or oldSuitTraps[i] != '9' and self.suitTraps[i] == '9':
self.needAdjustTownBattle = 1
break
if suitGone:
validSuits = []
for s in self.suits:
if s != None:
validSuits.append(s)
self.suits = validSuits
self.needAdjustTownBattle = 1
oldtoons = self.toons
self.toons = []
toonGone = 0
for t in toons:
toon = self.getToon(t)
if toon == None:
self.notify.warning('setMembers() - toon not in cr!')
self.toons.append(None)
toonGone = 1
continue
self.toons.append(toon)
if oldtoons.count(toon) == 0:
self.notify.debug('setMembers() - add toon: %d' % toon.doId)
self.__listenForUnexpectedExit(toon)
toon.stopLookAround()
toon.stopSmooth()
for t in oldtoons:
if self.toons.count(t) == 0:
if self.__removeToon(t) == 1:
self.notify.debug('setMembers() - local toon left battle')
return []
for t in toonsJoining:
if int(t) < len(self.toons):
toon = self.toons[int(t)]
if toon != None and self.joiningToons.count(toon) == 0:
self.__makeToonJoin(toon, toonsPending, ts)
else:
self.notify.warning('setMembers toonsJoining t=%s not in self.toons %s' % (t, self.toons))
for t in toonsPending:
if int(t) < len(self.toons):
toon = self.toons[int(t)]
if toon != None and self.pendingToons.count(toon) == 0:
self.__makeToonPending(toon, ts)
else:
self.notify.warning('setMembers toonsPending t=%s not in self.toons %s' % (t, self.toons))
for t in toonsRunning:
toon = self.toons[int(t)]
if toon != None and self.runningToons.count(toon) == 0:
self.__makeToonRun(toon, ts)
activeToons = []
for t in toonsActive:
toon = self.toons[int(t)]
if toon != None and self.activeToons.count(toon) == 0:
activeToons.append(toon)
if len(activeSuits) > 0 or len(activeToons) > 0:
self.__makeAvsActive(activeSuits, activeToons)
if toonGone == 1:
validToons = []
for toon in self.toons:
if toon != None:
validToons.append(toon)
self.toons = validToons
if len(self.activeToons) > 0:
self.__requestAdjustTownBattle()
currStateName = self.localToonFsm.getCurrentState().getName()
if self.toons.count(base.localAvatar):
if oldtoons.count(base.localAvatar) == 0:
self.notify.debug('setMembers() - local toon just joined')
if self.streetBattle == 1:
base.cr.playGame.getPlace().enterZone(self.zoneId)
self.localToonJustJoined = 1
if currStateName != 'HasLocalToon':
self.localToonFsm.request('HasLocalToon')
else:
if oldtoons.count(base.localAvatar):
self.notify.debug('setMembers() - local toon just ran')
if self.levelBattle:
self.unlockLevelViz()
if currStateName != 'NoLocalToon':
self.localToonFsm.request('NoLocalToon')
for suit in self.luredSuits:
suit.loop('lured')
return oldtoons
def adjust(self, timestamp):
if self.__battleCleanedUp:
return
self.notify.debug('adjust(%f) from server' % globalClockDelta.localElapsedTime(timestamp))
self.adjustFsm.request('Adjusting', [globalClockDelta.localElapsedTime(timestamp)])
def setMovie(self, active, toons, suits, id0, tr0, le0, tg0, hp0, ac0, hpb0, kbb0, died0, revive0, id1, tr1, le1, tg1, hp1, ac1, hpb1, kbb1, died1, revive1, id2, tr2, le2, tg2, hp2, ac2, hpb2, kbb2, died2, revive2, id3, tr3, le3, tg3, hp3, ac3, hpb3, kbb3, died3, revive3, sid0, at0, stg0, dm0, sd0, sb0, st0, sid1, at1, stg1, dm1, sd1, sb1, st1, sid2, at2, stg2, dm2, sd2, sb2, st2, sid3, at3, stg3, dm3, sd3, sb3, st3):
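        # the flat argument list packs one attack record per toon slot and per
        # suit slot (four each); Movie.genAttackDicts re-groups them into dicts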
if self.__battleCleanedUp:
return
self.notify.debug('setMovie()')
if int(active) == 1:
self.notify.debug('setMovie() - movie is active')
self.movie.genAttackDicts(toons, suits, id0, tr0, le0, tg0, hp0, ac0, hpb0, kbb0, died0, revive0, id1, tr1, le1, tg1, hp1, ac1, hpb1, kbb1, died1, revive1, id2, tr2, le2, tg2, hp2, ac2, hpb2, kbb2, died2, revive2, id3, tr3, le3, tg3, hp3, ac3, hpb3, kbb3, died3, revive3, sid0, at0, stg0, dm0, sd0, sb0, st0, sid1, at1, stg1, dm1, sd1, sb1, st1, sid2, at2, stg2, dm2, sd2, sb2, st2, sid3, at3, stg3, dm3, sd3, sb3, st3)
def setChosenToonAttacks(self, ids, tracks, levels, targets):
if self.__battleCleanedUp:
return
self.notify.debug('setChosenToonAttacks() - (%s), (%s), (%s), (%s)' % (ids,
tracks,
levels,
targets))
toonIndices = []
targetIndices = []
unAttack = 0
localToonInList = 0
for i in xrange(len(ids)):
track = tracks[i]
level = levels[i]
toon = self.findToon(ids[i])
if toon == None or self.activeToons.count(toon) == 0:
self.notify.warning('setChosenToonAttacks() - toon gone or not in battle: %d!' % ids[i])
toonIndices.append(-1)
tracks.append(-1)
levels.append(-1)
targetIndices.append(-1)
continue
if toon == base.localAvatar:
localToonInList = 1
toonIndices.append(self.activeToons.index(toon))
if track == SOS:
targetIndex = -1
elif track == NPCSOS:
targetIndex = -1
elif track == PETSOS:
targetIndex = -1
elif track == PASS:
targetIndex = -1
tracks[i] = PASS_ATTACK
elif attackAffectsGroup(track, level):
targetIndex = -1
elif track == HEAL:
target = self.findToon(targets[i])
if target != None and self.activeToons.count(target) != 0:
targetIndex = self.activeToons.index(target)
else:
targetIndex = -1
elif track == UN_ATTACK:
targetIndex = -1
tracks[i] = NO_ATTACK
if toon == base.localAvatar:
unAttack = 1
self.choseAttackAlready = 0
elif track == NO_ATTACK:
targetIndex = -1
else:
target = self.findSuit(targets[i])
if target != None and self.activeSuits.count(target) != 0:
targetIndex = self.activeSuits.index(target)
else:
targetIndex = -1
targetIndices.append(targetIndex)
for i in xrange(4 - len(ids)):
toonIndices.append(-1)
tracks.append(-1)
levels.append(-1)
targetIndices.append(-1)
self.townBattleAttacks = (toonIndices,
tracks,
levels,
targetIndices)
if self.localToonActive() and localToonInList == 1:
if unAttack == 1 and self.fsm.getCurrentState().getName() == 'WaitForInput':
if self.townBattle.fsm.getCurrentState().getName() != 'Attack':
self.townBattle.setState('Attack')
self.townBattle.updateChosenAttacks(self.townBattleAttacks[0], self.townBattleAttacks[1], self.townBattleAttacks[2], self.townBattleAttacks[3])
return
def setBattleExperience(self, id0, origExp0, earnedExp0, origQuests0, items0, missedItems0, origMerits0, merits0, parts0, id1, origExp1, earnedExp1, origQuests1, items1, missedItems1, origMerits1, merits1, parts1, id2, origExp2, earnedExp2, origQuests2, items2, missedItems2, origMerits2, merits2, parts2, id3, origExp3, earnedExp3, origQuests3, items3, missedItems3, origMerits3, merits3, parts3, deathList, uberList, helpfulToonsList):
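        # mirrors setMovie: one flat reward record per toon slot (up to four),
        # re-grouped by Movie.genRewardDicts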
if self.__battleCleanedUp:
return
self.movie.genRewardDicts(id0, origExp0, earnedExp0, origQuests0, items0, missedItems0, origMerits0, merits0, parts0, id1, origExp1, earnedExp1, origQuests1, items1, missedItems1, origMerits1, merits1, parts1, id2, origExp2, earnedExp2, origQuests2, items2, missedItems2, origMerits2, merits2, parts2, id3, origExp3, earnedExp3, origQuests3, items3, missedItems3, origMerits3, merits3, parts3, deathList, uberList, helpfulToonsList)
def __listenForUnexpectedExit(self, toon):
self.accept(toon.uniqueName('disable'), self.__handleUnexpectedExit, extraArgs=[toon])
self.accept(toon.uniqueName('died'), self.__handleDied, extraArgs=[toon])
def __handleUnexpectedExit(self, toon):
self.notify.warning('handleUnexpectedExit() - toon: %d' % toon.doId)
self.__removeToon(toon, unexpected=1)
def __handleDied(self, toon):
self.notify.warning('handleDied() - toon: %d' % toon.doId)
if toon == base.localAvatar:
self.d_toonDied(toon.doId)
self.cleanupBattle()
def delayDeleteMembers(self):
membersKeep = []
for t in self.toons:
membersKeep.append(DelayDelete.DelayDelete(t, 'delayDeleteMembers'))
for s in self.suits:
membersKeep.append(DelayDelete.DelayDelete(s, 'delayDeleteMembers'))
self._removeMembersKeep()
self.membersKeep = membersKeep
def _removeMembersKeep(self):
if self.membersKeep:
for delayDelete in self.membersKeep:
delayDelete.destroy()
self.membersKeep = None
return
def __removeSuit(self, suit):
self.notify.debug('__removeSuit(%d)' % suit.doId)
if self.suits.count(suit) != 0:
self.suits.remove(suit)
if self.joiningSuits.count(suit) != 0:
self.joiningSuits.remove(suit)
if self.pendingSuits.count(suit) != 0:
self.pendingSuits.remove(suit)
if self.activeSuits.count(suit) != 0:
self.activeSuits.remove(suit)
self.suitGone = 1
if suit.battleTrap != NO_TRAP:
self.notify.debug('882 calling self.removeTrap, suit=%d' % suit.doId)
self.removeTrap(suit)
suit.battleTrap = NO_TRAP
suit.battleTrapProp = None
self.notify.debug('883 suit.battleTrapProp = None')
suit.battleTrapIsFresh = 0
return
def __removeToon(self, toon, unexpected = 0):
self.notify.debug('__removeToon(%d)' % toon.doId)
self.exitedToons.append(toon)
if self.toons.count(toon) != 0:
self.toons.remove(toon)
if self.joiningToons.count(toon) != 0:
self.clearInterval(self.taskName('to-pending-toon-%d' % toon.doId))
if toon in self.joiningToons:
self.joiningToons.remove(toon)
if self.pendingToons.count(toon) != 0:
self.pendingToons.remove(toon)
if self.activeToons.count(toon) != 0:
self.activeToons.remove(toon)
if self.runningToons.count(toon) != 0:
self.clearInterval(self.taskName('running-%d' % toon.doId), finish=1)
if toon in self.runningToons:
self.runningToons.remove(toon)
self.ignore(toon.uniqueName('disable'))
self.ignore(toon.uniqueName('died'))
self.toonGone = 1
if toon == base.localAvatar:
self.removeLocalToon()
self.__teleportToSafeZone(toon)
return 1
return 0
def removeLocalToon(self):
if self._skippingRewardMovie:
return
if base.cr.playGame.getPlace() != None:
base.cr.playGame.getPlace().setState('walk')
base.localAvatar.earnedExperience = None
self.localToonFsm.request('NoLocalToon')
return
def removeInactiveLocalToon(self, toon):
self.notify.debug('removeInactiveLocalToon(%d)' % toon.doId)
self.exitedToons.append(toon)
if self.toons.count(toon) != 0:
self.toons.remove(toon)
if self.joiningToons.count(toon) != 0:
self.clearInterval(self.taskName('to-pending-toon-%d' % toon.doId), finish=1)
if toon in self.joiningToons:
self.joiningToons.remove(toon)
if self.pendingToons.count(toon) != 0:
self.pendingToons.remove(toon)
self.ignore(toon.uniqueName('disable'))
self.ignore(toon.uniqueName('died'))
base.cr.playGame.getPlace().setState('walk')
self.localToonFsm.request('WaitForServer')
def __createJoinInterval(self, av, destPos, destHpr, name, ts, callback, toon = 0):
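        # builds a run-in path: if no direct line to the spot exists, the
        # avatar moves to the battle perimeter, along its waypoints, then in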
joinTrack = Sequence()
joinTrack.append(Func(Emote.globalEmote.disableAll, av, 'dbattlebase, createJoinInterval'))
avPos = av.getPos(self)
avPos = Point3(avPos[0], avPos[1], 0.0)
av.setShadowHeight(0)
plist = self.buildJoinPointList(avPos, destPos, toon)
if len(plist) == 0:
joinTrack.append(Func(av.headsUp, self, destPos))
if toon == 0:
timeToDest = self.calcSuitMoveTime(avPos, destPos)
joinTrack.append(Func(av.loop, 'walk'))
else:
timeToDest = self.calcToonMoveTime(avPos, destPos)
joinTrack.append(Func(av.loop, 'run'))
if timeToDest > BATTLE_SMALL_VALUE:
joinTrack.append(LerpPosInterval(av, timeToDest, destPos, other=self))
totalTime = timeToDest
else:
totalTime = 0
else:
timeToPerimeter = 0
if toon == 0:
timeToPerimeter = self.calcSuitMoveTime(plist[0], avPos)
timePerSegment = 10.0 / BattleBase.suitSpeed
timeToDest = self.calcSuitMoveTime(BattleBase.posA, destPos)
else:
timeToPerimeter = self.calcToonMoveTime(plist[0], avPos)
timePerSegment = 10.0 / BattleBase.toonSpeed
timeToDest = self.calcToonMoveTime(BattleBase.posE, destPos)
totalTime = timeToPerimeter + (len(plist) - 1) * timePerSegment + timeToDest
if totalTime > MAX_JOIN_T:
self.notify.warning('__createJoinInterval() - time: %f' % totalTime)
joinTrack.append(Func(av.headsUp, self, plist[0]))
if toon == 0:
joinTrack.append(Func(av.loop, 'walk'))
else:
joinTrack.append(Func(av.loop, 'run'))
joinTrack.append(LerpPosInterval(av, timeToPerimeter, plist[0], other=self))
for p in plist[1:]:
joinTrack.append(Func(av.headsUp, self, p))
joinTrack.append(LerpPosInterval(av, timePerSegment, p, other=self))
joinTrack.append(Func(av.headsUp, self, destPos))
joinTrack.append(LerpPosInterval(av, timeToDest, destPos, other=self))
joinTrack.append(Func(av.loop, 'neutral'))
joinTrack.append(Func(av.headsUp, self, Point3(0, 0, 0)))
tval = totalTime - ts
if tval < 0:
tval = totalTime
joinTrack.append(Func(Emote.globalEmote.releaseAll, av, 'dbattlebase, createJoinInterval'))
joinTrack.append(Func(callback, av, tval))
if av == base.localAvatar:
camTrack = Sequence()
def setCamFov(fov):
base.camLens.setMinFov(fov/(4./3.))
camTrack.append(Func(setCamFov, self.camFov))
camTrack.append(Func(base.camera.wrtReparentTo, self))
camTrack.append(Func(base.camera.setPos, self.camJoinPos))
camTrack.append(Func(base.camera.setHpr, self.camJoinHpr))
return Parallel(joinTrack, camTrack, name=name)
else:
return Sequence(joinTrack, name=name)
def makeSuitJoin(self, suit, ts):
self.notify.debug('makeSuitJoin(%d)' % suit.doId)
spotIndex = len(self.pendingSuits) + len(self.joiningSuits)
self.joiningSuits.append(suit)
suit.setState('Battle')
openSpot = self.suitPendingPoints[spotIndex]
pos = openSpot[0]
hpr = VBase3(openSpot[1], 0.0, 0.0)
trackName = self.taskName('to-pending-suit-%d' % suit.doId)
track = self.__createJoinInterval(suit, pos, hpr, trackName, ts, self.__handleSuitJoinDone)
track.start(ts)
track.delayDelete = DelayDelete.DelayDelete(suit, 'makeSuitJoin')
self.storeInterval(track, trackName)
if ToontownBattleGlobals.SkipMovie:
track.finish()
def __handleSuitJoinDone(self, suit, ts):
self.notify.debug('suit: %d is now pending' % suit.doId)
if self.hasLocalToon():
self.d_joinDone(base.localAvatar.doId, suit.doId)
def __makeSuitPending(self, suit):
self.notify.debug('__makeSuitPending(%d)' % suit.doId)
self.clearInterval(self.taskName('to-pending-suit-%d' % suit.doId), finish=1)
if self.joiningSuits.count(suit):
self.joiningSuits.remove(suit)
self.pendingSuits.append(suit)
def __teleportToSafeZone(self, toon):
self.notify.debug('teleportToSafeZone(%d)' % toon.doId)
hoodId = ZoneUtil.getCanonicalHoodId(self.zoneId)
if hoodId in base.localAvatar.hoodsVisited:
target_sz = ZoneUtil.getSafeZoneId(self.zoneId)
else:
target_sz = ZoneUtil.getSafeZoneId(base.localAvatar.defaultZone)
base.cr.playGame.getPlace().fsm.request('teleportOut', [{'loader': ZoneUtil.getLoaderName(target_sz),
'where': ZoneUtil.getWhereName(target_sz, 1),
'how': 'teleportIn',
'hoodId': target_sz,
'zoneId': target_sz,
'shardId': None,
'avId': -1,
'battle': 1}])
return
def __makeToonJoin(self, toon, pendingToons, ts):
self.notify.debug('__makeToonJoin(%d)' % toon.doId)
spotIndex = len(pendingToons) + len(self.joiningToons)
self.joiningToons.append(toon)
openSpot = self.toonPendingPoints[spotIndex]
pos = openSpot[0]
hpr = VBase3(openSpot[1], 0.0, 0.0)
trackName = self.taskName('to-pending-toon-%d' % toon.doId)
track = self.__createJoinInterval(toon, pos, hpr, trackName, ts, self.__handleToonJoinDone, toon=1)
if toon != base.localAvatar:
toon.animFSM.request('off')
track.start(ts)
track.delayDelete = DelayDelete.DelayDelete(toon, '__makeToonJoin')
self.storeInterval(track, trackName)
def __handleToonJoinDone(self, toon, ts):
self.notify.debug('__handleToonJoinDone() - pending: %d' % toon.doId)
if self.hasLocalToon():
self.d_joinDone(base.localAvatar.doId, toon.doId)
def __makeToonPending(self, toon, ts):
self.notify.debug('__makeToonPending(%d)' % toon.doId)
self.clearInterval(self.taskName('to-pending-toon-%d' % toon.doId), finish=1)
if self.joiningToons.count(toon):
self.joiningToons.remove(toon)
spotIndex = len(self.pendingToons)
self.pendingToons.append(toon)
openSpot = self.toonPendingPoints[spotIndex]
pos = openSpot[0]
hpr = VBase3(openSpot[1], 0.0, 0.0)
toon.loop('neutral')
toon.setPosHpr(self, pos, hpr)
if base.localAvatar == toon:
currStateName = self.fsm.getCurrentState().getName()
def __makeAvsActive(self, suits, toons):
self.notify.debug('__makeAvsActive()')
self.__stopAdjusting()
for s in suits:
if self.joiningSuits.count(s):
self.notify.warning('suit: %d was in joining list!' % s.doId)
self.joiningSuits.remove(s)
if self.pendingSuits.count(s):
self.pendingSuits.remove(s)
self.notify.debug('__makeAvsActive() - suit: %d' % s.doId)
self.activeSuits.append(s)
if len(self.activeSuits) >= 1:
for suit in self.activeSuits:
suitPos, suitHpr = self.getActorPosHpr(suit)
if self.isSuitLured(suit) == 0:
suit.setPosHpr(self, suitPos, suitHpr)
else:
spos = Point3(suitPos[0], suitPos[1] - MovieUtil.SUIT_LURE_DISTANCE, suitPos[2])
suit.setPosHpr(self, spos, suitHpr)
suit.loop('neutral')
for toon in toons:
if self.joiningToons.count(toon):
self.notify.warning('toon: %d was in joining list!' % toon.doId)
self.joiningToons.remove(toon)
if self.pendingToons.count(toon):
self.pendingToons.remove(toon)
self.notify.debug('__makeAvsActive() - toon: %d' % toon.doId)
if self.activeToons.count(toon) == 0:
self.activeToons.append(toon)
else:
self.notify.warning('makeAvsActive() - toon: %d is active!' % toon.doId)
if len(self.activeToons) >= 1:
for toon in self.activeToons:
toonPos, toonHpr = self.getActorPosHpr(toon)
toon.setPosHpr(self, toonPos, toonHpr)
toon.loop('neutral')
if self.fsm.getCurrentState().getName() == 'WaitForInput' and self.localToonActive() and self.localToonJustJoined == 1:
self.notify.debug('makeAvsActive() - local toon just joined')
self.__enterLocalToonWaitForInput()
self.localToonJustJoined = 0
self.startTimer()
def __makeToonRun(self, toon, ts):
self.notify.debug('__makeToonRun(%d)' % toon.doId)
if self.activeToons.count(toon):
self.activeToons.remove(toon)
self.runningToons.append(toon)
self.toonGone = 1
self.__stopTimer()
if self.localToonRunning():
self.townBattle.setState('Off')
runMTrack = MovieUtil.getToonTeleportOutInterval(toon)
runName = self.taskName('running-%d' % toon.doId)
self.notify.debug('duration: %f' % runMTrack.getDuration())
runMTrack.start(ts)
runMTrack.delayDelete = DelayDelete.DelayDelete(toon, '__makeToonRun')
self.storeInterval(runMTrack, runName)
    def getToon(self, toonId):
        if toonId in self.cr.doId2do:
            return self.cr.doId2do[toonId]
        self.notify.warning('getToon() - toon: %d not in repository!' % toonId)
        return None
def d_toonRequestJoin(self, toonId, pos):
self.notify.debug('network:toonRequestJoin()')
self.sendUpdate('toonRequestJoin', [pos[0], pos[1], pos[2]])
def d_toonRequestRun(self, toonId):
self.notify.debug('network:toonRequestRun()')
self.sendUpdate('toonRequestRun', [])
def d_toonDied(self, toonId):
self.notify.debug('network:toonDied()')
self.sendUpdate('toonDied', [])
def d_faceOffDone(self, toonId):
self.notify.debug('network:faceOffDone()')
self.sendUpdate('faceOffDone', [])
def d_adjustDone(self, toonId):
self.notify.debug('network:adjustDone()')
self.sendUpdate('adjustDone', [])
def d_timeout(self, toonId):
self.notify.debug('network:timeout()')
self.sendUpdate('timeout', [])
def d_movieDone(self, toonId):
self.notify.debug('network:movieDone()')
self.sendUpdate('movieDone', [])
def d_rewardDone(self, toonId):
self.notify.debug('network:rewardDone()')
self.sendUpdate('rewardDone', [])
def d_joinDone(self, toonId, avId):
self.notify.debug('network:joinDone(%d)' % avId)
self.sendUpdate('joinDone', [avId])
def d_requestAttack(self, toonId, track, level, av):
self.notify.debug('network:requestAttack(%d, %d, %d)' % (track, level, av))
self.sendUpdate('requestAttack', [track, level, av])
def d_requestPetProxy(self, toonId, av):
self.notify.debug('network:requestPetProxy(%s)' % av)
self.sendUpdate('requestPetProxy', [av])
def enterOff(self, ts = 0):
self.localToonFsm.requestFinalState()
return None
def exitOff(self):
return None
def enterFaceOff(self, ts = 0):
return None
def exitFaceOff(self):
return None
def enterWaitForJoin(self, ts = 0):
self.notify.debug('enterWaitForJoin()')
return None
def exitWaitForJoin(self):
return None
def __enterLocalToonWaitForInput(self):
self.notify.debug('enterLocalToonWaitForInput()')
base.camera.setPosHpr(self.camPos, self.camHpr)
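        # stored FOVs are horizontal values for a 4:3 aspect; setMinFov takes
        # the minimum-axis FOV, hence the 4/3 division (assumed convention)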
base.camLens.setMinFov(self.camMenuFov/(4./3.))
NametagGlobals.setWant2dNametags(False)
self.townBattle.setState('Attack')
self.accept(self.localToonBattleEvent, self.__handleLocalToonBattleEvent)
def startTimer(self, ts = 0):
self.notify.debug('startTimer()')
if ts >= CLIENT_INPUT_TIMEOUT:
self.notify.warning('startTimer() - ts: %f timeout: %f' % (ts, CLIENT_INPUT_TIMEOUT))
self.__timedOut()
return
self.timer.startCallback(CLIENT_INPUT_TIMEOUT - ts, self.__timedOut)
timeTask = Task.loop(Task(self.__countdown), Task.pause(0.2))
taskMgr.add(timeTask, self.timerCountdownTaskName)
def __stopTimer(self):
self.notify.debug('__stopTimer()')
self.timer.stop()
taskMgr.remove(self.timerCountdownTaskName)
def __countdown(self, task):
if hasattr(self.townBattle, 'timer'):
self.townBattle.updateTimer(int(self.timer.getT()))
else:
self.notify.warning('__countdown has tried to update a timer that has been deleted. Stopping timer')
self.__stopTimer()
return Task.done
def enterWaitForInput(self, ts = 0):
self.notify.debug('enterWaitForInput()')
if self.interactiveProp:
self.interactiveProp.gotoBattleCheer()
self.choseAttackAlready = 0
if self.localToonActive():
self.__enterLocalToonWaitForInput()
self.startTimer(ts)
if self.needAdjustTownBattle == 1:
self.__adjustTownBattle()
return None
def exitWaitForInput(self):
self.notify.debug('exitWaitForInput()')
if self.localToonActive():
self.townBattle.setState('Off')
base.camLens.setMinFov(self.camFov/(4./3.))
self.ignore(self.localToonBattleEvent)
self.__stopTimer()
return None
def __handleLocalToonBattleEvent(self, response):
mode = response['mode']
noAttack = 0
if mode == 'Attack':
self.notify.debug('got an attack')
track = response['track']
level = response['level']
target = response['target']
targetId = target
if track == HEAL and not levelAffectsGroup(HEAL, level):
if target >= 0 and target < len(self.activeToons):
targetId = self.activeToons[target].doId
else:
self.notify.warning('invalid toon target: %d' % target)
track = -1
level = -1
targetId = -1
elif track == HEAL and len(self.activeToons) == 1:
self.notify.warning('invalid group target for heal')
track = -1
level = -1
elif not attackAffectsGroup(track, level):
if target >= 0 and target < len(self.activeSuits):
targetId = self.activeSuits[target].doId
else:
target = -1
if len(self.luredSuits) > 0:
                if track == TRAP or (track == LURE and not levelAffectsGroup(LURE, level)):
if target != -1:
suit = self.findSuit(targetId)
if self.luredSuits.count(suit) != 0:
self.notify.warning('Suit: %d was lured!' % targetId)
track = -1
level = -1
targetId = -1
elif track == LURE:
if levelAffectsGroup(LURE, level) and len(self.activeSuits) == len(self.luredSuits):
self.notify.warning('All suits are lured!')
track = -1
level = -1
targetId = -1
if track == TRAP:
if target != -1:
if attackAffectsGroup(track, level):
pass
else:
suit = self.findSuit(targetId)
if suit.battleTrap != NO_TRAP:
self.notify.warning('Suit: %d was already trapped!' % targetId)
track = -1
level = -1
targetId = -1
self.d_requestAttack(base.localAvatar.doId, track, level, targetId)
elif mode == 'Run':
self.notify.debug('got a run')
self.d_toonRequestRun(base.localAvatar.doId)
elif mode == 'SOS':
targetId = response['id']
self.notify.debug('got an SOS for friend: %d' % targetId)
self.d_requestAttack(base.localAvatar.doId, SOS, -1, targetId)
elif mode == 'NPCSOS':
targetId = response['id']
self.notify.debug('got an NPCSOS for friend: %d' % targetId)
self.d_requestAttack(base.localAvatar.doId, NPCSOS, -1, targetId)
elif mode == 'PETSOS':
targetId = response['id']
trickId = response['trickId']
self.notify.debug('got an PETSOS for pet: %d' % targetId)
self.d_requestAttack(base.localAvatar.doId, PETSOS, trickId, targetId)
elif mode == 'PETSOSINFO':
petProxyId = response['id']
self.notify.debug('got a PETSOSINFO for pet: %d' % petProxyId)
if petProxyId in base.cr.doId2do:
self.notify.debug('pet: %d was already in the repository' % petProxyId)
proxyGenerateMessage = 'petProxy-%d-generated' % petProxyId
messenger.send(proxyGenerateMessage)
else:
self.d_requestPetProxy(base.localAvatar.doId, petProxyId)
noAttack = 1
elif mode == 'Pass':
targetId = response['id']
self.notify.debug('got a Pass')
self.d_requestAttack(base.localAvatar.doId, PASS, -1, -1)
elif mode == 'UnAttack':
self.d_requestAttack(base.localAvatar.doId, UN_ATTACK, -1, -1)
noAttack = 1
elif mode == 'Fire':
target = response['target']
targetId = self.activeSuits[target].doId
self.d_requestAttack(base.localAvatar.doId, FIRE, -1, targetId)
else:
self.notify.warning('unknown battle response')
return
if noAttack == 1:
self.choseAttackAlready = 0
else:
self.choseAttackAlready = 1
def __timedOut(self):
if self.choseAttackAlready == 1:
return
self.notify.debug('WaitForInput timed out')
if self.localToonActive():
self.notify.debug('battle timed out')
self.d_timeout(base.localAvatar.doId)
def enterMakeMovie(self, ts = 0):
self.notify.debug('enterMakeMovie()')
return None
def exitMakeMovie(self):
return None
def enterPlayMovie(self, ts):
self.notify.debug('enterPlayMovie()')
self.delayDeleteMembers()
        if self.hasLocalToon():
            NametagGlobals.setWant2dNametags(False)
        self.movie.play(ts, self.__handleMovieDone)
        if ToontownBattleGlobals.SkipMovie:
            self.movie.finish()
return None
def __handleMovieDone(self):
self.notify.debug('__handleMovieDone()')
if self.hasLocalToon():
self.d_movieDone(base.localAvatar.doId)
self.movie.reset()
def exitPlayMovie(self):
self.notify.debug('exitPlayMovie()')
self.movie.reset(finish=1)
self._removeMembersKeep()
        self.townBattleAttacks = ([-1, -1, -1, -1],
                                  [-1, -1, -1, -1],
                                  [-1, -1, -1, -1],
                                  [0, 0, 0, 0])
return None
def hasLocalToon(self):
return self.toons.count(base.localAvatar) > 0
def localToonPendingOrActive(self):
return self.pendingToons.count(base.localAvatar) > 0 or self.activeToons.count(base.localAvatar) > 0
def localToonActive(self):
return self.activeToons.count(base.localAvatar) > 0
def localToonActiveOrRunning(self):
return self.activeToons.count(base.localAvatar) > 0 or self.runningToons.count(base.localAvatar) > 0
def localToonRunning(self):
return self.runningToons.count(base.localAvatar) > 0
def enterHasLocalToon(self):
self.notify.debug('enterHasLocalToon()')
if base.cr.playGame.getPlace() != None:
base.cr.playGame.getPlace().setState('battle', self.localToonBattleEvent)
if localAvatar and hasattr(localAvatar, 'inventory') and localAvatar.inventory:
localAvatar.inventory.setInteractivePropTrackBonus(self.interactivePropTrackBonus)
base.camera.wrtReparentTo(self)
base.camLens.setMinFov(self.camFov/(4./3.))
return
def exitHasLocalToon(self):
self.ignore(self.localToonBattleEvent)
self.__stopTimer()
if localAvatar and hasattr(localAvatar, 'inventory') and localAvatar.inventory:
localAvatar.inventory.setInteractivePropTrackBonus(-1)
stateName = None
place = base.cr.playGame.getPlace()
if place:
stateName = place.fsm.getCurrentState().getName()
if stateName == 'died':
self.movie.reset()
base.camera.reparentTo(render)
base.camera.setPosHpr(localAvatar, 5.2, 5.45, localAvatar.getHeight() * 0.66, 131.5, 3.6, 0)
else:
base.camera.wrtReparentTo(base.localAvatar)
messenger.send('localToonLeftBattle')
base.camLens.setMinFov(ToontownGlobals.DefaultCameraFov/(4./3.))
return
def enterNoLocalToon(self):
self.notify.debug('enterNoLocalToon()')
return None
def exitNoLocalToon(self):
return None
def setSkippingRewardMovie(self):
self._skippingRewardMovie = True
def enterWaitForServer(self):
self.notify.debug('enterWaitForServer()')
return None
def exitWaitForServer(self):
return None
def createAdjustInterval(self, av, destPos, destHpr, toon = 0, run = 0):
if run == 1:
adjustTime = self.calcToonMoveTime(destPos, av.getPos(self))
else:
adjustTime = self.calcSuitMoveTime(destPos, av.getPos(self))
self.notify.debug('creating adjust interval for: %d' % av.doId)
adjustTrack = Sequence()
if run == 1:
adjustTrack.append(Func(av.loop, 'run'))
else:
adjustTrack.append(Func(av.loop, 'walk'))
adjustTrack.append(Func(av.headsUp, self, destPos))
adjustTrack.append(LerpPosInterval(av, adjustTime, destPos, other=self))
adjustTrack.append(Func(av.setHpr, self, destHpr))
adjustTrack.append(Func(av.loop, 'neutral'))
return adjustTrack
def __adjust(self, ts, callback):
self.notify.debug('__adjust(%f)' % ts)
adjustTrack = Parallel()
if len(self.pendingSuits) > 0 or self.suitGone == 1:
self.suitGone = 0
numSuits = len(self.pendingSuits) + len(self.activeSuits) - 1
index = 0
for suit in self.activeSuits:
point = self.suitPoints[numSuits][index]
pos = suit.getPos(self)
destPos = point[0]
if self.isSuitLured(suit) == 1:
destPos = Point3(destPos[0], destPos[1] - MovieUtil.SUIT_LURE_DISTANCE, destPos[2])
if pos != destPos:
destHpr = VBase3(point[1], 0.0, 0.0)
adjustTrack.append(self.createAdjustInterval(suit, destPos, destHpr))
index += 1
for suit in self.pendingSuits:
point = self.suitPoints[numSuits][index]
destPos = point[0]
destHpr = VBase3(point[1], 0.0, 0.0)
adjustTrack.append(self.createAdjustInterval(suit, destPos, destHpr))
index += 1
if len(self.pendingToons) > 0 or self.toonGone == 1:
self.toonGone = 0
numToons = len(self.pendingToons) + len(self.activeToons) - 1
index = 0
for toon in self.activeToons:
point = self.toonPoints[numToons][index]
pos = toon.getPos(self)
destPos = point[0]
if pos != destPos:
destHpr = VBase3(point[1], 0.0, 0.0)
adjustTrack.append(self.createAdjustInterval(toon, destPos, destHpr))
index += 1
for toon in self.pendingToons:
point = self.toonPoints[numToons][index]
destPos = point[0]
destHpr = VBase3(point[1], 0.0, 0.0)
adjustTrack.append(self.createAdjustInterval(toon, destPos, destHpr))
index += 1
if len(adjustTrack) > 0:
self.notify.debug('creating adjust multitrack')
e = Func(self.__handleAdjustDone)
track = Sequence(adjustTrack, e, name=self.adjustName)
self.storeInterval(track, self.adjustName)
track.start(ts)
if ToontownBattleGlobals.SkipMovie:
track.finish()
else:
self.notify.warning('adjust() - nobody needed adjusting')
self.__adjustDone()
def __handleAdjustDone(self):
self.notify.debug('__handleAdjustDone() - client adjust finished')
self.clearInterval(self.adjustName)
self.__adjustDone()
def __stopAdjusting(self):
self.notify.debug('__stopAdjusting()')
self.clearInterval(self.adjustName)
if self.adjustFsm.getCurrentState().getName() == 'Adjusting':
self.adjustFsm.request('NotAdjusting')
def __requestAdjustTownBattle(self):
self.notify.debug('__requestAdjustTownBattle() curstate = %s' % self.fsm.getCurrentState().getName())
if self.fsm.getCurrentState().getName() == 'WaitForInput':
self.__adjustTownBattle()
else:
self.needAdjustTownBattle = 1
def __adjustTownBattle(self):
self.notify.debug('__adjustTownBattle()')
if self.localToonActive() and len(self.activeSuits) > 0:
self.notify.debug('__adjustTownBattle() - adjusting town battle')
luredSuits = []
for suit in self.luredSuits:
if suit not in self.activeSuits:
self.notify.error('lured suit not in self.activeSuits')
luredSuits.append(self.activeSuits.index(suit))
trappedSuits = []
for suit in self.activeSuits:
if suit.battleTrap != NO_TRAP:
trappedSuits.append(self.activeSuits.index(suit))
self.townBattle.adjustCogsAndToons(self.activeSuits, luredSuits, trappedSuits, self.activeToons)
if hasattr(self, 'townBattleAttacks'):
self.townBattle.updateChosenAttacks(self.townBattleAttacks[0], self.townBattleAttacks[1], self.townBattleAttacks[2], self.townBattleAttacks[3])
self.needAdjustTownBattle = 0
def __adjustDone(self):
self.notify.debug('__adjustDone()')
if self.hasLocalToon():
self.d_adjustDone(base.localAvatar.doId)
self.adjustFsm.request('NotAdjusting')
def enterAdjusting(self, ts):
self.notify.debug('enterAdjusting()')
if self.localToonActive():
self.__stopTimer()
self.delayDeleteMembers()
self.__adjust(ts, self.__handleAdjustDone)
return None
def exitAdjusting(self):
self.notify.debug('exitAdjusting()')
self.finishInterval(self.adjustName)
self._removeMembersKeep()
currStateName = self.fsm.getCurrentState().getName()
if currStateName == 'WaitForInput' and self.localToonActive():
self.startTimer()
return None
def enterNotAdjusting(self):
self.notify.debug('enterNotAdjusting()')
return None
def exitNotAdjusting(self):
return None
def visualize(self):
        if not hasattr(self, 'isVisualized'):
            self.isVisualized = 0
if self.isVisualized:
self.vis.removeNode()
del self.vis
self.detachNode()
self.isVisualized = 0
else:
lsegs = LineSegs()
lsegs.setColor(0.5, 0.5, 1, 1)
lsegs.moveTo(0, 0, 0)
for p in BattleBase.allPoints:
lsegs.drawTo(p[0], p[1], p[2])
p = BattleBase.allPoints[0]
lsegs.drawTo(p[0], p[1], p[2])
self.vis = self.attachNewNode(lsegs.create())
self.reparentTo(render)
self.isVisualized = 1
def setupCollisions(self, name):
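        # CollisionTube(ax, ay, az, bx, by, bz, radius): a vertical tube from
        # (0, 0, 0) to (0, 0, 9) with radius 9 enclosing the battle spot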
self.lockout = CollisionTube(0, 0, 0, 0, 0, 9, 9)
lockoutNode = CollisionNode(name)
lockoutNode.addSolid(self.lockout)
lockoutNode.setCollideMask(ToontownGlobals.WallBitmask)
self.lockoutNodePath = self.attachNewNode(lockoutNode)
self.lockoutNodePath.detachNode()
def removeCollisionData(self):
del self.lockout
self.lockoutNodePath.removeNode()
del self.lockoutNodePath
def enableCollision(self):
self.lockoutNodePath.reparentTo(self)
if len(self.toons) < 4:
self.accept(self.getCollisionName(), self.__handleLocalToonCollision)
def __handleLocalToonCollision(self, collEntry):
self.notify.debug('localToonCollision')
if self.fsm.getCurrentState().getName() == 'Off':
self.notify.debug('ignoring collision in Off state')
return
if not base.localAvatar.wantBattles:
return
if self._skippingRewardMovie:
return
base.cr.playGame.getPlace().setState('WaitForBattle')
toon = base.localAvatar
self.d_toonRequestJoin(toon.doId, toon.getPos(self))
base.localAvatar.preBattleHpr = base.localAvatar.getHpr(render)
self.localToonFsm.request('WaitForServer')
self.onWaitingForJoin()
def onWaitingForJoin(self):
pass
def denyLocalToonJoin(self):
self.notify.debug('denyLocalToonJoin()')
place = self.cr.playGame.getPlace()
if place.fsm.getCurrentState().getName() == 'WaitForBattle':
place.setState('walk')
self.localToonFsm.request('NoLocalToon')
def disableCollision(self):
self.ignore(self.getCollisionName())
self.lockoutNodePath.detachNode()
def openBattleCollision(self):
if not self.hasLocalToon():
self.enableCollision()
def closeBattleCollision(self):
self.ignore(self.getCollisionName())
def getCollisionName(self):
return 'enter' + self.lockoutNodePath.getName()
| linktlh/Toontown-journey | toontown/battle/DistributedBattleBase.py | Python | apache-2.0 | 61,661 |
"""Test state helpers."""
import asyncio
from datetime import timedelta
import unittest
from unittest.mock import patch
import homeassistant.core as ha
from homeassistant.setup import async_setup_component
from homeassistant.const import (SERVICE_TURN_ON, SERVICE_TURN_OFF)
from homeassistant.util.async_ import run_coroutine_threadsafe
from homeassistant.util import dt as dt_util
from homeassistant.helpers import state
from homeassistant.const import (
STATE_OPEN, STATE_CLOSED,
STATE_LOCKED, STATE_UNLOCKED,
STATE_ON, STATE_OFF,
STATE_HOME, STATE_NOT_HOME)
from homeassistant.components.sun import (STATE_ABOVE_HORIZON,
STATE_BELOW_HORIZON)
from tests.common import get_test_home_assistant, mock_service
import pytest
@asyncio.coroutine
def test_async_track_states(hass):
"""Test AsyncTrackStates context manager."""
point1 = dt_util.utcnow()
point2 = point1 + timedelta(seconds=5)
point3 = point2 + timedelta(seconds=5)
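    # freeze utcnow so each state's last_changed timestamp is deterministic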
with patch('homeassistant.core.dt_util.utcnow') as mock_utcnow:
mock_utcnow.return_value = point2
with state.AsyncTrackStates(hass) as states:
mock_utcnow.return_value = point1
hass.states.async_set('light.test', 'on')
mock_utcnow.return_value = point2
hass.states.async_set('light.test2', 'on')
state2 = hass.states.get('light.test2')
mock_utcnow.return_value = point3
hass.states.async_set('light.test3', 'on')
state3 = hass.states.get('light.test3')
    assert [state2, state3] == \
        sorted(states, key=lambda st: st.entity_id)
@asyncio.coroutine
def test_call_to_component(hass):
"""Test calls to components state reproduction functions."""
with patch(('homeassistant.components.media_player.'
'async_reproduce_states')) as media_player_fun:
media_player_fun.return_value = asyncio.Future()
media_player_fun.return_value.set_result(None)
with patch(('homeassistant.components.climate.'
'async_reproduce_states')) as climate_fun:
climate_fun.return_value = asyncio.Future()
climate_fun.return_value.set_result(None)
state_media_player = ha.State('media_player.test', 'bad')
state_climate = ha.State('climate.test', 'bad')
context = "dummy_context"
yield from state.async_reproduce_state(
hass,
[state_media_player, state_climate],
blocking=True,
context=context)
media_player_fun.assert_called_once_with(
hass,
[state_media_player],
context=context)
climate_fun.assert_called_once_with(
hass,
[state_climate],
context=context)
class TestStateHelpers(unittest.TestCase):
"""Test the Home Assistant event helpers."""
def setUp(self): # pylint: disable=invalid-name
"""Run when tests are started."""
self.hass = get_test_home_assistant()
run_coroutine_threadsafe(async_setup_component(
self.hass, 'homeassistant', {}), self.hass.loop).result()
def tearDown(self): # pylint: disable=invalid-name
"""Stop when tests are finished."""
self.hass.stop()
def test_get_changed_since(self):
"""Test get_changed_since."""
point1 = dt_util.utcnow()
point2 = point1 + timedelta(seconds=5)
point3 = point2 + timedelta(seconds=5)
with patch('homeassistant.core.dt_util.utcnow', return_value=point1):
self.hass.states.set('light.test', 'on')
state1 = self.hass.states.get('light.test')
with patch('homeassistant.core.dt_util.utcnow', return_value=point2):
self.hass.states.set('light.test2', 'on')
state2 = self.hass.states.get('light.test2')
with patch('homeassistant.core.dt_util.utcnow', return_value=point3):
self.hass.states.set('light.test3', 'on')
state3 = self.hass.states.get('light.test3')
assert [state2, state3] == \
state.get_changed_since([state1, state2, state3], point2)
def test_reproduce_with_no_entity(self):
"""Test reproduce_state with no entity."""
calls = mock_service(self.hass, 'light', SERVICE_TURN_ON)
state.reproduce_state(self.hass, ha.State('light.test', 'on'))
self.hass.block_till_done()
assert len(calls) == 0
assert self.hass.states.get('light.test') is None
def test_reproduce_turn_on(self):
"""Test reproduce_state with SERVICE_TURN_ON."""
calls = mock_service(self.hass, 'light', SERVICE_TURN_ON)
self.hass.states.set('light.test', 'off')
state.reproduce_state(self.hass, ha.State('light.test', 'on'))
self.hass.block_till_done()
assert len(calls) > 0
last_call = calls[-1]
assert 'light' == last_call.domain
assert SERVICE_TURN_ON == last_call.service
assert ['light.test'] == last_call.data.get('entity_id')
def test_reproduce_turn_off(self):
"""Test reproduce_state with SERVICE_TURN_OFF."""
calls = mock_service(self.hass, 'light', SERVICE_TURN_OFF)
self.hass.states.set('light.test', 'on')
state.reproduce_state(self.hass, ha.State('light.test', 'off'))
self.hass.block_till_done()
assert len(calls) > 0
last_call = calls[-1]
assert 'light' == last_call.domain
assert SERVICE_TURN_OFF == last_call.service
assert ['light.test'] == last_call.data.get('entity_id')
def test_reproduce_complex_data(self):
"""Test reproduce_state with complex service data."""
calls = mock_service(self.hass, 'light', SERVICE_TURN_ON)
self.hass.states.set('light.test', 'off')
complex_data = ['hello', {'11': '22'}]
state.reproduce_state(self.hass, ha.State('light.test', 'on', {
'complex': complex_data
}))
self.hass.block_till_done()
assert len(calls) > 0
last_call = calls[-1]
assert 'light' == last_call.domain
assert SERVICE_TURN_ON == last_call.service
assert complex_data == last_call.data.get('complex')
def test_reproduce_bad_state(self):
"""Test reproduce_state with bad state."""
calls = mock_service(self.hass, 'light', SERVICE_TURN_ON)
self.hass.states.set('light.test', 'off')
state.reproduce_state(self.hass, ha.State('light.test', 'bad'))
self.hass.block_till_done()
assert len(calls) == 0
assert 'off' == self.hass.states.get('light.test').state
def test_as_number_states(self):
"""Test state_as_number with states."""
zero_states = (STATE_OFF, STATE_CLOSED, STATE_UNLOCKED,
STATE_BELOW_HORIZON, STATE_NOT_HOME)
one_states = (STATE_ON, STATE_OPEN, STATE_LOCKED, STATE_ABOVE_HORIZON,
STATE_HOME)
for _state in zero_states:
assert 0 == state.state_as_number(
ha.State('domain.test', _state, {}))
for _state in one_states:
assert 1 == state.state_as_number(
ha.State('domain.test', _state, {}))
def test_as_number_coercion(self):
"""Test state_as_number with number."""
for _state in ('0', '0.0', 0, 0.0):
assert 0.0 == state.state_as_number(
ha.State('domain.test', _state, {}))
for _state in ('1', '1.0', 1, 1.0):
assert 1.0 == state.state_as_number(
ha.State('domain.test', _state, {}))
def test_as_number_invalid_cases(self):
"""Test state_as_number with invalid cases."""
for _state in ('', 'foo', 'foo.bar', None, False, True, object,
object()):
with pytest.raises(ValueError):
state.state_as_number(ha.State('domain.test', _state, {}))
| molobrakos/home-assistant | tests/helpers/test_state.py | Python | apache-2.0 | 8,089 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This module contains the NewRect class.
"""
from harpia.GUI.fieldtypes import *
from harpia.model.plugin import Plugin
class NewRect(Plugin):
"""
This class contains methods related the NewRect class.
"""
# -------------------------------------------------------------------------
def __init__(self):
Plugin.__init__(self)
self.help = "Creates new rectangle"
self.label = "New Rectangle"
self.color = "50:50:200:150"
self.out_ports = [{"type":"harpia.extensions.c.ports.rect",
"name":"size",
"label":"Size"}]
self.group = "Basic Data Type"
self.properties = [{"label": "X",
"name": "x",
"type": HARPIA_INT,
"lower": 0,
"upper": 65535,
"step": 1,
"value":0
},
{"label": "Y",
"name": "y",
"type": HARPIA_INT,
"lower": 0,
"upper": 65535,
"step": 1,
"value":0
},
{"label": "Width",
"name": "width",
"type": HARPIA_INT,
"lower": 0,
"upper": 65535,
"step": 1,
"value":640
},
{"label": "Height",
"name": "height",
"type": HARPIA_INT,
"lower": 0,
"upper": 65535,
"step": 1,
"value":480
}
]
self.codes[1] = "CvRect block$id$_rect_o0 = cvRect( 0, 0, 1, 1);"
self.codes[2] = \
'block$id$_rect_o0 = cvRect($prop[x]$, $prop[y]$, $prop[width]$, $prop[height]$);\n'
self.language = "c"
self.framework = "opencv"
# -----------------------------------------------------------------------------
| flschiavoni/harpia | harpia/extensions/c/opencv/newRect.py | Python | gpl-3.0 | 2,398 |
'''
A Mini-implementation of the Storlet middleware filter.
@author: josep sampe
'''
from swift.common.utils import get_logger
from swift.common.utils import register_swift_info
from swift.common.swob import Request
from swift.common.utils import config_true_value
from storlets.swift_middleware.handlers.base import SwiftFileManager
from swift.common.swob import wsgify
class StorletFilter(object):
def __init__(self, app, conf):
self.app = app
self.conf = conf
self.exec_server = self.conf.get('execution_server')
self.logger = get_logger(self.conf, log_route='storlet_filter')
self.filter_data = self.conf['filter_data']
self.parameters = self.filter_data['params']
self.gateway_class = self.conf['storlets_gateway_module']
self.sreq_class = self.gateway_class.request_class
self.storlet_container = conf.get('storlet_container')
self.storlet_dependency = conf.get('storlet_dependency')
self.log_container = conf.get('storlet_logcontainer')
self.client_conf_file = '/etc/swift/storlet-proxy-server.conf'
self.register_info()
def register_info(self):
register_swift_info('storlet_filter')
def _setup_gateway(self):
"""
Setup gateway instance
"""
self.gateway = self.gateway_class(self.conf, self.logger, self.scope)
def _augment_storlet_request(self, req):
"""
Add to request the storlet parameters to be used in case the request
is forwarded to the data node (GET case)
        :param req: the request to augment with the storlet headers
"""
req.headers['X-Storlet-Language'] = self.filter_data['language']
req.headers['X-Storlet-Main'] = self.filter_data['main']
req.headers['X-Storlet-Dependency'] = self.filter_data['dependencies']
req.headers['X-Storlet-Content-Length'] = self.filter_data['size']
req.headers['X-Storlet-Generate-Log'] = False
req.headers['X-Storlet-X-Timestamp'] = 0
def _get_storlet_invocation_options(self, req):
options = dict()
filtered_key = ['X-Storlet-Range', 'X-Storlet-Generate-Log']
for key in req.headers:
prefix = 'X-Storlet-'
if key.startswith(prefix) and key not in filtered_key:
new_key = 'storlet_' + \
key[len(prefix):].lower().replace('-', '_')
options[new_key] = req.headers.get(key)
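                # e.g. an (illustrative) 'X-Storlet-Extra' header would be
                # exposed to the gateway as options['storlet_extra'].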
generate_log = req.headers.get('X-Storlet-Generate-Log')
options['generate_log'] = config_true_value(generate_log)
options['scope'] = self.scope
options['file_manager'] = \
SwiftFileManager(self.account, self.storlet_container,
self.storlet_dependency, self.log_container,
self.client_conf_file, self.logger)
return options
def _build_storlet_request(self, req_resp, params, data_iter):
storlet_id = self.storlet_name
new_env = dict(req_resp.environ)
req = Request.blank(new_env['PATH_INFO'], new_env)
req.headers['X-Run-Storlet'] = self.storlet_name
self._augment_storlet_request(req)
options = self._get_storlet_invocation_options(req)
if hasattr(data_iter, '_fp'):
sreq = self.sreq_class(storlet_id, params, dict(),
data_fd=data_iter._fp.fileno(),
options=options)
else:
sreq = self.sreq_class(storlet_id, params, dict(),
data_iter, options=options)
return sreq
def _call_gateway(self, req_resp, params, crystal_iter):
sreq = self._build_storlet_request(req_resp, params, crystal_iter)
sresp = self.gateway.invocation_flow(sreq)
return sresp.data_iter
@wsgify
def __call__(self, req):
if req.method in ('GET', 'PUT'):
storlet = self.filter_data.pop('name')
params = self.parameters
self.storlet_name = storlet
etag = None
try:
if self.exec_server == 'proxy':
_, self.account, _, _ = req.split_path(4, 4, rest_with_last=True)
elif self.exec_server == 'object':
_, _, self.account, _, _ = req.split_path(5, 5, rest_with_last=True)
        except ValueError:
            # Not an object request
            return req.get_response(self.app)
self.scope = self.account[5:18]
        self.logger.info('Going to execute ' + storlet +
' storlet with parameters "' + str(params) + '"')
self._setup_gateway()
if 'Etag' in req.headers.keys():
etag = req.headers.pop('Etag')
if req.method == 'GET':
response = req.get_response(self.app)
data_iter = response.app_iter
response.app_iter = self._call_gateway(response, params, data_iter)
if 'Content-Length' in response.headers:
response.headers.pop('Content-Length')
if 'Transfer-Encoding' in response.headers:
response.headers.pop('Transfer-Encoding')
elif req.method == 'PUT':
reader = req.environ['wsgi.input'].read
data_iter = iter(lambda: reader(65536), '')
req.environ['wsgi.input'] = self._call_gateway(req, params, data_iter)
if 'CONTENT_LENGTH' in req.environ:
req.environ.pop('CONTENT_LENGTH')
req.headers['Transfer-Encoding'] = 'chunked'
response = req.get_response(self.app)
if etag:
response.headers['etag'] = etag
else:
response.headers['etag'] = ''
return response
return req.get_response(self.app)
def filter_factory(global_conf, **local_conf):
conf = global_conf.copy()
conf.update(local_conf)
def storlet_filter(app):
return StorletFilter(app, conf)
return storlet_filter
| Crystal-SDS/filter-middleware | crystal_filter_middleware/filters/storlet.py | Python | gpl-3.0 | 6,161 |
import re
from markdown.blockprocessors import HRProcessor
class BBCodeHRProcessor(HRProcessor):
RE = r'^\[hr\]*'
# Detect hr on any line of a block.
SEARCH_RE = re.compile(RE, re.MULTILINE | re.IGNORECASE)
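    # e.g. a block line reading "[hr]" (in any letter case) is treated as a
    # horizontal rule by this processor.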
| 390910131/Misago | misago/markup/bbcode/blocks.py | Python | gpl-2.0 | 223 |
# Copyright 2014-2020 The ODL contributors
#
# This file is part of ODL.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
"""Utility functions for FOMs."""
import numpy as np
from odl.discr import uniform_discr
from odl.trafos.backends import PYFFTW_AVAILABLE
__all__ = ()
def filter_image_sep2d(image, fh, fv, impl='numpy', padding=None):
"""Filter an image with a separable filter.
Parameters
----------
image : 2D array-like
The image to be filtered. It must have a real (vs. complex) dtype.
fh, fv : 1D array-like
Horizontal (axis 0) and vertical (axis 1) filters. Their sizes
can be at most the image sizes in the respective axes.
impl : {'numpy', 'pyfftw'}, optional
FFT backend to use. The ``pyfftw`` backend requires the
``pyfftw`` package to be installed. It is usually significantly
faster than the NumPy backend.
padding : positive int, optional
Amount of zeros added to the left and right of the image in all
axes before FFT. This helps avoiding wraparound artifacts due to
large boundary values.
For ``None``, the padding is computed as ::
padding = min(max(len(fh), len(fv)) - 1, 64)
A padding of ``len(filt) - 1`` ensures that errors in FFT-based
convolutions are small. At the same time, the padding should not
be excessive to retain efficiency.
Returns
-------
filtered : 2D `numpy.ndarray`
The image filtered horizontally by ``fh`` and vertically by ``fv``.
It has the same shape as ``image``, and its dtype is
``np.result_type(image, fh, fv)``.
"""
# TODO: generalize for nD
impl, impl_in = str(impl).lower(), impl
if impl not in ('numpy', 'pyfftw'):
raise ValueError('`impl` {!r} not understood'
''.format(impl_in))
image = np.asarray(image)
if image.ndim != 2:
raise ValueError('`image` must be 2-dimensional, got image with '
'ndim={}'.format(image.ndim))
if image.size == 0:
raise ValueError('`image` cannot have size 0')
if not np.issubsctype(image.dtype, np.floating):
image = image.astype(float)
fh = np.asarray(fh).astype(image.dtype)
if fh.ndim != 1:
raise ValueError('`fh` must be one-dimensional')
elif fh.size == 0:
raise ValueError('`fh` cannot have size 0')
elif fh.size > image.shape[0]:
raise ValueError('`fh` can be at most `image.shape[0]`, got '
'{} > {}'.format(fh.size, image.shape[0]))
fv = np.asarray(fv).astype(image.dtype)
if fv.ndim != 1:
raise ValueError('`fv` must be one-dimensional')
elif fv.size == 0:
raise ValueError('`fv` cannot have size 0')
    elif fv.size > image.shape[1]:
raise ValueError('`fv` can be at most `image.shape[1]`, got '
'{} > {}'.format(fv.size, image.shape[1]))
# Pad image with zeros
if padding is None:
padding = min(max(len(fh), len(fv)) - 1, 64)
if padding != 0:
image_padded = np.pad(image, padding, mode='constant')
else:
image_padded = image.copy() if impl == 'pyfftw' else image
# Prepare filters for the convolution
def prepare_for_fft(filt, n_new):
"""Return padded and shifted filter ready for FFT.
The filter is padded with zeros to the new size, and then shifted
such that such that the middle element of old filter, i.e., the
one at index ``(len(filt) - 1) // 2`` ends up at index 0.
"""
mid = (len(filt) - 1) // 2
padded = np.zeros(n_new, dtype=filt.dtype)
padded[:len(filt) - mid] = filt[mid:]
padded[len(padded) - mid:] = filt[:mid]
return padded
fh = prepare_for_fft(fh, image_padded.shape[0])
fv = prepare_for_fft(fv, image_padded.shape[1])
# Perform the multiplication in Fourier space and apply inverse FFT
if impl == 'numpy':
image_ft = np.fft.rfftn(image_padded)
fh_ft = np.fft.fft(fh)
fv_ft = np.fft.rfft(fv)
image_ft *= fh_ft[:, None]
image_ft *= fv_ft[None, :]
# Important to specify the shape since `irfftn` cannot know the
# original shape
conv = np.fft.irfftn(image_ft, s=image_padded.shape)
if conv.dtype != image.dtype:
conv = conv.astype(image.dtype)
elif impl == 'pyfftw':
if not PYFFTW_AVAILABLE:
raise ValueError(
'`pyfftw` package is not available; you need to install it '
'to use the pyfftw backend')
import pyfftw
import multiprocessing
# Generate output arrays, for half-complex transform of image and
# vertical filter, and full FT of the horizontal filter
out_img_shape = (image_padded.shape[0], image_padded.shape[1] // 2 + 1)
out_img_dtype = np.result_type(image_padded, 1j)
out_img = np.empty(out_img_shape, out_img_dtype)
out_fh_shape = out_img_shape[0]
out_fh_dtype = np.result_type(fh, 1j)
fh_c = fh.astype(out_fh_dtype) # need to make this a C2C trafo
out_fh = np.empty(out_fh_shape, out_fh_dtype)
out_fv_shape = out_img_shape[1]
out_fv_dtype = np.result_type(fv, 1j)
out_fv = np.empty(out_fv_shape, out_fv_dtype)
# Perform the forward transforms of image and filters. We use
# the `FFTW_ESTIMATE` flag to not allow the planner to destroy
# the input.
plan = pyfftw.FFTW(image_padded, out_img, axes=(0, 1),
direction='FFTW_FORWARD',
flags=['FFTW_ESTIMATE'],
threads=multiprocessing.cpu_count())
plan(image_padded, out_img)
plan = pyfftw.FFTW(fh_c, out_fh, axes=(0,),
direction='FFTW_FORWARD',
flags=['FFTW_ESTIMATE'],
threads=multiprocessing.cpu_count())
plan(fh_c, out_fh)
plan = pyfftw.FFTW(fv, out_fv, axes=(0,),
direction='FFTW_FORWARD',
flags=['FFTW_ESTIMATE'],
threads=multiprocessing.cpu_count())
plan(fv, out_fv)
# Fourier space multiplication
out_img *= out_fh[:, None]
out_img *= out_fv[None, :]
# Inverse trafo
conv = image_padded # Overwrite
plan = pyfftw.FFTW(out_img.copy(), conv, axes=(0, 1),
direction='FFTW_BACKWARD',
flags=['FFTW_ESTIMATE'],
threads=multiprocessing.cpu_count())
plan(out_img, conv)
else:
raise ValueError('unsupported `impl` {!r}'.format(impl_in))
if padding:
return conv[padding:-padding, padding:-padding]
else:
return conv
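# A minimal usage sketch for ``filter_image_sep2d`` (not part of the original
# module; the toy image and filters are illustrative). Guarded so importing
# this module stays side-effect free.
if __name__ == '__main__':
    _img = np.arange(16.0).reshape(4, 4)
    _fh = np.array([1.0, 1.0])    # filter applied along axis 0
    _fv = np.array([1.0, -1.0])   # filter applied along axis 1
    _out = filter_image_sep2d(_img, _fh, _fv, impl='numpy')
    print(_out.shape)  # -> (4, 4), same shape as the input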
def haarpsi_similarity_map(img1, img2, axis, c, a):
r"""Local similarity map for directional features along an axis.
Parameters
----------
img1, img2 : array-like
The images to compare. They must have equal shape.
axis : {0, 1}
Direction in which to look for edge similarities.
c : positive float
Constant determining the score of maximally dissimilar values.
Smaller constant means higher penalty for dissimilarity.
See Notes for details.
a : positive float
Parameter in the logistic function. Larger value leads to a
steeper curve, thus lowering the threshold for an input to
be mapped to an output close to 1. See Notes for details.
Returns
-------
local_sim : `numpy.ndarray`
Pointwise similarity of directional edge features of ``img1`` and
``img2``, measured using two Haar wavelet detail levels.
Notes
-----
For input images :math:`f_1, f_2` this function is defined as
.. math::
\mathrm{HS}_{f_1, f_2}^{(k)}(x) =
l_a \left(
\frac{1}{2} \sum_{j=1}^2
S\left(\left|g_j^{(k)} \ast f_1 \right|(x),
\left|g_j^{(k)} \ast f_2 \right|(x), c\right)
\right),
see `[Rei+2016] <https://arxiv.org/abs/1607.06140>`_ equation (10).
Here, the superscript :math:`(k)` refers to the axis (0 or 1)
in which edge features are compared, :math:`l_a` is the logistic
function :math:`l_a(x) = (1 + \mathrm{e}^{-a x})^{-1}`, and :math:`S`
is the pointwise similarity score
.. math::
S(x, y, c) = \frac{2xy + c^2}{x^2 + y^2 + c^2},
Hence, :math:`c` is the :math:`y`-value at which the score
drops to :math:`1 / 2` for :math:`x = 0`. In other words, the smaller
:math:`c` is chosen, the more dissimilarity is penalized.
The filters :math:`g_j^{(k)}` are high-pass Haar wavelet filters in the
axis :math:`k` and low-pass Haar wavelet filters in the other axes.
The index :math:`j` refers to the scaling level of the wavelet.
In code, these filters can be computed as ::
f_lo_level1 = [np.sqrt(2), np.sqrt(2)] # low-pass Haar filter
f_hi_level1 = [-np.sqrt(2), np.sqrt(2)] # high-pass Haar filter
f_lo_level2 = np.repeat(f_lo_level1, 2)
f_hi_level2 = np.repeat(f_hi_level1, 2)
f_lo_level3 = np.repeat(f_lo_level2, 2)
f_hi_level3 = np.repeat(f_hi_level2, 2)
...
The logistic function :math:`l_a` transforms values in
:math:`[0, \infty)` to :math:`[1/2, 1)`, where the parameter
:math:`a` determines how fast the curve attains values close
to 1. Larger :math:`a` means that smaller :math:`x` will yield
a value :math:`l_a(x)` close to 1 (and thus result in a higher
score). In other words, the larger :math:`a`, the more forgiving
the similarity measure.
References
----------
[Rei+2016] Reisenhofer, R, Bosse, S, Kutyniok, G, and Wiegand, T.
*A Haar Wavelet-Based Perceptual Similarity Index for Image Quality
Assessment*. arXiv:1607.06140 [cs], Jul. 2016.
"""
# TODO: generalize for nD
import scipy.special
impl = 'pyfftw' if PYFFTW_AVAILABLE else 'numpy'
# Haar wavelet filters for levels 1 and 2
dec_lo_lvl1 = np.array([np.sqrt(2), np.sqrt(2)])
dec_lo_lvl2 = np.repeat(dec_lo_lvl1, 2)
dec_hi_lvl1 = np.array([-np.sqrt(2), np.sqrt(2)])
dec_hi_lvl2 = np.repeat(dec_hi_lvl1, 2)
if axis == 0:
# High-pass in axis 0, low-pass in axis 1
fh_lvl1 = dec_hi_lvl1
fv_lvl1 = dec_lo_lvl1
fh_lvl2 = dec_hi_lvl2
fv_lvl2 = dec_lo_lvl2
elif axis == 1:
# Low-pass in axis 0, high-pass in axis 1
fh_lvl1 = dec_lo_lvl1
fv_lvl1 = dec_hi_lvl1
fh_lvl2 = dec_lo_lvl2
fv_lvl2 = dec_hi_lvl2
else:
raise ValueError('`axis` out of the valid range 0 -> 1')
# Filter images with level 1 and 2 filters
img1_lvl1 = filter_image_sep2d(img1, fh_lvl1, fv_lvl1, impl=impl)
img1_lvl2 = filter_image_sep2d(img1, fh_lvl2, fv_lvl2, impl=impl)
img2_lvl1 = filter_image_sep2d(img2, fh_lvl1, fv_lvl1, impl=impl)
img2_lvl2 = filter_image_sep2d(img2, fh_lvl2, fv_lvl2, impl=impl)
c = float(c)
def S(x, y):
"""Return ``(2 * x * y + c ** 2) / (x ** 2 + y ** 2 + c ** 2)``."""
num = 2 * x
num *= y
num += c ** 2
denom = x ** 2
denom += y ** 2
denom += c ** 2
frac = num
frac /= denom
return frac
# Compute similarity scores for both levels
np.abs(img1_lvl1, out=img1_lvl1)
np.abs(img2_lvl1, out=img2_lvl1)
np.abs(img1_lvl2, out=img1_lvl2)
np.abs(img2_lvl2, out=img2_lvl2)
sim_lvl1 = S(img1_lvl1, img2_lvl1)
sim_lvl2 = S(img1_lvl2, img2_lvl2)
# Return logistic of the mean value
sim = sim_lvl1
sim += sim_lvl2
sim /= 2
sim *= a
return scipy.special.expit(sim)
def haarpsi_weight_map(img1, img2, axis):
r"""Weighting map for directional features along an axis.
Parameters
----------
img1, img2 : array-like
The images to compare. They must have equal shape.
axis : {0, 1}
Direction in which to look for edge similarities.
Returns
-------
weight_map : `numpy.ndarray`
The pointwise weight map. See Notes for details.
Notes
-----
The pointwise weight map of associated with input images :math:`f_1, f_2`
and axis :math:`k` is defined
as
.. math::
\mathrm{W}_{f_1, f_2}^{(k)}(x) =
\max \left\{
\left|g_3^{(k)} \ast f_1 \right|(x),
\left|g_3^{(k)} \ast f_2 \right|(x)
\right\},
see `[Rei+2016] <https://arxiv.org/abs/1607.06140>`_ equations (11)
and (13).
Here, :math:`g_3^{(k)}` is a Haar wavelet filter for scaling level 3
that performs high-pass filtering in axis :math:`k` and low-pass
filtering in the other axes. Such a filter can be computed as ::
f_lo_level1 = [np.sqrt(2), np.sqrt(2)] # low-pass Haar filter
f_hi_level1 = [-np.sqrt(2), np.sqrt(2)] # high-pass Haar filter
f_lo_level3 = np.repeat(f_lo_level1, 4)
f_hi_level3 = np.repeat(f_hi_level1, 4)
References
----------
[Rei+2016] Reisenhofer, R, Bosse, S, Kutyniok, G, and Wiegand, T.
*A Haar Wavelet-Based Perceptual Similarity Index for Image Quality
Assessment*. arXiv:1607.06140 [cs], Jul. 2016.
"""
# TODO: generalize for nD
impl = 'pyfftw' if PYFFTW_AVAILABLE else 'numpy'
# Haar wavelet filters for level 3
dec_lo_lvl3 = np.repeat([np.sqrt(2), np.sqrt(2)], 4)
dec_hi_lvl3 = np.repeat([-np.sqrt(2), np.sqrt(2)], 4)
if axis == 0:
fh_lvl3 = dec_hi_lvl3
fv_lvl3 = dec_lo_lvl3
elif axis == 1:
fh_lvl3 = dec_lo_lvl3
fv_lvl3 = dec_hi_lvl3
else:
raise ValueError('`axis` out of the valid range 0 -> 1')
# Filter with level 3 wavelet filter
img1_lvl3 = filter_image_sep2d(img1, fh_lvl3, fv_lvl3, impl=impl)
img2_lvl3 = filter_image_sep2d(img2, fh_lvl3, fv_lvl3, impl=impl)
# Return the pointwise maximum of the filtered images
np.abs(img1_lvl3, out=img1_lvl3)
np.abs(img2_lvl3, out=img2_lvl3)
return np.maximum(img1_lvl3, img2_lvl3)
def spherical_sum(image, binning_factor=1.0):
"""Sum image values over concentric annuli.
Parameters
----------
image : `DiscretizedSpace` element
Input data whose radial sum should be computed.
binning_factor : positive float, optional
Reduce the number of output bins by this factor. Increasing this
number can help reducing fluctuations due to the variance of points
that fall in a particular annulus.
A binning factor of ``1`` corresponds to a bin size equal to
image pixel size for images with square pixels, otherwise ::
max(norm2(c)) / norm2(shape)
where the maximum is taken over all corners of the image domain.
Returns
-------
spherical_sum : 1D `DiscretizedSpace` element
The spherical sum of ``image``. Its space is one-dimensional with
domain ``[0, rmax]``, where ``rmax`` is the radius of the smallest
ball containing ``image.space.domain``. Its shape is ``(N,)`` with ::
N = int(sqrt(sum(n ** 2 for n in image.shape)) / binning_factor)
"""
r = np.sqrt(sum(xi ** 2 for xi in image.space.meshgrid))
rmax = max(np.linalg.norm(c) for c in image.space.domain.corners())
n_bins = int(np.sqrt(sum(n ** 2 for n in image.shape)) / binning_factor)
rad_sum, _ = np.histogram(r, weights=image, bins=n_bins, range=(0, rmax))
out_spc = uniform_discr(min_pt=0, max_pt=rmax, shape=n_bins,
impl=image.space.impl, dtype=image.space.dtype,
interp="linear", axis_labels=["$r$"])
return out_spc.element(rad_sum)
| kohr-h/odl | odl/contrib/fom/util.py | Python | mpl-2.0 | 16,016 |
'''
flaskext.bcrypt
---------------
A Flask extension providing bcrypt hashing and comparison facilities.
:copyright: (c) 2011 by Max Countryman.
:license: BSD, see LICENSE for more details.
'''
from __future__ import absolute_import
from __future__ import print_function
__version_info__ = ('0', '6', '3')
__version__ = '.'.join(__version_info__)
__author__ = 'Max Countryman'
__license__ = 'BSD'
__copyright__ = '(c) 2011 by Max Countryman'
__all__ = ['Bcrypt', 'check_password_hash', 'generate_password_hash']
from werkzeug.security import safe_str_cmp
try:
import bcrypt
except ImportError as e:
print('python-bcrypt is required to use Flask-Bcrypt')
raise e
from sys import version_info as PYVER
PYVER = PYVER[0]
def generate_password_hash(password, rounds=None):
'''This helper function wraps the eponymous method of :class:`Bcrypt`. It
is intended to be used as a helper function at the expense of the
configuration variable provided when passing back the app object. In other
words this shortcut does not make use of the app object at all.
    To use this function, simply import it from the module and use it in a
similar fashion as the method would be used. Here is a quick example::
from flask.ext.bcrypt import generate_password_hash
pw_hash = generate_password_hash('hunter2', 10)
:param password: The password to be hashed.
:param rounds: The optional number of rounds.
'''
return Bcrypt().generate_password_hash(password, rounds)
def check_password_hash(pw_hash, password):
    '''This helper function wraps the eponymous method of :class:`Bcrypt`. It
is intended to be used as a helper function at the expense of the
configuration variable provided when passing back the app object. In other
words this shortcut does not make use of the app object at all.
    To use this function, simply import it from the module and use it in a
similar fashion as the method would be used. Here is a quick example::
from flask.ext.bcrypt import check_password_hash
check_password_hash(pw_hash, 'hunter2') # returns True
:param pw_hash: The hash to be compared against.
:param password: The password to compare.
'''
return Bcrypt().check_password_hash(pw_hash, password)
class Bcrypt(object):
'''Bcrypt class container for password hashing and checking logic using
bcrypt, of course. This class may be used to intialize your Flask app
object. The purpose is to provide a simple interface for overriding
Werkzeug's built-in password hashing utilities.
    Although such methods are not actually overridden, the API is intentionally
made similar so that existing applications which make use of the previous
hashing functions might be easily adapted to the stronger facility of
bcrypt.
To get started you will wrap your application's app object something like
this::
app = Flask(__name__)
bcrypt = Bcrypt(app)
Now the two primary utility methods are exposed via this object, `bcrypt`.
So in the context of the application, important data, such as passwords,
could be hashed using this syntax::
password = 'hunter2'
pw_hash = bcrypt.generate_password_hash(password)
    Once hashed, the value is irreversible. However, in the case of validating
    logins, a simple hashing of the candidate password and a subsequent
    comparison against the stored hash is all that is required. Importantly,
    the comparison should be done in constant time, which helps prevent
    timing attacks. A simple utility method is provided for this::
candidate = 'secret'
bcrypt.check_password_hash(pw_hash, candidate)
If both the candidate and the existing password hash are a match
`check_password_hash` returns True. Otherwise, it returns False.
.. admonition:: Namespacing Issues
It's worth noting that if you use the format, `bcrypt = Bcrypt(app)`
you are effectively overriding the bcrypt module. Though it's unlikely
you would need to access the module outside of the scope of the
        extension, be aware that it's overridden.
Alternatively consider using a different name, such as `flask_bcrypt
= Bcrypt(app)` to prevent naming collisions.
Additionally a configuration value for `BCRYPT_LOG_ROUNDS` may be set in
the configuration of the Flask app. If none is provided this will
internally be assigned to 12. (This value is used in determining the
complexity of the encryption, see bcrypt for more details.)
:param app: The Flask application object. Defaults to None.
'''
_log_rounds = 12
def __init__(self, app=None):
if app is not None:
self.init_app(app)
def init_app(self, app):
        '''Initializes the application with the extension.
:param app: The Flask application object.
'''
self._log_rounds = app.config.get('BCRYPT_LOG_ROUNDS', 12)
def generate_password_hash(self, password, rounds=None):
'''Generates a password hash using bcrypt. Specifying `rounds`
sets the log_rounds parameter of `bcrypt.gensalt()` which determines
the complexity of the salt. 12 is the default value.
Example usage of :class:`generate_password_hash` might look something
like this::
pw_hash = bcrypt.generate_password_hash('secret', 10)
:param password: The password to be hashed.
:param rounds: The optional number of rounds.
'''
if not password:
raise ValueError('Password must be non-empty.')
if rounds is None:
rounds = self._log_rounds
if PYVER < 3 and isinstance(password, unicode):
password = password.encode('u8')
elif PYVER >= 3 and isinstance(password, bytes):
password = password.decode('utf-8')
password = str(password)
return bcrypt.hashpw(password, bcrypt.gensalt(rounds))
def check_password_hash(self, pw_hash, password):
'''Tests a password hash against a candidate password. The candidate
password is first hashed and then subsequently compared in constant
time to the existing hash. This will either return `True` or `False`.
Example usage of :class:`check_password_hash` would look something
like this::
pw_hash = bcrypt.generate_password_hash('secret', 10)
bcrypt.check_password_hash(pw_hash, 'secret') # returns True
:param pw_hash: The hash to be compared against.
:param password: The password to compare.
'''
if PYVER < 3 and isinstance(password, unicode):
password = password.encode('u8')
elif PYVER >= 3 and isinstance(password, bytes):
password = password.decode('utf-8')
password = str(password)
return safe_str_cmp(bcrypt.hashpw(password, pw_hash), pw_hash)
| growingdever/flask-bcrypt | flask_bcrypt.py | Python | bsd-3-clause | 7,187 |
# coding: utf-8
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.core.urlresolvers import reverse
from django.forms import ModelForm, ValidationError
from django import forms
from django.views.generic import CreateView, UpdateView, DeleteView, TemplateView
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Fieldset, ButtonHolder, Submit, HTML, Button
from ..models import Student, Group
from ..util import paginate
from django.utils.translation import ugettext as _
class GroupUpdateForm(ModelForm):
class Meta:
model = Group
fields = ['title', 'leader', 'notes']
def __init__(self, *args, **kwargs):
super(GroupUpdateForm, self).__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.form_class = 'form-horizontal'
self.helper.form_method = 'POST'
self.helper.form_action = reverse('groups_edit', kwargs={'pk': kwargs['instance'].id})
self.helper.help_text_inline = False
self.helper.label_class = 'col-sm-4'
self.helper.field_class = 'col-sm-7'
self.helper.layout = Layout(
Fieldset('', 'title', 'leader', 'notes'),
ButtonHolder(
Submit('save_button', _(u'Save')),
Button('cancel_button', _(u'Cancel'), css_class='btn-default')))
def clean_leader(self):
""" Check if (student not in this group) and (student is not None)
If yes, then ensure it's the same as selected group. """
# get students in current group
students = Student.objects.filter(student_group=self.instance)
if (self.cleaned_data['leader'] not in students) and self.cleaned_data['leader']:
raise ValidationError(_(u'Student not in this group'), code='invalid')
return self.cleaned_data['leader']
class GroupUpdateView(UpdateView):
model = Group
template_name = 'students/crud.html'
form_class = GroupUpdateForm
def get_object(self, queryset=None):
return get_object_or_404(self.model, pk=self.kwargs.get('pk'))
def get_context_data(self, **kwargs):
context = super(GroupUpdateView, self).get_context_data(**kwargs)
context['title'] = _(u'Group edit')
return context
def get_success_url(self):
return u'%s?status_message=%s' % (reverse('groups'), _(u'Group updated successfully!'))
def post(self, request, *args, **kwargs):
if request.POST.get('cancel_button'):
return HttpResponseRedirect(u'%s?status_message=%s' % (reverse('groups'), _(u'Group update canceled!')))
else:
return super(GroupUpdateView, self).post(request, *args, **kwargs)
class GroupAddForm(ModelForm):
class Meta:
model = Group
fields = ['title', 'leader', 'notes']
def __init__(self, *args, **kwargs):
super(GroupAddForm, self).__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.form_class = 'form-horizontal'
self.helper.form_method = 'POST'
self.helper.form_action = reverse('groups_add')
self.helper.help_text_inline = False
self.helper.label_class = 'col-sm-4'
self.helper.field_class = 'col-sm-7'
self.helper.layout = Layout(
Fieldset('', 'title', 'leader', 'notes'),
ButtonHolder(
Submit('save_button', _(u'Save')),
Button('cancel_button', _(u'Cancel'), css_class='btn-default')))
class GroupAddView(CreateView):
model = Group
template_name = 'students/crud.html'
form_class = GroupAddForm
def get_context_data(self, **kwargs):
context = super(GroupAddView, self).get_context_data(**kwargs)
context['title'] = _(u'Group add')
return context
def get_success_url(self):
return u'%s?status_message=%s' % (reverse('groups'), _(u'Group added successfully!'))
def post(self, request, *args, **kwargs):
if request.POST.get('cancel_button'):
return HttpResponseRedirect(u'%s?status_message=%s' % (reverse('groups'), _(u'Group addition canceled!')))
else:
return super(GroupAddView, self).post(request, *args, **kwargs)
class GroupDeleteForm(forms.Form):
def __init__(self, *args, **kwargs):
super(GroupDeleteForm, self).__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.form_class = 'form-horizontal'
self.helper.form_action = reverse('groups_delete', kwargs={'pk': kwargs['initial']['pk']})
self.helper.form_method = 'POST'
self.helper.help_text_inline = False
self.helper.label_class = 'col-sm-4'
self.helper.field_class = 'col-sm-7'
self.helper.layout = Layout(
HTML(u"<p>%s</p>" % _(u'Do you really want to delete group {{ object }}?')),
ButtonHolder(Submit('submit_button', _(u'Delete'), css_class='btn-danger'),
Button('cancel_button', _(u'Cancel'), css_class='btn-default')))
class GroupDeleteView(DeleteView):
model = Group
template_name = 'students/crud.html'
def get_object(self, queryset=None):
return get_object_or_404(self.model, pk=self.kwargs.get('pk'))
def get_context_data(self, **kwargs):
context = super(GroupDeleteView, self).get_context_data(**kwargs)
context['title'] = _(u'Delete Group')
context['form'] = GroupDeleteForm(initial={'pk': self.kwargs['pk']})
return context
def get_success_url(self):
return u'%s?status_message=%s' % (reverse('groups'), _(u'Group deleted successfully!'))
def post(self, request, *args, **kwargs):
if request.POST.get('cancel_button'):
return HttpResponseRedirect(u'%s?status_message=%s' % (reverse('groups'), _(u'Group deletion canceled!')))
else:
return super(GroupDeleteView, self).post(request, *args, **kwargs)
class GroupListView(TemplateView):
template_name = 'students/groups_list.html'
def get_context_data(self, **kwargs):
context = super(GroupListView, self).get_context_data(**kwargs)
context['groups_url'] = reverse('groups')
groups = Group.objects.all()
order_by = self.request.GET.get('order_by', '')
if order_by not in ('id', 'leader'):
order_by = 'title'
groups = groups.order_by(order_by)
context['order_by'] = order_by
reverse_by = self.request.GET.get('reverse', '')
if reverse_by == '1':
groups = groups.reverse()
context['reverse'] = reverse_by
# apply pagination, 2 groups per page
context.update(paginate(groups, 2, self.request, {}, var_name='groups'))
return context
| PyDev777/studentsdb | students/views/groups.py | Python | mit | 6,802 |
from toolbox.filetools import *
from toolbox.graphictools import *
from toolbox.misctools import *
from toolbox.objecttools import *
from toolbox.webtools import *
from toolbox.kanjitools import *
from toolbox.dataqualitytools import *
| marrcio/relate-kanji | resources/util/toolbox/__init__.py | Python | mit | 236 |
# Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module to determine whether an exception should be retried."""
import http.client
import socket
import ssl
import urllib.error
import urllib.parse
from future import standard_library
from googleapiclient import errors
import httplib2
from google.cloud.forseti.scanner.scanners.config_validator_util import (
errors as cv_errors
)
standard_library.install_aliases()
RETRYABLE_EXCEPTIONS = (
http.client.ResponseNotReady,
http.client.IncompleteRead,
httplib2.ServerNotFoundError,
socket.error,
ssl.SSLError,
urllib.error.URLError, # include "no network connection"
)
CONFIG_VALIDATOR_EXCEPTIONS = (
cv_errors.ConfigValidatorServerUnavailableError,
)
def is_retryable_exception(e):
"""Whether exception should be retried.
Args:
e (Exception): Exception object.
Returns:
bool: True for exceptions to retry. False otherwise.
"""
if isinstance(e, errors.HttpError):
if e.resp.status == 429:
# Resource exhausted error.
return True
return isinstance(e, RETRYABLE_EXCEPTIONS)
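# Hedged usage sketch: these predicates are meant to be passed to a retry
# decorator. The ``retrying`` package shown here is an assumption for
# illustration and is not imported by this module:
#
#     from retrying import retry
#
#     @retry(retry_on_exception=is_retryable_exception,
#            wait_exponential_multiplier=1000,
#            stop_max_attempt_number=5)
#     def list_resources():
#         ...  # API call that may raise a retryable error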
def is_retryable_exception_cv(e):
"""Whether exception should be retried for Config Validator communications.
Args:
e (Exception): Exception object.
Returns:
bool: True for exceptions to retry. False otherwise.
"""
return isinstance(e, CONFIG_VALIDATOR_EXCEPTIONS)
| forseti-security/forseti-security | google/cloud/forseti/common/util/retryable_exceptions.py | Python | apache-2.0 | 2,006 |
"""
Savings
===============================================================================
Overview
-------------------------------------------------------------------------------
The function ``savings`` computes the final balance for a savings account
with arbitrary deposits and withdrawals and a variable interest rate.
Functions in this module
-------------------------------------------------------------------------------
"""
# import sys
# import os
# sys.path.insert(0, os.path.abspath('..'))
import pandas as pd
#cashflows.
from cashflows.timeseries import *
from cashflows.common import *
def savings(deposits, nrate, initbal=0):
"""
Computes the final balance for a savings account with arbitrary deposits and
    withdrawals and a variable interest rate.
Args:
deposits (pandas.Series): deposits to the account.
nrate (pandas.Series): nominal interest rate paid by the account.
initbal (float): initial balance of the account.
Return:
A pandas.DataFrame.
**Examples**
>>> cflo = cashflow(const_value=[100]*12, start='2000Q1', freq='Q')
>>> nrate = interest_rate([10]*12, start='2000Q1', freq='Q')
>>> savings(deposits=cflo, nrate=nrate, initbal=0) # doctest: +NORMALIZE_WHITESPACE
Beginning_Balance Deposits Earned_Interest Ending_Balance \\
2000Q1 0.000000 100.0 0.000000 100.000000
2000Q2 100.000000 100.0 2.500000 202.500000
2000Q3 202.500000 100.0 5.062500 307.562500
2000Q4 307.562500 100.0 7.689063 415.251562
2001Q1 415.251562 100.0 10.381289 525.632852
2001Q2 525.632852 100.0 13.140821 638.773673
2001Q3 638.773673 100.0 15.969342 754.743015
2001Q4 754.743015 100.0 18.868575 873.611590
2002Q1 873.611590 100.0 21.840290 995.451880
2002Q2 995.451880 100.0 24.886297 1120.338177
2002Q3 1120.338177 100.0 28.008454 1248.346631
2002Q4 1248.346631 100.0 31.208666 1379.555297
<BLANKLINE>
Nominal_Rate
2000Q1 10.0
2000Q2 10.0
2000Q3 10.0
2000Q4 10.0
2001Q1 10.0
2001Q2 10.0
2001Q3 10.0
2001Q4 10.0
2002Q1 10.0
2002Q2 10.0
2002Q3 10.0
2002Q4 10.0
>>> cflo = cashflow(const_value=[0, 100, 0, 100, 100], start='2000Q1', freq='A')
>>> nrate = interest_rate([0, 1, 2, 3, 4], start='2000Q1', freq='A')
>>> savings(deposits=cflo, nrate=nrate, initbal=1000) # doctest: +NORMALIZE_WHITESPACE
Beginning_Balance Deposits Earned_Interest Ending_Balance \\
2000 1000.000 0.0 0.00000 1000.00000
2001 1000.000 100.0 10.00000 1110.00000
2002 1110.000 0.0 22.20000 1132.20000
2003 1132.200 100.0 33.96600 1266.16600
2004 1266.166 100.0 50.64664 1416.81264
<BLANKLINE>
Nominal_Rate
2000 0.0
2001 1.0
2002 2.0
2003 3.0
2004 4.0
"""
verify_period_range([deposits, nrate])
begbal = deposits.copy()
interest = deposits.copy()
endbal = deposits.copy()
pyr = getpyr(deposits)
for time, _ in enumerate(deposits):
if time == 0:
begbal[0] = initbal
interest[0] = begbal[0] * nrate[0] / 100 / pyr
endbal[0] = begbal[0] + deposits[0] + interest[0]
else:
begbal[time] = endbal[time - 1]
interest[time] = begbal[time] * nrate[time] / 100 / pyr
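            # Clamp withdrawals so the balance can never drop below zero.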
if deposits[time] < 0 and -deposits[time] > begbal[time] + interest[time]:
deposits[time] = -(begbal[time] + interest[time])
endbal[time] = begbal[time] + deposits[time] + interest[time]
table = pd.DataFrame({'Beginning_Balance' : begbal,
'Deposits' : deposits,
'Nominal_Rate':nrate,
'Earned_Interest': interest,
'Ending_Balance': endbal })
return table
if __name__ == "__main__":
import doctest
doctest.testmod()
| jdvelasq/cashflows | cashflows/savings.py | Python | mit | 4,537 |
from .utils import merge_dict, perform_request, CommentClientRequestError
import models
import settings
class User(models.Model):
accessible_fields = ['username', 'follower_ids', 'upvoted_ids', 'downvoted_ids',
'id', 'external_id', 'subscribed_user_ids', 'children', 'course_id',
'subscribed_thread_ids', 'subscribed_commentable_ids',
'subscribed_course_ids', 'threads_count', 'comments_count',
'default_sort_key'
]
updatable_fields = ['username', 'external_id', 'default_sort_key']
initializable_fields = updatable_fields
metric_tag_fields = ['course_id']
base_url = "{prefix}/users".format(prefix=settings.PREFIX)
default_retrieve_params = {'complete': True}
type = 'user'
@classmethod
def from_django_user(cls, user):
return cls(id=str(user.id),
external_id=str(user.id),
username=user.username)
def follow(self, source):
params = {'source_type': source.type, 'source_id': source.id}
response = perform_request(
'post',
_url_for_subscription(self.id),
params,
metric_action='user.follow',
metric_tags=self._metric_tags + ['target.type:{}'.format(source.type)],
)
def unfollow(self, source):
params = {'source_type': source.type, 'source_id': source.id}
response = perform_request(
'delete',
_url_for_subscription(self.id),
params,
metric_action='user.unfollow',
metric_tags=self._metric_tags + ['target.type:{}'.format(source.type)],
)
def vote(self, voteable, value):
if voteable.type == 'thread':
url = _url_for_vote_thread(voteable.id)
elif voteable.type == 'comment':
url = _url_for_vote_comment(voteable.id)
else:
raise CommentClientRequestError("Can only vote / unvote for threads or comments")
params = {'user_id': self.id, 'value': value}
request = perform_request(
'put',
url,
params,
metric_action='user.vote',
metric_tags=self._metric_tags + ['target.type:{}'.format(voteable.type)],
)
voteable.update_attributes(request)
def unvote(self, voteable):
if voteable.type == 'thread':
url = _url_for_vote_thread(voteable.id)
elif voteable.type == 'comment':
url = _url_for_vote_comment(voteable.id)
else:
raise CommentClientRequestError("Can only vote / unvote for threads or comments")
params = {'user_id': self.id}
request = perform_request(
'delete',
url,
params,
metric_action='user.unvote',
metric_tags=self._metric_tags + ['target.type:{}'.format(voteable.type)],
)
voteable.update_attributes(request)
def active_threads(self, query_params={}):
if not self.course_id:
raise CommentClientRequestError("Must provide course_id when retrieving active threads for the user")
url = _url_for_user_active_threads(self.id)
params = {'course_id': self.course_id}
params = merge_dict(params, query_params)
response = perform_request(
'get',
url,
params,
metric_action='user.active_threads',
metric_tags=self._metric_tags,
paged_results=True,
)
return response.get('collection', []), response.get('page', 1), response.get('num_pages', 1)
def subscribed_threads(self, query_params={}):
if not self.course_id:
raise CommentClientRequestError("Must provide course_id when retrieving subscribed threads for the user")
url = _url_for_user_subscribed_threads(self.id)
params = {'course_id': self.course_id}
params = merge_dict(params, query_params)
response = perform_request(
'get',
url,
params,
metric_action='user.subscribed_threads',
metric_tags=self._metric_tags,
paged_results=True
)
return response.get('collection', []), response.get('page', 1), response.get('num_pages', 1)
def _retrieve(self, *args, **kwargs):
url = self.url(action='get', params=self.attributes)
retrieve_params = self.default_retrieve_params
if self.attributes.get('course_id'):
retrieve_params['course_id'] = self.course_id
try:
response = perform_request(
'get',
url,
retrieve_params,
metric_action='model.retrieve',
metric_tags=self._metric_tags,
)
except CommentClientRequestError as e:
if e.status_code == 404:
# attempt to gracefully recover from a previous failure
# to sync this user to the comments service.
self.save()
response = perform_request(
'get',
url,
retrieve_params,
metric_action='model.retrieve',
metric_tags=self._metric_tags,
)
else:
raise
self.update_attributes(**response)
def _url_for_vote_comment(comment_id):
return "{prefix}/comments/{comment_id}/votes".format(prefix=settings.PREFIX, comment_id=comment_id)
def _url_for_vote_thread(thread_id):
return "{prefix}/threads/{thread_id}/votes".format(prefix=settings.PREFIX, thread_id=thread_id)
def _url_for_subscription(user_id):
return "{prefix}/users/{user_id}/subscriptions".format(prefix=settings.PREFIX, user_id=user_id)
def _url_for_user_active_threads(user_id):
return "{prefix}/users/{user_id}/active_threads".format(prefix=settings.PREFIX, user_id=user_id)
def _url_for_user_subscribed_threads(user_id):
return "{prefix}/users/{user_id}/subscribed_threads".format(prefix=settings.PREFIX, user_id=user_id)
def _url_for_user_stats(user_id,course_id):
return "{prefix}/users/{user_id}/stats?course_id={course_id}".format(prefix=settings.PREFIX, user_id=user_id,course_id=course_id)
| echanna/EdxNotAFork | lms/lib/comment_client/user.py | Python | agpl-3.0 | 6,362 |
# -*- coding: utf-8 -*-
import shutil
import widgetUtils
import platform
if platform.system() == "Windows":
import wxUI as view
from controller import settings
elif platform.system() == "Linux":
import gtkUI as view
import paths
import time
import os
import logging
import session
import manager
import config_utils
import config
import output  # assumed project module providing speech output (used below)
log = logging.getLogger("sessionmanager.sessionManager")
class sessionManagerController(object):
def __init__(self, started=False):
super(sessionManagerController, self).__init__()
log.debug("Setting up the session manager.")
self.started = started
manager.setup()
self.view = view.sessionManagerWindow()
widgetUtils.connect_event(self.view.new, widgetUtils.BUTTON_PRESSED, self.manage_new_account)
widgetUtils.connect_event(self.view.remove, widgetUtils.BUTTON_PRESSED, self.remove)
if self.started == False:
widgetUtils.connect_event(self.view.configuration, widgetUtils.BUTTON_PRESSED, self.configuration)
else:
self.view.hide_configuration()
self.new_sessions = {}
self.removed_sessions = []
def fill_list(self):
sessionsList = []
log.debug("Filling the sessions list.")
self.sessions = []
for i in os.listdir(paths.config_path()):
if os.path.isdir(paths.config_path(i)):
log.debug("Adding session %s" % (i,))
strconfig = "%s/session.conf" % (paths.config_path(i))
config_test = config_utils.load_config(strconfig)
name = config_test["twitter"]["user_name"]
if config_test["twitter"]["user_key"] != "" and config_test["twitter"]["user_secret"] != "":
sessionsList.append(name)
self.sessions.append(i)
else:
try:
log.debug("Deleting session %s" % (i,))
shutil.rmtree(paths.config_path(i))
except:
output.speak("An exception was raised while attempting to clean malformed session data. See the error log for details. If this message persists, contact the developers.",True)
                log.exception("Exception thrown while removing malformed session")
self.view.fill_list(sessionsList)
def show(self):
if self.view.get_response() == widgetUtils.OK:
self.do_ok()
# else:
self.view.destroy()
def do_ok(self):
log.debug("Starting sessions...")
for i in self.sessions:
if session.sessions.has_key(i) == True: continue
s = session.Session(i)
s.get_configuration()
if i not in config.app["sessions"]["ignored_sessions"]:
s.login()
session.sessions[i] = s
self.new_sessions[i] = s
# self.view.destroy()
def manage_new_account(self, *args, **kwargs):
if self.view.new_account_dialog() == widgetUtils.YES:
location = (str(time.time())[-6:])
log.debug("Creating session in the %s path" % (location,))
s = session.Session(location)
manager.manager.add_session(location)
s.get_configuration()
# try:
s.authorise()
self.sessions.append(location)
self.view.add_new_session_to_list()
# except:
# log.exception("Error authorising the session")
# self.view.show_unauthorised_error()
# return
def remove(self, *args, **kwargs):
if self.view.remove_account_dialog() == widgetUtils.YES:
selected_account = self.sessions[self.view.get_selected()]
self.view.remove_session(self.view.get_selected())
self.removed_sessions.append(selected_account)
self.sessions.remove(selected_account)
shutil.rmtree(path=paths.config_path(selected_account), ignore_errors=True)
def configuration(self, *args, **kwargs):
""" Opens the global settings dialogue."""
d = settings.globalSettingsController()
if d.response == widgetUtils.OK:
d.save_configuration()
| codeofdusk/ProjectMagenta | src/sessionmanager/sessionManager.py | Python | gpl-2.0 | 3,568 |
from .analysis import find_outliers
from mollib.utils import MDTable, dict_table, FormattedStr
from mollib.utils.numbers import center
from mollib.utils.interactions import sort_func
def stats_table(stats, *args, **kwargs):
"""Render a table for the statistics.
Parameters
----------
stats: dict
A dict with the statistics from the SVD fit
Returns
-------
table: :obj:`mollib.utils.MDTable`
A table of the statistics
"""
return dict_table(stats, *args, **kwargs)
#: rename to datafit_tables
def report_tables(data, predicted=None):
"""Produce the partial alignment report for the observed and predicted
RDC and RACS values.
Parameters
----------
data: dict
The experimental/observed RDC and RACS data.
- **key**: interaction labels (str)
- **value**: :obj:`mollib.pa.RDC` or :obj:`mollib.pa.RACS` data values.
predicted: dict (optional)
The SVD predicted RDC and RACS data.
- **key**: interaction labels (str)
- **value**: :obj:`mollib.pa.RDC` or :obj:`mollib.pa.RACS` predicted
values.
Returns
-------
tables: dict
A dict with the tables:
- **keys**
- 'fit': the fit table
- 'xx_pred': the predicted data table. ex: 'N-H_pred',
'CA-HA_pred'
- **values**
- The `mollib.utils.markdown.MDTable` objects.
"""
if predicted is None:
predicted = dict()
# Prepare the fit data table
tables = {}
tables['fit'] = MDTable('Interaction', 'Value', 'Error',
'Predicted', 'Deviation')
# Make a (shallow) copy of the predicted data dict. As we print off
# interactions from the data, we remove them from this copied dict so that
# we do not print the RDC/RACS twice.
predicted_copy = predicted.copy()
# Find the warning and bad outliers
warning, bad = find_outliers(data, predicted)
# Iterate over the data and add the values to the table.
for label in sorted(data, key=sort_func):
# Get the fields
interaction = label
value = data[label].value
error = data[label].error if data[label].error else '-'
# Find the number of digits in the observed value so that the predicted
# values (and deviations) can be rounded to the same precision
split = str(value).split('.')
if len(split) > 1:
no_digits = len(str(value).split('.')[1])
else:
no_digits = 0
# Get the predicted value and deviation between observed and predicted
# (or put a '-' if there is None).
if label in predicted_copy:
pred = predicted_copy.pop(label).value
pred = round(pred , no_digits)
deviation = round(value - pred, no_digits)
else:
pred = " -"
deviation = " -"
# Identify outlier points with either a warning (yellow) or as bad (red)
# Also put an asterisk or exclamation mark next to the label's name
if label in warning:
fmt = 'yellow'
interaction = label + '*'
elif label in bad:
fmt = 'red'
interaction = label + '!'
else:
fmt = ''
# Add the fields to a row in the table
tables['fit'].add_row(FormattedStr(interaction, fmt),
FormattedStr(center(value), fmt),
FormattedStr(center(error), fmt),
FormattedStr(center(pred), fmt),
FormattedStr(center(deviation), fmt))
# Prepare tables for predicted values
predicted_interactions = set([sort_func(i)[2]
for i in predicted_copy.keys()])
# Populate the table and rows for the predicted data
tables['pred'] = MDTable('Interaction', 'Predicted')
table = tables['pred']
for label in sorted(predicted_copy.keys(), key=sort_func):
# Get the fields
value = round(predicted_copy[label].value, 2)
# Add the fields to a row in the table
table.add_row(label, center(value))
    return tables
| jlorieau/mollib | mollib/pa/reports.py | Python | gpl-3.0 | 4286 |
from django.utils.translation import ugettext_lazy as _
from django.utils.html import mark_safe
from django.core.urlresolvers import reverse
from tendenci.libs.model_report.report import reports, ReportAdmin
from tendenci.libs.model_report.utils import (sum_column, us_date_format, date_label,
obj_type_format, date_from_datetime,
entity_format)
from tendenci.apps.invoices.models import Invoice
from tendenci.apps.site_settings.utils import get_setting
CURRENCY_SYMBOL = get_setting("site", "global", "currencysymbol")
def id_format(value, instance):
link = reverse('invoice.view', args=[value])
html = "<a href=\"%s\">%s</a>" % (link, value)
return mark_safe(html)
def currency_format(value, instance):
return "%s%s" % (CURRENCY_SYMBOL, value)
class InvoiceReport(ReportAdmin):
# choose a title for your report for h1, title tag and report list
title = _('Invoice Report')
# specify your model
model = Invoice
# fields in the specified model to display in the report table
fields = [
'id',
'bill_to',
'create_dt',
'status_detail',
'object_type',
'entity',
'payments_credits',
'balance',
'total'
]
# fields in the model to show filters for
list_filter = ('status_detail', 'create_dt', 'object_type')
# fields in the model to order results by
list_order_by = ('create_dt', 'status_detail')
# fields to group results by
list_group_by = ('object_type', 'status_detail', 'entity', 'create_dt')
# allowed export formats. default is excel and pdf
exports = ('excel', 'pdf',)
# type = report for report only, type = chart for report and charts. default is report.
type = 'report'
# override field formats by referencing a function
override_field_formats = {
'create_dt': us_date_format,
'object_type': obj_type_format,
'id': id_format,
'balance': currency_format,
'total': currency_format,
'payments_credits': currency_format
}
# override the label for a field by referencing a function
override_field_labels = {
'create_dt': date_label
}
override_group_value = {
'create_dt': date_from_datetime,
'entity': entity_format
}
group_totals = {
'balance': sum_column,
'total': sum_column,
'payments_credits': sum_column
}
report_totals = {
'balance': sum_column,
'total': sum_column,
'payments_credits': sum_column
}
# register your report with the slug and name
reports.register('invoices', InvoiceReport)
| alirizakeles/tendenci | tendenci/apps/invoices/reports.py | Python | gpl-3.0 | 2,731 |
from components import Graph
from src.utiltools import mkdir
import os, sys
import pygraphviz as pgv
class GraphAM(Graph):
def __init__(self, num_nodes, directed):
Graph.__init__(self, num_nodes, directed)
self.repr_type = 'AM'
self.graph = [[0] * self.num_nodes for _ in range(self.num_nodes)]
def __str__(self):
if self.graph == None or self.graph == []:
print "Graph not initialized"
return ''
str_repr = ""
for row in self.graph:
for col in row:
str_repr += str(col) + " "
str_repr += "\n"
return str_repr
def read(self, file_path):
if not os.path.exists(file_path):
sys.stderr.write('graphAM:: Error -> read : no such file exists')
exit()
self.graph = []
with open(file_path, 'r') as fr:
for line in fr.read().splitlines():
if line[0] == '#':
continue
self.graph.append([int(i) for i in line.split(',')])
def write(self, file_path):
mkdir(file_path)
with open(file_path, 'w') as fw:
fw.write("#Adjacency matrix graph\n")
for row in self.graph:
fw.write(str(row)[1:-1] + '\n')
def _create_pgv_object(self, update = False):
if hasattr(self, 'pgv_object') and not update:
return
if self.directed == 1:
self.pgv_object = pgv.AGraph(directed=True)
else:
self.pgv_object = pgv.AGraph(directed=False)
for x in range(self.num_nodes):
self.pgv_object.add_node(str(x))
for x in range(self.num_nodes):
for y in range(self.num_nodes):
if self.graph[x][y] > 0:
self.pgv_object.add_edge(str(x), str(y))
def plot_to_file(self, file_path):
self._create_pgv_object()
self.pgv_object.graph_attr.update(size="50!")
self.pgv_object.layout()
self.pgv_object.draw(file_path)
def plot(self):
pass
def write_dot(self, file_path):
self._create_pgv_object()
self.pgv_object.write(file_path)
def add_node(self):
for row in self.graph:
row.append(0)
self.graph.append([0] * len(self.graph[0]))
self.num_nodes += 1
def add_edge(self, start_node, end_node, value=1):
if self.graph == None:
print "Graph not initialized"
return
self.graph[start_node][end_node] = value
if self.directed == 0:
self.graph[end_node][start_node] = value
def get_node_successors(self, node_idx):
return [index for index, succ in enumerate(self.graph[node_idx]) if succ == 1]
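# A hedged usage sketch (assumes this module is run from a path where the
# ``components`` package is importable; not part of the original file):
if __name__ == '__main__':
    g = GraphAM(num_nodes=3, directed=1)
    g.add_edge(0, 1)
    g.add_edge(1, 2)
    print(g)                          # adjacency matrix as text
    print(g.get_node_successors(1))   # -> [2]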
# TODO: Write unittests for graphAM.py
| MartinPenicka/randgraphlib | src/components/graphAM.py | Python | gpl-3.0 | 3046 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# python 3.5.1
"""
Author: Bobby McDonnell
Date: 12/17/2015
Hierarchy:
BSQL > database > table > record
"""
from table import table
class record(table):
def __init__(self):
pass
| rmcdonnell/BSQL | record.py | Python | mit | 229 |
# coding:utf-8
from models import HMenu, HOrg, HRole, HUser
from peewee import JOIN_INNER, fn
__author__ = "chenghao"
def init_menus(user_id):
"""
    Fetch the menu data set for the current user at login time.
    :param user_id: user id
:return:
"""
sql = HMenu.select(
HMenu.pid, HMenu.menu_name.alias("title"), HMenu.menu_url.alias("href"), HMenu.parent_menu, HMenu.icon
).join(
HOrg, join_type=JOIN_INNER, on=HMenu.org_ids ** (fn.CONCAT("%,", HOrg.pid, ",%"))
).join(
HRole, join_type=JOIN_INNER, on=HMenu.role_ids ** (fn.CONCAT("%,", HRole.pid, ",%"))
).join(
HUser, join_type=JOIN_INNER,
on=((HUser.org_ids ** (fn.CONCAT("%,", HOrg.pid, ",%"))) &
(HUser.role_ids ** (fn.CONCAT("%,", HRole.pid, ",%"))))
).where(
HUser.pid == user_id
).order_by(HMenu.parent_menu, HMenu.sort)
result = [f for f in sql.dicts()]
    level_1_menus = []  # collection of level-1 (top-level) menus
    level_2_menus = {}  # level-2 menus grouped by parent menu
level_1_child_key = "menu_%s"
for res in result:
if res["parent_menu"]:
menus = level_2_menus[level_1_child_key % res["parent_menu"]]
menus.append(res)
level_2_menus[level_1_child_key % res["parent_menu"]] = menus
else:
level_2_menus[level_1_child_key % res["pid"]] = []
level_1_menus.append(res)
return {"level1": level_1_menus, "level2": level_2_menus}
| chenghao/haoAdmin_flask | dal/__init__.py | Python | apache-2.0 | 1,454 |
class Application(object):
url = "http://rozetka.com.ua/"
def __init__(self, driver):
self.driver = driver
def go_to_home_page(self):
self.driver.get(self.url)
def search_product(self, product):
driver = self.driver
search_field = driver.find_element_by_name("text")
search_field.click()
search_field.clear()
search_field.send_keys(product)
search_button = driver.find_element_by_name("search-button")
        search_button.click()
| VolodyaEsk/selenium-python-vkhatianovskyi | other_test/model/application.py | Python | apache-2.0 | 515 |
# Copyright (c) 2014 Adafruit Industries
# Author: Tony DiCola
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import math
from . import GPIO
from . import I2C
class MCP230xxBase(GPIO.BaseGPIO):
    """Base class to represent an MCP230xx series GPIO extender. It is
    compatible with the Adafruit_GPIO BaseGPIO class so it can be used as a
    custom GPIO class for interacting with the device.
    """
def __init__(self, address, i2c=None, **kwargs):
"""Initialize MCP230xx at specified I2C address and bus number. If bus
is not specified it will default to the appropriate platform detected bus.
"""
        # Create I2C device (the I2C module is already imported above).
        if i2c is None:
            i2c = I2C
self._device = i2c.get_i2c_device(address, **kwargs)
# Assume starting in ICON.BANK = 0 mode (sequential access).
# Compute how many bytes are needed to store count of GPIO.
self.gpio_bytes = int(math.ceil(self.NUM_GPIO/8.0))
# Buffer register values so they can be changed without reading.
self.iodir = [0x00]*self.gpio_bytes # Default direction to all inputs.
self.gppu = [0x00]*self.gpio_bytes # Default to pullups disabled.
self.gpio = [0x00]*self.gpio_bytes
# Write current direction and pullup buffer state.
self.write_iodir()
self.write_gppu()
def setup(self, pin, value):
"""Set the input or output mode for a specified pin. Mode should be
either GPIO.OUT or GPIO.IN.
"""
self._validate_pin(pin)
# Set bit to 1 for input or 0 for output.
if value == GPIO.IN:
self.iodir[int(pin/8)] |= 1 << (int(pin%8))
elif value == GPIO.OUT:
self.iodir[int(pin/8)] &= ~(1 << (int(pin%8)))
else:
raise ValueError('Unexpected value. Must be GPIO.IN or GPIO.OUT.')
self.write_iodir()
    def output(self, pin, value):
        """Set the specified pin to the provided high/low value. Value should
        be either GPIO.HIGH/GPIO.LOW or a boolean (True = HIGH).
        """
self.output_pins({pin: value})
def output_pins(self, pins):
"""Set multiple pins high or low at once. Pins should be a dict of pin
name to pin value (HIGH/True for 1, LOW/False for 0). All provided pins
will be set to the given values.
"""
[self._validate_pin(pin) for pin in pins.keys()]
# Set each changed pin's bit.
for pin, value in iter(pins.items()):
if value:
self.gpio[int(pin/8)] |= 1 << (int(pin%8))
else:
self.gpio[int(pin/8)] &= ~(1 << (int(pin%8)))
# Write GPIO state.
self.write_gpio()
def input(self, pin):
"""Read the specified pin and return GPIO.HIGH/True if the pin is pulled
high, or GPIO.LOW/False if pulled low.
"""
return self.input_pins([pin])[0]
def input_pins(self, pins):
"""Read multiple pins specified in the given list and return list of pin values
GPIO.HIGH/True if the pin is pulled high, or GPIO.LOW/False if pulled low.
"""
[self._validate_pin(pin) for pin in pins]
# Get GPIO state.
gpio = self._device.readList(self.GPIO, self.gpio_bytes)
# Return True if pin's bit is set.
return [(gpio[int(pin/8)] & 1 << (int(pin%8))) > 0 for pin in pins]
def pullup(self, pin, enabled):
"""Turn on the pull-up resistor for the specified pin if enabled is True,
otherwise turn off the pull-up resistor.
"""
self._validate_pin(pin)
if enabled:
self.gppu[int(pin/8)] |= 1 << (int(pin%8))
else:
self.gppu[int(pin/8)] &= ~(1 << (int(pin%8)))
self.write_gppu()
    def write_gpio(self, gpio=None):
        """Write the specified byte value to the GPIO register. If no value
        is specified the current buffered value will be written.
        """
if gpio is not None:
self.gpio = gpio
self._device.writeList(self.GPIO, self.gpio)
    def write_iodir(self, iodir=None):
        """Write the specified byte value to the IODIR register. If no value
        is specified the current buffered value will be written.
        """
if iodir is not None:
self.iodir = iodir
self._device.writeList(self.IODIR, self.iodir)
    def write_gppu(self, gppu=None):
        """Write the specified byte value to the GPPU register. If no value
        is specified the current buffered value will be written.
        """
if gppu is not None:
self.gppu = gppu
self._device.writeList(self.GPPU, self.gppu)
class MCP23017(MCP230xxBase):
"""MCP23017-based GPIO class with 16 GPIO pins."""
    # Define number of pins and register addresses.
NUM_GPIO = 16
IODIR = 0x00
GPIO = 0x12
GPPU = 0x0C
def __init__(self, address=0x20, **kwargs):
super(MCP23017, self).__init__(address, **kwargs)
class MCP23008(MCP230xxBase):
"""MCP23008-based GPIO class with 8 GPIO pins."""
    # Define number of pins and register addresses.
NUM_GPIO = 8
IODIR = 0x00
GPIO = 0x09
GPPU = 0x06
def __init__(self, address=0x20, **kwargs):
super(MCP23008, self).__init__(address, **kwargs)
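# A minimal usage sketch, assuming an MCP23017 at the default address 0x20
# on the platform's default I2C bus, with an LED on pin 0 and a button on
# pin 8 (the wiring and pin numbers are made-up examples):
#
#     mcp = MCP23017()                      # or MCP23017(address=0x21)
#     mcp.setup(0, GPIO.OUT)
#     mcp.setup(8, GPIO.IN)
#     mcp.pullup(8, True)
#     mcp.output(0, not mcp.input(8))       # mirror the inverted button state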
| pelme/pyspresso | src/pyspresso/_vendor/Adafruit_GPIO/MCP230xx.py | Python | gpl-3.0 | 6,382 |
# -*- coding: utf-8 -*-
"""
flask.wrappers
~~~~~~~~~~~~~~
Implements the WSGI wrappers (request and response).
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from werkzeug.wrappers import Request as RequestBase, Response as ResponseBase
from werkzeug.exceptions import BadRequest
from . import json
from .globals import _request_ctx_stack
_missing = object()
def _get_data(req, cache):
getter = getattr(req, 'get_data', None)
if getter is not None:
return getter(cache=cache)
return req.data
class Request(RequestBase):
"""The request object used by default in Flask. Remembers the
matched endpoint and view arguments.
It is what ends up as :class:`~flask.request`. If you want to replace
the request object used you can subclass this and set
:attr:`~flask.Flask.request_class` to your subclass.
The request object is a :class:`~werkzeug.wrappers.Request` subclass and
provides all of the attributes Werkzeug defines plus a few Flask
specific ones.
"""
#: The internal URL rule that matched the request. This can be
#: useful to inspect which methods are allowed for the URL from
#: a before/after handler (``request.url_rule.methods``) etc.
#:
#: .. versionadded:: 0.6
url_rule = None
#: A dict of view arguments that matched the request. If an exception
#: happened when matching, this will be `None`.
view_args = None
#: If matching the URL failed, this is the exception that will be
#: raised / was raised as part of the request handling. This is
#: usually a :exc:`~werkzeug.exceptions.NotFound` exception or
#: something similar.
routing_exception = None
# Switched by the request context until 1.0 to opt in deprecated
# module functionality.
_is_old_module = False
@property
def max_content_length(self):
"""Read-only view of the `MAX_CONTENT_LENGTH` config key."""
ctx = _request_ctx_stack.top
if ctx is not None:
return ctx.app.config['MAX_CONTENT_LENGTH']
@property
def endpoint(self):
"""The endpoint that matched the request. This in combination with
:attr:`view_args` can be used to reconstruct the same or a
modified URL. If an exception happened when matching, this will
be `None`.
"""
if self.url_rule is not None:
return self.url_rule.endpoint
@property
def module(self):
"""The name of the current module if the request was dispatched
to an actual module. This is deprecated functionality, use blueprints
instead.
"""
from warnings import warn
warn(DeprecationWarning('modules were deprecated in favor of '
'blueprints. Use request.blueprint '
'instead.'), stacklevel=2)
if self._is_old_module:
return self.blueprint
@property
def blueprint(self):
"""The name of the current blueprint"""
if self.url_rule and '.' in self.url_rule.endpoint:
return self.url_rule.endpoint.rsplit('.', 1)[0]
@property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data. Otherwise this will be `None`.
The :meth:`get_json` method should be used instead.
"""
from warnings import warn
warn(DeprecationWarning('json is deprecated. '
'Use get_json() instead.'), stacklevel=2)
return self.get_json()
@property
def is_json(self):
"""Indicates if this request is JSON or not. By default a request
is considered to include JSON data if the mimetype is
``application/json`` or ``application/*+json``.
.. versionadded:: 0.11
"""
mt = self.mimetype
if mt == 'application/json':
return True
if mt.startswith('application/') and mt.endswith('+json'):
return True
return False
def get_json(self, force=False, silent=False, cache=True):
"""Parses the incoming JSON request data and returns it. If
parsing fails the :meth:`on_json_loading_failed` method on the
request object will be invoked. By default this function will
only load the json data if the mimetype is ``application/json``
but this can be overridden by the `force` parameter.
:param force: if set to `True` the mimetype is ignored.
:param silent: if set to `True` this method will fail silently
and return `None`.
:param cache: if set to `True` the parsed JSON data is remembered
on the request.
"""
rv = getattr(self, '_cached_json', _missing)
if rv is not _missing:
return rv
if not (force or self.is_json):
return None
# We accept a request charset against the specification as
# certain clients have been using this in the past. This
# fits our general approach of being nice in what we accept
# and strict in what we send out.
request_charset = self.mimetype_params.get('charset')
try:
data = _get_data(self, cache)
if request_charset is not None:
rv = json.loads(data, encoding=request_charset)
else:
rv = json.loads(data)
except ValueError as e:
if silent:
rv = None
else:
rv = self.on_json_loading_failed(e)
if cache:
self._cached_json = rv
return rv
def on_json_loading_failed(self, e):
"""Called if decoding of the JSON data failed. The return value of
this method is used by :meth:`get_json` when an error occurred. The
default implementation just raises a :class:`BadRequest` exception.
.. versionchanged:: 0.10
Removed buggy previous behavior of generating a random JSON
response. If you want that behavior back you can trivially
add it by subclassing.
.. versionadded:: 0.8
"""
raise BadRequest()
def _load_form_data(self):
RequestBase._load_form_data(self)
# In debug mode we're replacing the files multidict with an ad-hoc
# subclass that raises a different error for key errors.
ctx = _request_ctx_stack.top
if ctx is not None and ctx.app.debug and \
self.mimetype != 'multipart/form-data' and not self.files:
from .debughelpers import attach_enctype_error_multidict
attach_enctype_error_multidict(self)
class Response(ResponseBase):
"""The response object that is used by default in Flask. Works like the
response object from Werkzeug but is set to have an HTML mimetype by
default. Quite often you don't have to create this object yourself because
:meth:`~flask.Flask.make_response` will take care of that for you.
If you want to replace the response object used you can subclass this and
set :attr:`~flask.Flask.response_class` to your subclass.
"""
default_mimetype = 'text/html'
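# A minimal sketch of how these wrappers are typically consumed through a
# Flask application (the route and payload are made-up examples; `request`
# is the proxy exported by flask.globals):
#
#     from flask import Flask, request, jsonify
#
#     app = Flask(__name__)
#
#     @app.route('/echo', methods=['POST'])
#     def echo():
#         payload = request.get_json(silent=True)   # None on parse failure
#         return jsonify(received=payload)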
| Aaron1992/flask | flask/wrappers.py | Python | bsd-3-clause | 7,304 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Franck Cuny <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: cpanm
short_description: Manages Perl library dependencies.
description:
- Manage Perl library dependencies.
version_added: "1.6"
options:
name:
description:
- The name of the Perl library to install. You may use the "full distribution path", e.g. MIYAGAWA/Plack-0.99_05.tar.gz
required: false
default: null
aliases: ["pkg"]
from_path:
description:
- The local directory from where to install
required: false
default: null
notest:
description:
- Do not run unit tests
required: false
default: false
locallib:
description:
- Specify the install base to install modules
required: false
default: false
mirror:
description:
- Specifies the base URL for the CPAN mirror to use
required: false
default: false
mirror_only:
description:
- Use the mirror's index file instead of the CPAN Meta DB
required: false
default: false
installdeps:
description:
- Only install dependencies
required: false
default: false
version_added: "2.0"
version:
description:
- minimum version of perl module to consider acceptable
required: false
default: false
version_added: "2.1"
system_lib:
description:
- Use this if you want to install modules to the system perl include path. You must be root or have "passwordless" sudo for this to work.
- This uses the cpanm commandline option '--sudo', which has nothing to do with ansible privilege escalation.
required: false
default: false
version_added: "2.0"
aliases: ['use_sudo']
executable:
description:
- Override the path to the cpanm executable
required: false
default: null
version_added: "2.1"
notes:
- Please note that U(http://search.cpan.org/dist/App-cpanminus/bin/cpanm, cpanm) must be installed on the remote host.
author: "Franck Cuny (@franckcuny)"
'''
EXAMPLES = '''
# install Dancer perl package
- cpanm:
name: Dancer
# install version 0.99_05 of the Plack perl package
- cpanm:
name: MIYAGAWA/Plack-0.99_05.tar.gz
# install Dancer into the specified locallib
- cpanm:
name: Dancer
locallib: /srv/webapps/my_app/extlib
# install perl dependencies from local directory
- cpanm:
from_path: /srv/webapps/my_app/src/
# install Dancer perl package without running the unit tests in indicated locallib
- cpanm:
name: Dancer
notest: True
locallib: /srv/webapps/my_app/extlib
# install Dancer perl package from a specific mirror
- cpanm:
name: Dancer
mirror: 'http://cpan.cpantesters.org/'
# install Dancer perl package into the system root path
- cpanm:
name: Dancer
system_lib: yes
# install Dancer if it's not already installed
# OR the installed version is older than version 1.0
- cpanm:
name: Dancer
version: '1.0'
'''
def _is_package_installed(module, name, locallib, cpanm, version):
cmd = ""
if locallib:
os.environ["PERL5LIB"] = "%s/lib/perl5" % locallib
cmd = "%s perl -e ' use %s" % (cmd, name)
if version:
cmd = "%s %s;'" % (cmd, version)
else:
cmd = "%s;'" % cmd
    res, stdout, stderr = module.run_command(cmd, check_rc=False)
    return res == 0
def _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo):
# this code should use "%s" like everything else and just return early but not fixing all of it now.
# don't copy stuff like this
if from_path:
cmd = cpanm + " " + from_path
else:
cmd = cpanm + " " + name
if notest is True:
cmd = cmd + " -n"
if locallib is not None:
cmd = cmd + " -l " + locallib
if mirror is not None:
cmd = cmd + " --mirror " + mirror
if mirror_only is True:
cmd = cmd + " --mirror-only"
if installdeps is True:
cmd = cmd + " --installdeps"
if use_sudo is True:
cmd = cmd + " --sudo"
return cmd
def _get_cpanm_path(module):
if module.params['executable']:
return module.params['executable']
else:
return module.get_bin_path('cpanm', True)
def main():
arg_spec = dict(
name=dict(default=None, required=False, aliases=['pkg']),
from_path=dict(default=None, required=False, type='path'),
notest=dict(default=False, type='bool'),
locallib=dict(default=None, required=False, type='path'),
mirror=dict(default=None, required=False),
mirror_only=dict(default=False, type='bool'),
installdeps=dict(default=False, type='bool'),
system_lib=dict(default=False, type='bool', aliases=['use_sudo']),
version=dict(default=None, required=False),
executable=dict(required=False, type='path'),
)
module = AnsibleModule(
argument_spec=arg_spec,
required_one_of=[['name', 'from_path']],
)
cpanm = _get_cpanm_path(module)
name = module.params['name']
from_path = module.params['from_path']
notest = module.boolean(module.params.get('notest', False))
locallib = module.params['locallib']
mirror = module.params['mirror']
mirror_only = module.params['mirror_only']
installdeps = module.params['installdeps']
use_sudo = module.params['system_lib']
version = module.params['version']
changed = False
installed = _is_package_installed(module, name, locallib, cpanm, version)
if not installed:
cmd = _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo)
rc_cpanm, out_cpanm, err_cpanm = module.run_command(cmd, check_rc=False)
if rc_cpanm != 0:
module.fail_json(msg=err_cpanm, cmd=cmd)
if (err_cpanm.find('is up to date') == -1 and out_cpanm.find('is up to date') == -1):
changed = True
module.exit_json(changed=changed, binary=cpanm, name=name)
# import module snippets
from ansible.module_utils.basic import *
main()
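# Illustrative only: for a task like the one below, _build_cmd_line assembles
# a command roughly like the following (the cpanm path and mirror URL are
# made-up examples):
#
#   - cpanm:
#       name: Dancer
#       notest: yes
#       locallib: /srv/webapps/my_app/extlib
#       mirror: 'http://cpan.cpantesters.org/'
#
#   => /usr/local/bin/cpanm Dancer -n -l /srv/webapps/my_app/extlib \
#      --mirror http://cpan.cpantesters.org/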
| kbrebanov/ansible-modules-extras | packaging/language/cpanm.py | Python | gpl-3.0 | 6,848 |
###############################################################################
#
# temboo.core.util.*
#
# Utility functions.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
###############################################################################
class ExecutionStatus(object):
SUCCESS='SUCCESS'
ERROR='ERROR'
TERMINATED_MANUALLY='TERMINATED_MANUALLY'
TERMINATED_LIMIT='TERMINATED_LIMIT'
RUNNING='RUNNING'
| jordanemedlock/psychtruths | temboo/core/util.py | Python | apache-2.0 | 1,009 |
import sh
import unittest
from sh import cut, grep, cat, wc, uniq, mv
zmap_std_args = [ "-b",
"configs/blocklist_shard.conf",
"--seed=1234",
"192.168.1.0/24",
"--dryrun",
"-c",
"1"
]
zmap = sh.Command("../src/zmap").bake(*zmap_std_args)
def shard_file_name(shards, threads):
# Use naming conversion <shards>-t<threads>
return ''.join([str(shards), '-t', str(threads)])
def output_file_name(shards, shard, threads):
# Use naming convention: <shards>.<shard>-t<threads>
return ''.join([str(shards), '.', str(shard), '-t', str(threads)])
def parse(filename, **kwargs):
# cat outfile | grep ip | cut -d '|' -f 2 | cut -d ' ' -f 3 | cut -d '.' -f 4 | sort -n | wc -l
return sh.sort(cut(cut(cut(grep(cat(filename), "ip"), d="|", f=2), d=" ", f=3), d=".", f=4), "-n", _out=kwargs.get("_out"))
class TestSharding(unittest.TestCase):
NUM_IPS = 256
def setUp(self):
pass
    def tearDown(self):
pass
def _runTest(self, shards, max_threads):
for threads in range(1, max_threads + 1):
for shard in range(0, shards):
with sh.sudo:
outfile = output_file_name(shards, shard, threads)
zmap(p=80, T=threads, shards=shards, shard=shard, _out="tempfile")
parse("tempfile", _out=outfile)
dup_lines = int(wc(uniq(cat(outfile), "-d"), "-l"))
self.assertEqual(dup_lines, 0)
shard_file = shard_file_name(shards, threads)
if shard == 0:
cat(outfile, _out=shard_file)
else:
cat(shard_file, outfile, _out="tempfile")
mv("tempfile", shard_file)
for threads in range(1, max_threads + 1):
shard_file = shard_file_name(shards, threads)
num_lines = int(wc(cat(shard_file), "-l"))
self.assertEqual(num_lines, TestSharding.NUM_IPS)
dup_lines = int(wc(uniq(sh.sort(cat(shard_file), "-n"), "-d"), "-l"))
self.assertEqual(dup_lines, 0)
def testOneShard(self):
# Test with one shard
self._runTest(1, 4)
def testTwoShards(self):
self._runTest(2, 4)
if __name__ == '__main__':
unittest.main()
| zmap/zmap | test/test_sharding.py | Python | apache-2.0 | 2,420 |
# This file is part of Invenio.
# Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
__revision__ = "$Id$"
## Description: function Update_Approval_DB
## This function updates the approval database with the
## decision of the referee
## Author: T.Baron
## PARAMETERS: categformatDAM: variable used to compute the category
## of the document from its reference
import os
import re
import time
from invenio.dbquery import run_sql
def Update_Approval_DB(parameters, curdir, form, user_info=None):
"""
This function updates the approval database when a document has
just been approved or rejected. It uses the [categformatDAM]
parameter to compute the category of the document. Must be called
after the Get_Report_Number function.
Parameters:
* categformatDAM: It contains the regular expression which
allows the retrieval of the category from the
reference number.
Eg: if [categformatDAM]="TEST-<CATEG>-.*" and
the reference is "TEST-CATEG1-2001-001" then
the category will be recognized as "CATEG1".
"""
global rn
doctype = form['doctype']
act = form['act']
categformat = parameters['categformatDAM']
## Get the name of the decision file:
try:
decision_filename = parameters['decision_file']
except KeyError:
decision_filename = ""
pid = os.getpid()
now = time.time()
access = "%i_%s" % (now,pid)
if act != "APP":
# retrieve category
if re.search("<FILE:",categformat):
filename = categformat.replace("<FILE:","")
filename = filename.replace(">","")
if os.path.exists("%s/%s" % (curdir,filename)):
fp = open("%s/%s" % (curdir,filename))
category = fp.read()
fp.close()
else:
category=""
category = category.replace("\n","")
else:
categformat = categformat.replace("<CATEG>","([^-]*)")
m_categ_search = re.match(categformat, rn)
if m_categ_search is not None:
if len(m_categ_search.groups()) > 0:
## Found a match for the category of this document. Get it:
category = m_categ_search.group(1)
else:
## This document has no category.
category = ""
else:
category = ""
if category == "":
category = "unknown"
sth = run_sql("SELECT status,dFirstReq,dLastReq,dAction FROM sbmAPPROVAL WHERE doctype=%s and categ=%s and rn=%s", (doctype,category,rn,))
if len(sth) == 0:
run_sql("INSERT INTO sbmAPPROVAL (doctype, categ, rn, status, dFirstReq, dLastReq, dAction, access) VALUES (%s,%s,%s,'waiting',NOW(),NOW(),'',%s)", (doctype,category,rn,access,))
else:
run_sql("UPDATE sbmAPPROVAL SET dLastReq=NOW(), status='waiting' WHERE doctype=%s and categ=%s and rn=%s", (doctype,category,rn,))
else:
## Since this is the "APP" action, this call of the function must be
## on behalf of the referee - in order to approve or reject an item.
## We need to get the decision from the decision file:
if decision_filename in (None, "", "NULL"):
## We don't have a name for the decision file.
## For backward compatibility reasons, try to read the decision from
## a file called 'decision' in curdir:
if os.path.exists("%s/decision" % curdir):
fh_decision = open("%s/decision" % curdir, "r")
decision = fh_decision.read()
fh_decision.close()
else:
decision = ""
else:
## Try to read the decision from the decision file:
try:
fh_decision = open("%s/%s" % (curdir, decision_filename), "r")
decision = fh_decision.read().strip()
fh_decision.close()
except IOError:
## Oops, unable to open the decision file.
decision = ""
## Either approve or reject the item, based upon the contents
## of 'decision':
if decision == "approve":
run_sql("UPDATE sbmAPPROVAL SET dAction=NOW(),status='approved' WHERE rn=%s", (rn,))
else:
run_sql("UPDATE sbmAPPROVAL SET dAction=NOW(),status='rejected' WHERE rn=%s", (rn,))
return ""
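# Worked example (illustrative) of the [categformatDAM] matching performed
# above, using the pattern from the docstring:
#
#     categformat = "TEST-<CATEG>-.*"            ->  "TEST-([^-]*)-.*"
#     rn          = "TEST-CATEG1-2001-001"
#     re.match("TEST-([^-]*)-.*", rn).group(1)   ->  "CATEG1"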
| CERNDocumentServer/invenio | modules/websubmit/lib/functions/Update_Approval_DB.py | Python | gpl-2.0 | 5,345 |
"""
Unittests for the rpc.jsonrpc module
"""
import sys
import unittest
if sys.version_info < (2, 7):
import unittest2 as unittest
from mock import Mock
from rpc import jsonp
class Handler(object):
def ping(self):
return "pong!"
def sayhi(self, person):
return "Hi " + person
class ServerTestCase(unittest.TestCase):
def setUp(self):
self.s = jsonp.Server('localhost', 55543, Handler)
self.mock_get = get = Mock(name="Mock GET")
get.method = "GET"
def test_contextmanager(self):
""" Can we use as a contextmanager """
with jsonp.Server('localhost', 666, Handler) as s:
self.assertIsInstance(s, jsonp.Server)
self.assertEqual('localhost', s.host)
def test_parse_response(self):
""" Jsonify our response """
data = dict(id='FAKEID', result='pong!', error=None)
# This is quite fragile- it relies on dict ordering
expected = 'runit({"error": null, "id": "FAKEID", "result": "pong!"})'
self.mock_get.GET = dict(callback='runit')
self.assertEqual(expected, self.s.parse_response(self.mock_get, data))
def tearDown(self):
self.s.close()
if __name__ == '__main__':
unittest.main()
| davidmiller/rpc | test/test_jsonp.py | Python | lgpl-3.0 | 1,254 |
# I18N-related pylint module
#
# Copyright (C) 2013 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Chris Lumens <[email protected]>
#
import astroid
from pylint.checkers import BaseChecker
from pylint.checkers.strings import StringFormatChecker
from pylint.checkers.logging import LoggingChecker
from pylint.checkers.utils import check_messages
from pylint.interfaces import IAstroidChecker
from copy import copy
translationMethods = frozenset(["_", "N_", "P_", "C_", "CN_", "CP_"])
# Returns a list of the message strings for a given translation method call
def _get_message_strings(node):
msgstrs = []
if node.func.name in ("_", "N_") and len(node.args) >= 1:
if isinstance(node.args[0], astroid.Const):
msgstrs.append(node.args[0].value)
elif node.func.name in ("C_", "CN_") and len(node.args) >= 2:
if isinstance(node.args[1], astroid.Const):
msgstrs.append(node.args[1].value)
elif node.func.name == "P_" and len(node.args) >= 2:
if isinstance(node.args[0], astroid.Const):
msgstrs.append(node.args[0].value)
if isinstance(node.args[1], astroid.Const):
msgstrs.append(node.args[1].value)
elif node.func.name == "CP_" and len(node.args) >= 3:
if isinstance(node.args[1], astroid.Const):
msgstrs.append(node.args[1].value)
if isinstance(node.args[2], astroid.Const):
msgstrs.append(node.args[2].value)
return msgstrs
class IntlChecker(BaseChecker):
__implements__ = (IAstroidChecker, )
name = "internationalization"
msgs = {"W9901": ("Found % in a call to a _() method",
"found-percent-in-_",
"% in a call to one of the _() methods results in incorrect translations"),
"W9902": ("Found _ call at module/class level",
"found-_-in-module-class",
"Calling _ at the module or class level results in translations to the wrong language")
}
@check_messages("found-percent-in-_")
def visit_binop(self, node):
if node.op != "%":
return
curr = node
while curr.parent:
if isinstance(curr.parent, astroid.CallFunc) and getattr(curr.parent.func, "name", "") in translationMethods:
self.add_message("W9901", node=node)
break
curr = curr.parent
@check_messages("found-_-in-module-class")
def visit_callfunc(self, node):
# The first test skips internal functions like getattr.
if isinstance(node.func, astroid.Name) and node.func.name == "_":
if isinstance(node.scope(), astroid.Module) or isinstance(node.scope(), astroid.Class):
self.add_message("W9902", node=node)
# Extend LoggingChecker to check translated logging strings
class IntlLoggingChecker(LoggingChecker):
__implements__ = (IAstroidChecker,)
name = 'intl-logging'
msgs = {'W9903': ("Fake message for translated E/W120* checks",
"translated-log",
"This message is not emitted itself, but can be used to control the display of \
logging format messages extended for translated strings")
}
options = ()
@check_messages('translated-log')
def visit_callfunc(self, node):
if len(node.args) >= 1 and isinstance(node.args[0], astroid.CallFunc) and \
getattr(node.args[0].func, "name", "") in translationMethods:
for formatstr in _get_message_strings(node.args[0]):
# Both the node and the args need to be copied so we don't replace args
# on the original node.
copynode = copy(node)
copyargs = copy(node.args)
copyargs[0] = astroid.Const(formatstr)
copynode.args = copyargs
LoggingChecker.visit_callfunc(self, copynode)
def __init__(self, *args, **kwargs):
LoggingChecker.__init__(self, *args, **kwargs)
# Just set logging_modules to 'logging', instead of trying to take a parameter
# like LoggingChecker
self.config.logging_modules = ('logging',)
# Extend StringFormatChecker to check translated format strings
class IntlStringFormatChecker(StringFormatChecker):
__implements__ = (IAstroidChecker,)
name = 'intl-string'
msgs = {'W9904': ("Fake message for translated E/W130* checks",
"translated-format",
"This message is not emitted itself, but can be used to control the display of \
string format messages extended for translated strings")
}
options = ()
@check_messages('translated-format')
def visit_binop(self, node):
if node.op != '%':
return
if isinstance(node.left, astroid.CallFunc) and getattr(node.left.func, "name", "") in translationMethods:
for formatstr in _get_message_strings(node.left):
# Create a copy of the node with just the message string as the format
copynode = copy(node)
copynode.left = astroid.Const(formatstr)
StringFormatChecker.visit_binop(self, copynode)
def register(linter):
"""required method to auto register this checker """
linter.register_checker(IntlChecker(linter))
linter.register_checker(IntlLoggingChecker(linter))
linter.register_checker(IntlStringFormatChecker(linter))
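# Illustrative snippets (not part of this module) of code that the checkers
# above would flag:
#
#     label = _("Downloading %s" % url)   # W9901: apply % outside _(), e.g.
#                                         #        _("Downloading %s") % url
#
#     class Dialog(object):
#         title = _("Options")            # W9902: _ called at class level;
#                                         #        translate at instance time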
| gautamMalu/XenInBox | tests/pylint/intl.py | Python | gpl-2.0 | 6,421 |
# EFILTER Forensic Query Language
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
EFILTER versioning scheme.
EFILTER version is in the following format: YEAR.MONTH.REVCOUNT, where revcount
is the number of commits since initial commit on the master branch. This we
believe strikes a good balance between human readable strings, and ability to
tie a release to the git revision it was built from.
"""
__author__ = "Adam Sindelar <[email protected]>"
import logging
import re
RELEASE = "Awesome Sauce"
MAJOR = 1
MINOR = 5
ANCHOR_TAG = "v%d.%d" % (MAJOR, MINOR)
try:
import datetime
import pytz
import subprocess
# The below functionality is only available if dateutil is installed.
from dateutil import parser
def git_commits_since_tag(tag):
        errors = None
        try:
            p = subprocess.Popen(
                ["git", "log", "%s..master" % tag, "--oneline"],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False)
errors = p.stderr.read()
p.stderr.close()
commits = p.stdout.readlines()
return commits
except (OSError, IndexError):
if errors:
logging.warn("git log failed with %r" % errors)
return None
def git_dev_version():
commits = git_commits_since_tag(ANCHOR_TAG)
if not commits:
return "1!%d.%d.dev0" % (MAJOR, MINOR)
return "1!%d.%d.dev%d" % (MAJOR, MINOR, len(commits))
except ImportError:
logging.warn("pytz or dateutil are not available - getting a version "
"number from git won't work.")
    # If there's no dateutil then doing the git tango is pointless, so stub
    # out git_dev_version (get_version will raise RuntimeError on None).
    def git_dev_version():
        return None
def get_pkg_version():
"""Get version string by parsing PKG-INFO."""
try:
with open("PKG-INFO", "r") as fp:
            rgx = re.compile(r"Version: (\S+)")
for line in fp.readlines():
match = rgx.match(line)
if match:
return match.group(1)
except IOError:
return None
def get_txt_version():
"""Get version string from version.txt."""
try:
with open("version.txt", "r") as fp:
return fp.read().strip()
except IOError:
return None
def get_version(dev_version=False):
"""Generates a version string.
Arguments:
dev_version: Generate a verbose development version from git commits.
Examples:
1.1
1.1.dev43 # If 'dev_version' was passed.
"""
if dev_version:
version = git_dev_version()
if not version:
raise RuntimeError("Could not generate dev version from git.")
return version
return "1!%d.%d" % (MAJOR, MINOR)
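# Minimal usage sketch (the dev revision count is illustrative):
#
#     >>> get_version()
#     '1!1.5'
#     >>> get_version(dev_version=True)   # needs a git checkout of the repo
#     '1!1.5.dev42'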
| google/dotty | efilter/version.py | Python | apache-2.0 | 3,313 |
import bglib
from Adapter import Adapter
import logging
from utils import ConnectTimeout
logging.basicConfig(format='%(asctime)s:%(threadName)s:%(levelname)s:%(name)s:%(module)s:%(message)s', level=logging.DEBUG)
logger = logging.getLogger('BLEPython')
| AshtonInstruments/BLEPython | blepython/__init__.py | Python | gpl-2.0 | 254 |